[Regression-test](Export) add regression test for export #18897
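The files below are expected-result fixtures for the Doris regression-test framework: each block headed by a tag of the form "-- !name --" holds the rows that the matching qt_name or order_qt_name statement in the accompanying .groovy suite is compared against. A minimal, hypothetical sketch of how such a block is produced (the table name and query are illustrative, not taken from this PR):

    // Hypothetical Groovy sketch of the regression-test DSL. The tag name after
    // order_qt_ selects the "-- !select_export --" block in the suite's .out file;
    // the ordered query result is diffed against that block when the test runs.
    suite("test_export_basic") {
        order_qt_select_export """ SELECT * FROM test_export_table ORDER BY id; """
    }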
regression-test/data/export_p0/test_export_basic.out (new file, 430 lines)
@@ -0,0 +1,430 @@
-- This file is automatically generated. You should know what you did if you want to edit this
|
||||
-- !select_export --
|
||||
1 ftw-1 19
|
||||
2 ftw-2 20
|
||||
3 ftw-3 21
|
||||
4 ftw-4 22
|
||||
5 ftw-5 23
|
||||
6 ftw-6 24
|
||||
7 ftw-7 25
|
||||
8 ftw-8 26
|
||||
9 ftw-9 27
|
||||
10 ftw-10 28
|
||||
11 ftw-11 29
|
||||
12 ftw-12 30
|
||||
13 ftw-13 31
|
||||
14 ftw-14 32
|
||||
15 ftw-15 33
|
||||
16 ftw-16 34
|
||||
17 ftw-17 35
|
||||
18 ftw-18 36
|
||||
19 ftw-19 37
|
||||
20 ftw-20 38
|
||||
21 ftw-21 39
|
||||
22 ftw-22 40
|
||||
23 ftw-23 41
|
||||
24 ftw-24 42
|
||||
25 ftw-25 43
|
||||
26 ftw-26 44
|
||||
27 ftw-27 45
|
||||
28 ftw-28 46
|
||||
29 ftw-29 47
|
||||
30 ftw-30 48
|
||||
31 ftw-31 49
|
||||
32 ftw-32 50
|
||||
33 ftw-33 51
|
||||
34 ftw-34 52
|
||||
35 ftw-35 53
|
||||
36 ftw-36 54
|
||||
37 ftw-37 55
|
||||
38 ftw-38 56
|
||||
39 ftw-39 57
|
||||
40 ftw-40 58
|
||||
41 ftw-41 59
|
||||
42 ftw-42 60
|
||||
43 ftw-43 61
|
||||
44 ftw-44 62
|
||||
45 ftw-45 63
|
||||
46 ftw-46 64
|
||||
47 ftw-47 65
|
||||
48 ftw-48 66
|
||||
49 ftw-49 67
|
||||
50 ftw-50 68
|
||||
51 ftw-51 69
|
||||
52 ftw-52 70
|
||||
53 ftw-53 71
|
||||
54 ftw-54 72
|
||||
55 ftw-55 73
|
||||
56 ftw-56 74
|
||||
57 ftw-57 75
|
||||
58 ftw-58 76
|
||||
59 ftw-59 77
|
||||
60 ftw-60 78
|
||||
61 ftw-61 79
|
||||
62 ftw-62 80
|
||||
63 ftw-63 81
|
||||
64 ftw-64 82
|
||||
65 ftw-65 83
|
||||
66 ftw-66 84
|
||||
67 ftw-67 85
|
||||
68 ftw-68 86
|
||||
69 ftw-69 87
|
||||
70 ftw-70 88
|
||||
71 ftw-71 89
|
||||
72 ftw-72 90
|
||||
73 ftw-73 91
|
||||
74 ftw-74 92
|
||||
75 ftw-75 93
|
||||
76 ftw-76 94
|
||||
77 ftw-77 95
|
||||
78 ftw-78 96
|
||||
79 ftw-79 97
|
||||
80 ftw-80 98
|
||||
81 ftw-81 99
|
||||
82 ftw-82 100
|
||||
83 ftw-83 101
|
||||
84 ftw-84 102
|
||||
85 ftw-85 103
|
||||
86 ftw-86 104
|
||||
87 ftw-87 105
|
||||
88 ftw-88 106
|
||||
89 ftw-89 107
|
||||
90 ftw-90 108
|
||||
91 ftw-91 109
|
||||
92 ftw-92 110
|
||||
93 ftw-93 111
|
||||
94 ftw-94 112
|
||||
95 ftw-95 113
|
||||
96 ftw-96 114
|
||||
97 ftw-97 115
|
||||
98 ftw-98 116
|
||||
99 ftw-99 117
|
||||
100 ftw-100 118
|
||||
101 ftw-101 119
|
||||
102 ftw-102 120
|
||||
103 ftw-103 121
|
||||
104 ftw-104 122
|
||||
105 ftw-105 123
|
||||
106 ftw-106 124
|
||||
107 ftw-107 125
|
||||
108 ftw-108 126
|
||||
109 ftw-109 127
|
||||
110 ftw-110 128
|
||||
111 ftw-111 129
|
||||
112 ftw-112 130
|
||||
113 ftw-113 131
|
||||
114 ftw-114 132
|
||||
115 ftw-115 133
|
||||
116 ftw-116 134
|
||||
117 ftw-117 135
|
||||
118 ftw-118 136
|
||||
119 ftw-119 137
|
||||
120 ftw-120 138
|
||||
121 ftw-121 139
|
||||
122 ftw-122 140
|
||||
123 ftw-123 141
|
||||
124 ftw-124 142
|
||||
125 ftw-125 143
|
||||
126 ftw-126 144
|
||||
127 ftw-127 145
|
||||
128 ftw-128 146
|
||||
129 ftw-129 147
|
||||
130 ftw-130 148
|
||||
131 ftw-131 149
|
||||
132 ftw-132 150
|
||||
133 ftw-133 151
|
||||
134 ftw-134 152
|
||||
135 ftw-135 153
|
||||
136 ftw-136 154
|
||||
137 ftw-137 155
|
||||
138 ftw-138 156
|
||||
139 ftw-139 157
|
||||
140 ftw-140 158
|
||||
141 ftw-141 159
|
||||
142 ftw-142 160
|
||||
143 ftw-143 161
|
||||
144 ftw-144 162
|
||||
145 ftw-145 163
|
||||
146 ftw-146 164
|
||||
147 ftw-147 165
|
||||
148 ftw-148 166
|
||||
149 ftw-149 167
|
||||
150 \N \N
|
||||
|
||||
-- !select_load1 --
|
||||
1 ftw-1 19
|
||||
2 ftw-2 20
|
||||
3 ftw-3 21
|
||||
4 ftw-4 22
|
||||
5 ftw-5 23
|
||||
6 ftw-6 24
|
||||
7 ftw-7 25
|
||||
8 ftw-8 26
|
||||
9 ftw-9 27
|
||||
10 ftw-10 28
|
||||
11 ftw-11 29
|
||||
12 ftw-12 30
|
||||
13 ftw-13 31
|
||||
14 ftw-14 32
|
||||
15 ftw-15 33
|
||||
16 ftw-16 34
|
||||
17 ftw-17 35
|
||||
18 ftw-18 36
|
||||
19 ftw-19 37
|
||||
20 ftw-20 38
|
||||
21 ftw-21 39
|
||||
22 ftw-22 40
|
||||
23 ftw-23 41
|
||||
24 ftw-24 42
|
||||
25 ftw-25 43
|
||||
26 ftw-26 44
|
||||
27 ftw-27 45
|
||||
28 ftw-28 46
|
||||
29 ftw-29 47
|
||||
30 ftw-30 48
|
||||
31 ftw-31 49
|
||||
32 ftw-32 50
|
||||
33 ftw-33 51
|
||||
34 ftw-34 52
|
||||
35 ftw-35 53
|
||||
36 ftw-36 54
|
||||
37 ftw-37 55
|
||||
38 ftw-38 56
|
||||
39 ftw-39 57
|
||||
40 ftw-40 58
|
||||
41 ftw-41 59
|
||||
42 ftw-42 60
|
||||
43 ftw-43 61
|
||||
44 ftw-44 62
|
||||
45 ftw-45 63
|
||||
46 ftw-46 64
|
||||
47 ftw-47 65
|
||||
48 ftw-48 66
|
||||
49 ftw-49 67
|
||||
50 ftw-50 68
|
||||
51 ftw-51 69
|
||||
52 ftw-52 70
|
||||
53 ftw-53 71
|
||||
54 ftw-54 72
|
||||
55 ftw-55 73
|
||||
56 ftw-56 74
|
||||
57 ftw-57 75
|
||||
58 ftw-58 76
|
||||
59 ftw-59 77
|
||||
60 ftw-60 78
|
||||
61 ftw-61 79
|
||||
62 ftw-62 80
|
||||
63 ftw-63 81
|
||||
64 ftw-64 82
|
||||
65 ftw-65 83
|
||||
66 ftw-66 84
|
||||
67 ftw-67 85
|
||||
68 ftw-68 86
|
||||
69 ftw-69 87
|
||||
70 ftw-70 88
|
||||
71 ftw-71 89
|
||||
72 ftw-72 90
|
||||
73 ftw-73 91
|
||||
74 ftw-74 92
|
||||
75 ftw-75 93
|
||||
76 ftw-76 94
|
||||
77 ftw-77 95
|
||||
78 ftw-78 96
|
||||
79 ftw-79 97
|
||||
80 ftw-80 98
|
||||
81 ftw-81 99
|
||||
82 ftw-82 100
|
||||
83 ftw-83 101
|
||||
84 ftw-84 102
|
||||
85 ftw-85 103
|
||||
86 ftw-86 104
|
||||
87 ftw-87 105
|
||||
88 ftw-88 106
|
||||
89 ftw-89 107
|
||||
90 ftw-90 108
|
||||
91 ftw-91 109
|
||||
92 ftw-92 110
|
||||
93 ftw-93 111
|
||||
94 ftw-94 112
|
||||
95 ftw-95 113
|
||||
96 ftw-96 114
|
||||
97 ftw-97 115
|
||||
98 ftw-98 116
|
||||
99 ftw-99 117
|
||||
100 ftw-100 118
|
||||
101 ftw-101 119
|
||||
102 ftw-102 120
|
||||
103 ftw-103 121
|
||||
104 ftw-104 122
|
||||
105 ftw-105 123
|
||||
106 ftw-106 124
|
||||
107 ftw-107 125
|
||||
108 ftw-108 126
|
||||
109 ftw-109 127
|
||||
110 ftw-110 128
|
||||
111 ftw-111 129
|
||||
112 ftw-112 130
|
||||
113 ftw-113 131
|
||||
114 ftw-114 132
|
||||
115 ftw-115 133
|
||||
116 ftw-116 134
|
||||
117 ftw-117 135
|
||||
118 ftw-118 136
|
||||
119 ftw-119 137
|
||||
120 ftw-120 138
|
||||
121 ftw-121 139
|
||||
122 ftw-122 140
|
||||
123 ftw-123 141
|
||||
124 ftw-124 142
|
||||
125 ftw-125 143
|
||||
126 ftw-126 144
|
||||
127 ftw-127 145
|
||||
128 ftw-128 146
|
||||
129 ftw-129 147
|
||||
130 ftw-130 148
|
||||
131 ftw-131 149
|
||||
132 ftw-132 150
|
||||
133 ftw-133 151
|
||||
134 ftw-134 152
|
||||
135 ftw-135 153
|
||||
136 ftw-136 154
|
||||
137 ftw-137 155
|
||||
138 ftw-138 156
|
||||
139 ftw-139 157
|
||||
140 ftw-140 158
|
||||
141 ftw-141 159
|
||||
142 ftw-142 160
|
||||
143 ftw-143 161
|
||||
144 ftw-144 162
|
||||
145 ftw-145 163
|
||||
146 ftw-146 164
|
||||
147 ftw-147 165
|
||||
148 ftw-148 166
|
||||
149 ftw-149 167
|
||||
150 \N \N
|
||||
|
||||
-- !select_load2 --
|
||||
1 ftw-1 19
|
||||
2 ftw-2 20
|
||||
3 ftw-3 21
|
||||
4 ftw-4 22
|
||||
5 ftw-5 23
|
||||
6 ftw-6 24
|
||||
7 ftw-7 25
|
||||
8 ftw-8 26
|
||||
9 ftw-9 27
|
||||
10 ftw-10 28
|
||||
11 ftw-11 29
|
||||
12 ftw-12 30
|
||||
13 ftw-13 31
|
||||
14 ftw-14 32
|
||||
15 ftw-15 33
|
||||
16 ftw-16 34
|
||||
17 ftw-17 35
|
||||
18 ftw-18 36
|
||||
19 ftw-19 37
|
||||
|
||||
-- !select_load3 --
|
||||
20 ftw-20 38
|
||||
21 ftw-21 39
|
||||
22 ftw-22 40
|
||||
23 ftw-23 41
|
||||
24 ftw-24 42
|
||||
25 ftw-25 43
|
||||
26 ftw-26 44
|
||||
27 ftw-27 45
|
||||
28 ftw-28 46
|
||||
29 ftw-29 47
|
||||
30 ftw-30 48
|
||||
31 ftw-31 49
|
||||
32 ftw-32 50
|
||||
33 ftw-33 51
|
||||
34 ftw-34 52
|
||||
35 ftw-35 53
|
||||
36 ftw-36 54
|
||||
37 ftw-37 55
|
||||
38 ftw-38 56
|
||||
39 ftw-39 57
|
||||
40 ftw-40 58
|
||||
41 ftw-41 59
|
||||
42 ftw-42 60
|
||||
43 ftw-43 61
|
||||
44 ftw-44 62
|
||||
45 ftw-45 63
|
||||
46 ftw-46 64
|
||||
47 ftw-47 65
|
||||
48 ftw-48 66
|
||||
49 ftw-49 67
|
||||
50 ftw-50 68
|
||||
51 ftw-51 69
|
||||
52 ftw-52 70
|
||||
53 ftw-53 71
|
||||
54 ftw-54 72
|
||||
55 ftw-55 73
|
||||
56 ftw-56 74
|
||||
57 ftw-57 75
|
||||
58 ftw-58 76
|
||||
59 ftw-59 77
|
||||
60 ftw-60 78
|
||||
61 ftw-61 79
|
||||
62 ftw-62 80
|
||||
63 ftw-63 81
|
||||
64 ftw-64 82
|
||||
65 ftw-65 83
|
||||
66 ftw-66 84
|
||||
67 ftw-67 85
|
||||
68 ftw-68 86
|
||||
69 ftw-69 87
|
||||
|
||||
-- !select_load3 --
|
||||
101 ftw-101 119
|
||||
102 ftw-102 120
|
||||
103 ftw-103 121
|
||||
104 ftw-104 122
|
||||
105 ftw-105 123
|
||||
106 ftw-106 124
|
||||
107 ftw-107 125
|
||||
108 ftw-108 126
|
||||
109 ftw-109 127
|
||||
110 ftw-110 128
|
||||
111 ftw-111 129
|
||||
112 ftw-112 130
|
||||
113 ftw-113 131
|
||||
114 ftw-114 132
|
||||
115 ftw-115 133
|
||||
116 ftw-116 134
|
||||
117 ftw-117 135
|
||||
118 ftw-118 136
|
||||
119 ftw-119 137
|
||||
120 ftw-120 138
|
||||
121 ftw-121 139
|
||||
122 ftw-122 140
|
||||
123 ftw-123 141
|
||||
124 ftw-124 142
|
||||
125 ftw-125 143
|
||||
126 ftw-126 144
|
||||
127 ftw-127 145
|
||||
128 ftw-128 146
|
||||
129 ftw-129 147
|
||||
130 ftw-130 148
|
||||
131 ftw-131 149
|
||||
132 ftw-132 150
|
||||
133 ftw-133 151
|
||||
134 ftw-134 152
|
||||
135 ftw-135 153
|
||||
136 ftw-136 154
|
||||
137 ftw-137 155
|
||||
138 ftw-138 156
|
||||
139 ftw-139 157
|
||||
140 ftw-140 158
|
||||
141 ftw-141 159
|
||||
142 ftw-142 160
|
||||
143 ftw-143 161
|
||||
144 ftw-144 162
|
||||
145 ftw-145 163
|
||||
146 ftw-146 164
|
||||
147 ftw-147 165
|
||||
148 ftw-148 166
|
||||
149 ftw-149 167
|
||||
150 \N \N

regression-test/data/export_p0/test_export_csv.out (new file, 241 lines)
@@ -0,0 +1,241 @@
-- This file is automatically generated. You should know what you did if you want to edit this
|
||||
-- !select_export1 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
11 2017-10-01 2017-10-01T00:00 Beijing 11 11 true 11 11 11 11.11 11.11 char11 11
|
||||
12 2017-10-01 2017-10-01T00:00 Beijing 12 12 true 12 12 12 12.12 12.12 char12 12
|
||||
13 2017-10-01 2017-10-01T00:00 Beijing 13 13 true 13 13 13 13.13 13.13 char13 13
|
||||
14 2017-10-01 2017-10-01T00:00 Beijing 14 14 true 14 14 14 14.14 14.14 char14 14
|
||||
15 2017-10-01 2017-10-01T00:00 Beijing 15 15 true 15 15 15 15.15 15.15 char15 15
|
||||
16 2017-10-01 2017-10-01T00:00 Beijing 16 16 true 16 16 16 16.16 16.16 char16 16
|
||||
17 2017-10-01 2017-10-01T00:00 Beijing 17 17 true 17 17 17 17.17 17.17 char17 17
|
||||
18 2017-10-01 2017-10-01T00:00 Beijing 18 18 true 18 18 18 18.18 18.18 char18 18
|
||||
19 2017-10-01 2017-10-01T00:00 Beijing 19 19 true 19 19 19 19.19 19.19 char19 19
|
||||
20 2017-10-01 2017-10-01T00:00 Beijing 20 20 true 20 20 20 20.2 20.2 char20 20
|
||||
21 2017-10-01 2017-10-01T00:00 Beijing 21 21 true 21 21 21 21.21 21.21 char21 21
|
||||
22 2017-10-01 2017-10-01T00:00 Beijing 22 22 true 22 22 22 22.22 22.22 char22 22
|
||||
23 2017-10-01 2017-10-01T00:00 Beijing 23 23 true 23 23 23 23.23 23.23 char23 23
|
||||
24 2017-10-01 2017-10-01T00:00 Beijing 24 24 true 24 24 24 24.24 24.24 char24 24
|
||||
25 2017-10-01 2017-10-01T00:00 Beijing 25 25 true 25 25 25 25.25 25.25 char25 25
|
||||
26 2017-10-01 2017-10-01T00:00 Beijing 26 26 true 26 26 26 26.26 26.26 char26 26
|
||||
27 2017-10-01 2017-10-01T00:00 Beijing 27 27 true 27 27 27 27.27 27.27 char27 27
|
||||
28 2017-10-01 2017-10-01T00:00 Beijing 28 28 true 28 28 28 28.28 28.28 char28 28
|
||||
29 2017-10-01 2017-10-01T00:00 Beijing 29 29 true 29 29 29 29.29 29.29 char29 29
|
||||
30 2017-10-01 2017-10-01T00:00 Beijing 30 30 true 30 30 30 30.3 30.3 char30 30
|
||||
31 2017-10-01 2017-10-01T00:00 Beijing 31 31 true 31 31 31 31.31 31.31 char31 31
|
||||
32 2017-10-01 2017-10-01T00:00 Beijing 32 32 true 32 32 32 32.32 32.32 char32 32
|
||||
33 2017-10-01 2017-10-01T00:00 Beijing 33 33 true 33 33 33 33.33 33.33 char33 33
|
||||
34 2017-10-01 2017-10-01T00:00 Beijing 34 34 true 34 34 34 34.34 34.34 char34 34
|
||||
35 2017-10-01 2017-10-01T00:00 Beijing 35 35 true 35 35 35 35.35 35.35 char35 35
|
||||
36 2017-10-01 2017-10-01T00:00 Beijing 36 36 true 36 36 36 36.36 36.36 char36 36
|
||||
37 2017-10-01 2017-10-01T00:00 Beijing 37 37 true 37 37 37 37.37 37.37 char37 37
|
||||
38 2017-10-01 2017-10-01T00:00 Beijing 38 38 true 38 38 38 38.38 38.38 char38 38
|
||||
39 2017-10-01 2017-10-01T00:00 Beijing 39 39 true 39 39 39 39.39 39.39 char39 39
|
||||
40 2017-10-01 2017-10-01T00:00 Beijing 40 40 true 40 40 40 40.4 40.4 char40 40
|
||||
41 2017-10-01 2017-10-01T00:00 Beijing 41 41 true 41 41 41 41.41 41.41 char41 41
|
||||
42 2017-10-01 2017-10-01T00:00 Beijing 42 42 true 42 42 42 42.42 42.42 char42 42
|
||||
43 2017-10-01 2017-10-01T00:00 Beijing 43 43 true 43 43 43 43.43 43.43 char43 43
|
||||
44 2017-10-01 2017-10-01T00:00 Beijing 44 44 true 44 44 44 44.44 44.44 char44 44
|
||||
45 2017-10-01 2017-10-01T00:00 Beijing 45 45 true 45 45 45 45.45 45.45 char45 45
|
||||
46 2017-10-01 2017-10-01T00:00 Beijing 46 46 true 46 46 46 46.46 46.46 char46 46
|
||||
47 2017-10-01 2017-10-01T00:00 Beijing 47 47 true 47 47 47 47.47 47.47 char47 47
|
||||
48 2017-10-01 2017-10-01T00:00 Beijing 48 48 true 48 48 48 48.48 48.48 char48 48
|
||||
49 2017-10-01 2017-10-01T00:00 Beijing 49 49 true 49 49 49 49.49 49.49 char49 49
|
||||
50 2017-10-01 2017-10-01T00:00 Beijing 50 50 true 50 50 50 50.5 50.5 char50 50
|
||||
51 2017-10-01 2017-10-01T00:00 Beijing 51 51 true 51 51 51 51.51 51.51 char51 51
|
||||
52 2017-10-01 2017-10-01T00:00 Beijing 52 52 true 52 52 52 52.52 52.52 char52 52
|
||||
53 2017-10-01 2017-10-01T00:00 Beijing 53 53 true 53 53 53 53.53 53.53 char53 53
|
||||
54 2017-10-01 2017-10-01T00:00 Beijing 54 54 true 54 54 54 54.54 54.54 char54 54
|
||||
55 2017-10-01 2017-10-01T00:00 Beijing 55 55 true 55 55 55 55.55 55.55 char55 55
|
||||
56 2017-10-01 2017-10-01T00:00 Beijing 56 56 true 56 56 56 56.56 56.56 char56 56
|
||||
57 2017-10-01 2017-10-01T00:00 Beijing 57 57 true 57 57 57 57.57 57.57 char57 57
|
||||
58 2017-10-01 2017-10-01T00:00 Beijing 58 58 true 58 58 58 58.58 58.58 char58 58
|
||||
59 2017-10-01 2017-10-01T00:00 Beijing 59 59 true 59 59 59 59.59 59.59 char59 59
|
||||
60 2017-10-01 2017-10-01T00:00 Beijing 60 60 true 60 60 60 60.6 60.6 char60 60
|
||||
61 2017-10-01 2017-10-01T00:00 Beijing 61 61 true 61 61 61 61.61 61.61 char61 61
|
||||
62 2017-10-01 2017-10-01T00:00 Beijing 62 62 true 62 62 62 62.62 62.62 char62 62
|
||||
63 2017-10-01 2017-10-01T00:00 Beijing 63 63 true 63 63 63 63.63 63.63 char63 63
|
||||
64 2017-10-01 2017-10-01T00:00 Beijing 64 64 true 64 64 64 64.64 64.64 char64 64
|
||||
65 2017-10-01 2017-10-01T00:00 Beijing 65 65 true 65 65 65 65.65 65.65 char65 65
|
||||
66 2017-10-01 2017-10-01T00:00 Beijing 66 66 true 66 66 66 66.66 66.66 char66 66
|
||||
67 2017-10-01 2017-10-01T00:00 Beijing 67 67 true 67 67 67 67.67 67.67 char67 67
|
||||
68 2017-10-01 2017-10-01T00:00 Beijing 68 68 true 68 68 68 68.68 68.68 char68 68
|
||||
69 2017-10-01 2017-10-01T00:00 Beijing 69 69 true 69 69 69 69.69 69.69 char69 69
|
||||
70 2017-10-01 2017-10-01T00:00 Beijing 70 70 true 70 70 70 70.7 70.7 char70 70
|
||||
71 2017-10-01 2017-10-01T00:00 Beijing 71 71 true 71 71 71 71.71 71.71 char71 71
|
||||
72 2017-10-01 2017-10-01T00:00 Beijing 72 72 true 72 72 72 72.72 72.72 char72 72
|
||||
73 2017-10-01 2017-10-01T00:00 Beijing 73 73 true 73 73 73 73.73 73.73 char73 73
|
||||
74 2017-10-01 2017-10-01T00:00 Beijing 74 74 true 74 74 74 74.74 74.74 char74 74
|
||||
75 2017-10-01 2017-10-01T00:00 Beijing 75 75 true 75 75 75 75.75 75.75 char75 75
|
||||
76 2017-10-01 2017-10-01T00:00 Beijing 76 76 true 76 76 76 76.76 76.76 char76 76
|
||||
77 2017-10-01 2017-10-01T00:00 Beijing 77 77 true 77 77 77 77.77 77.77 char77 77
|
||||
78 2017-10-01 2017-10-01T00:00 Beijing 78 78 true 78 78 78 78.78 78.78 char78 78
|
||||
79 2017-10-01 2017-10-01T00:00 Beijing 79 79 true 79 79 79 79.79 79.79 char79 79
|
||||
80 2017-10-01 2017-10-01T00:00 Beijing 80 80 true 80 80 80 80.8 80.8 char80 80
|
||||
81 2017-10-01 2017-10-01T00:00 Beijing 81 81 true 81 81 81 81.81 81.81 char81 81
|
||||
82 2017-10-01 2017-10-01T00:00 Beijing 82 82 true 82 82 82 82.82 82.82 char82 82
|
||||
83 2017-10-01 2017-10-01T00:00 Beijing 83 83 true 83 83 83 83.83 83.83 char83 83
|
||||
84 2017-10-01 2017-10-01T00:00 Beijing 84 84 true 84 84 84 84.84 84.84 char84 84
|
||||
85 2017-10-01 2017-10-01T00:00 Beijing 85 85 true 85 85 85 85.85 85.85 char85 85
|
||||
86 2017-10-01 2017-10-01T00:00 Beijing 86 86 true 86 86 86 86.86 86.86 char86 86
|
||||
87 2017-10-01 2017-10-01T00:00 Beijing 87 87 true 87 87 87 87.87 87.87 char87 87
|
||||
88 2017-10-01 2017-10-01T00:00 Beijing 88 88 true 88 88 88 88.88 88.88 char88 88
|
||||
89 2017-10-01 2017-10-01T00:00 Beijing 89 89 true 89 89 89 89.89 89.89 char89 89
|
||||
90 2017-10-01 2017-10-01T00:00 Beijing 90 90 true 90 90 90 90.9 90.9 char90 90
|
||||
91 2017-10-01 2017-10-01T00:00 Beijing 91 91 true 91 91 91 91.91 91.91 char91 91
|
||||
92 2017-10-01 2017-10-01T00:00 Beijing 92 92 true 92 92 92 92.92 92.92 char92 92
|
||||
93 2017-10-01 2017-10-01T00:00 Beijing 93 93 true 93 93 93 93.93 93.93 char93 93
|
||||
94 2017-10-01 2017-10-01T00:00 Beijing 94 94 true 94 94 94 94.94 94.94 char94 94
|
||||
95 2017-10-01 2017-10-01T00:00 Beijing 95 95 true 95 95 95 95.95 95.95 char95 95
|
||||
96 2017-10-01 2017-10-01T00:00 Beijing 96 96 true 96 96 96 96.96 96.96 char96 96
|
||||
97 2017-10-01 2017-10-01T00:00 Beijing 97 97 true 97 97 97 97.97 97.97 char97 97
|
||||
98 2017-10-01 2017-10-01T00:00 Beijing 98 98 true 98 98 98 98.98 98.98 char98 98
|
||||
99 2017-10-01 2017-10-01T00:00 Beijing 99 99 true 99 99 99 99.99 99.99 char99 99
|
||||
100 2017-10-01 2017-10-01T00:00 \N \N \N \N \N \N \N \N \N \N \N
|
||||
|
||||
-- !select_load1 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
11 2017-10-01 2017-10-01T00:00 Beijing 11 11 true 11 11 11 11.11 11.11 char11 11
|
||||
12 2017-10-01 2017-10-01T00:00 Beijing 12 12 true 12 12 12 12.12 12.12 char12 12
|
||||
13 2017-10-01 2017-10-01T00:00 Beijing 13 13 true 13 13 13 13.13 13.13 char13 13
|
||||
14 2017-10-01 2017-10-01T00:00 Beijing 14 14 true 14 14 14 14.14 14.14 char14 14
|
||||
15 2017-10-01 2017-10-01T00:00 Beijing 15 15 true 15 15 15 15.15 15.15 char15 15
|
||||
16 2017-10-01 2017-10-01T00:00 Beijing 16 16 true 16 16 16 16.16 16.16 char16 16
|
||||
17 2017-10-01 2017-10-01T00:00 Beijing 17 17 true 17 17 17 17.17 17.17 char17 17
|
||||
18 2017-10-01 2017-10-01T00:00 Beijing 18 18 true 18 18 18 18.18 18.18 char18 18
|
||||
19 2017-10-01 2017-10-01T00:00 Beijing 19 19 true 19 19 19 19.19 19.19 char19 19
|
||||
20 2017-10-01 2017-10-01T00:00 Beijing 20 20 true 20 20 20 20.2 20.2 char20 20
|
||||
21 2017-10-01 2017-10-01T00:00 Beijing 21 21 true 21 21 21 21.21 21.21 char21 21
|
||||
22 2017-10-01 2017-10-01T00:00 Beijing 22 22 true 22 22 22 22.22 22.22 char22 22
|
||||
23 2017-10-01 2017-10-01T00:00 Beijing 23 23 true 23 23 23 23.23 23.23 char23 23
|
||||
24 2017-10-01 2017-10-01T00:00 Beijing 24 24 true 24 24 24 24.24 24.24 char24 24
|
||||
25 2017-10-01 2017-10-01T00:00 Beijing 25 25 true 25 25 25 25.25 25.25 char25 25
|
||||
26 2017-10-01 2017-10-01T00:00 Beijing 26 26 true 26 26 26 26.26 26.26 char26 26
|
||||
27 2017-10-01 2017-10-01T00:00 Beijing 27 27 true 27 27 27 27.27 27.27 char27 27
|
||||
28 2017-10-01 2017-10-01T00:00 Beijing 28 28 true 28 28 28 28.28 28.28 char28 28
|
||||
29 2017-10-01 2017-10-01T00:00 Beijing 29 29 true 29 29 29 29.29 29.29 char29 29
|
||||
30 2017-10-01 2017-10-01T00:00 Beijing 30 30 true 30 30 30 30.3 30.3 char30 30
|
||||
31 2017-10-01 2017-10-01T00:00 Beijing 31 31 true 31 31 31 31.31 31.31 char31 31
|
||||
32 2017-10-01 2017-10-01T00:00 Beijing 32 32 true 32 32 32 32.32 32.32 char32 32
|
||||
33 2017-10-01 2017-10-01T00:00 Beijing 33 33 true 33 33 33 33.33 33.33 char33 33
|
||||
34 2017-10-01 2017-10-01T00:00 Beijing 34 34 true 34 34 34 34.34 34.34 char34 34
|
||||
35 2017-10-01 2017-10-01T00:00 Beijing 35 35 true 35 35 35 35.35 35.35 char35 35
|
||||
36 2017-10-01 2017-10-01T00:00 Beijing 36 36 true 36 36 36 36.36 36.36 char36 36
|
||||
37 2017-10-01 2017-10-01T00:00 Beijing 37 37 true 37 37 37 37.37 37.37 char37 37
|
||||
38 2017-10-01 2017-10-01T00:00 Beijing 38 38 true 38 38 38 38.38 38.38 char38 38
|
||||
39 2017-10-01 2017-10-01T00:00 Beijing 39 39 true 39 39 39 39.39 39.39 char39 39
|
||||
40 2017-10-01 2017-10-01T00:00 Beijing 40 40 true 40 40 40 40.4 40.4 char40 40
|
||||
41 2017-10-01 2017-10-01T00:00 Beijing 41 41 true 41 41 41 41.41 41.41 char41 41
|
||||
42 2017-10-01 2017-10-01T00:00 Beijing 42 42 true 42 42 42 42.42 42.42 char42 42
|
||||
43 2017-10-01 2017-10-01T00:00 Beijing 43 43 true 43 43 43 43.43 43.43 char43 43
|
||||
44 2017-10-01 2017-10-01T00:00 Beijing 44 44 true 44 44 44 44.44 44.44 char44 44
|
||||
45 2017-10-01 2017-10-01T00:00 Beijing 45 45 true 45 45 45 45.45 45.45 char45 45
|
||||
46 2017-10-01 2017-10-01T00:00 Beijing 46 46 true 46 46 46 46.46 46.46 char46 46
|
||||
47 2017-10-01 2017-10-01T00:00 Beijing 47 47 true 47 47 47 47.47 47.47 char47 47
|
||||
48 2017-10-01 2017-10-01T00:00 Beijing 48 48 true 48 48 48 48.48 48.48 char48 48
|
||||
49 2017-10-01 2017-10-01T00:00 Beijing 49 49 true 49 49 49 49.49 49.49 char49 49
|
||||
50 2017-10-01 2017-10-01T00:00 Beijing 50 50 true 50 50 50 50.5 50.5 char50 50
|
||||
51 2017-10-01 2017-10-01T00:00 Beijing 51 51 true 51 51 51 51.51 51.51 char51 51
|
||||
52 2017-10-01 2017-10-01T00:00 Beijing 52 52 true 52 52 52 52.52 52.52 char52 52
|
||||
53 2017-10-01 2017-10-01T00:00 Beijing 53 53 true 53 53 53 53.53 53.53 char53 53
|
||||
54 2017-10-01 2017-10-01T00:00 Beijing 54 54 true 54 54 54 54.54 54.54 char54 54
|
||||
55 2017-10-01 2017-10-01T00:00 Beijing 55 55 true 55 55 55 55.55 55.55 char55 55
|
||||
56 2017-10-01 2017-10-01T00:00 Beijing 56 56 true 56 56 56 56.56 56.56 char56 56
|
||||
57 2017-10-01 2017-10-01T00:00 Beijing 57 57 true 57 57 57 57.57 57.57 char57 57
|
||||
58 2017-10-01 2017-10-01T00:00 Beijing 58 58 true 58 58 58 58.58 58.58 char58 58
|
||||
59 2017-10-01 2017-10-01T00:00 Beijing 59 59 true 59 59 59 59.59 59.59 char59 59
|
||||
60 2017-10-01 2017-10-01T00:00 Beijing 60 60 true 60 60 60 60.6 60.6 char60 60
|
||||
61 2017-10-01 2017-10-01T00:00 Beijing 61 61 true 61 61 61 61.61 61.61 char61 61
|
||||
62 2017-10-01 2017-10-01T00:00 Beijing 62 62 true 62 62 62 62.62 62.62 char62 62
|
||||
63 2017-10-01 2017-10-01T00:00 Beijing 63 63 true 63 63 63 63.63 63.63 char63 63
|
||||
64 2017-10-01 2017-10-01T00:00 Beijing 64 64 true 64 64 64 64.64 64.64 char64 64
|
||||
65 2017-10-01 2017-10-01T00:00 Beijing 65 65 true 65 65 65 65.65 65.65 char65 65
|
||||
66 2017-10-01 2017-10-01T00:00 Beijing 66 66 true 66 66 66 66.66 66.66 char66 66
|
||||
67 2017-10-01 2017-10-01T00:00 Beijing 67 67 true 67 67 67 67.67 67.67 char67 67
|
||||
68 2017-10-01 2017-10-01T00:00 Beijing 68 68 true 68 68 68 68.68 68.68 char68 68
|
||||
69 2017-10-01 2017-10-01T00:00 Beijing 69 69 true 69 69 69 69.69 69.69 char69 69
|
||||
70 2017-10-01 2017-10-01T00:00 Beijing 70 70 true 70 70 70 70.7 70.7 char70 70
|
||||
71 2017-10-01 2017-10-01T00:00 Beijing 71 71 true 71 71 71 71.71 71.71 char71 71
|
||||
72 2017-10-01 2017-10-01T00:00 Beijing 72 72 true 72 72 72 72.72 72.72 char72 72
|
||||
73 2017-10-01 2017-10-01T00:00 Beijing 73 73 true 73 73 73 73.73 73.73 char73 73
|
||||
74 2017-10-01 2017-10-01T00:00 Beijing 74 74 true 74 74 74 74.74 74.74 char74 74
|
||||
75 2017-10-01 2017-10-01T00:00 Beijing 75 75 true 75 75 75 75.75 75.75 char75 75
|
||||
76 2017-10-01 2017-10-01T00:00 Beijing 76 76 true 76 76 76 76.76 76.76 char76 76
|
||||
77 2017-10-01 2017-10-01T00:00 Beijing 77 77 true 77 77 77 77.77 77.77 char77 77
|
||||
78 2017-10-01 2017-10-01T00:00 Beijing 78 78 true 78 78 78 78.78 78.78 char78 78
|
||||
79 2017-10-01 2017-10-01T00:00 Beijing 79 79 true 79 79 79 79.79 79.79 char79 79
|
||||
80 2017-10-01 2017-10-01T00:00 Beijing 80 80 true 80 80 80 80.8 80.8 char80 80
|
||||
81 2017-10-01 2017-10-01T00:00 Beijing 81 81 true 81 81 81 81.81 81.81 char81 81
|
||||
82 2017-10-01 2017-10-01T00:00 Beijing 82 82 true 82 82 82 82.82 82.82 char82 82
|
||||
83 2017-10-01 2017-10-01T00:00 Beijing 83 83 true 83 83 83 83.83 83.83 char83 83
|
||||
84 2017-10-01 2017-10-01T00:00 Beijing 84 84 true 84 84 84 84.84 84.84 char84 84
|
||||
85 2017-10-01 2017-10-01T00:00 Beijing 85 85 true 85 85 85 85.85 85.85 char85 85
|
||||
86 2017-10-01 2017-10-01T00:00 Beijing 86 86 true 86 86 86 86.86 86.86 char86 86
|
||||
87 2017-10-01 2017-10-01T00:00 Beijing 87 87 true 87 87 87 87.87 87.87 char87 87
|
||||
88 2017-10-01 2017-10-01T00:00 Beijing 88 88 true 88 88 88 88.88 88.88 char88 88
|
||||
89 2017-10-01 2017-10-01T00:00 Beijing 89 89 true 89 89 89 89.89 89.89 char89 89
|
||||
90 2017-10-01 2017-10-01T00:00 Beijing 90 90 true 90 90 90 90.9 90.9 char90 90
|
||||
91 2017-10-01 2017-10-01T00:00 Beijing 91 91 true 91 91 91 91.91 91.91 char91 91
|
||||
92 2017-10-01 2017-10-01T00:00 Beijing 92 92 true 92 92 92 92.92 92.92 char92 92
|
||||
93 2017-10-01 2017-10-01T00:00 Beijing 93 93 true 93 93 93 93.93 93.93 char93 93
|
||||
94 2017-10-01 2017-10-01T00:00 Beijing 94 94 true 94 94 94 94.94 94.94 char94 94
|
||||
95 2017-10-01 2017-10-01T00:00 Beijing 95 95 true 95 95 95 95.95 95.95 char95 95
|
||||
96 2017-10-01 2017-10-01T00:00 Beijing 96 96 true 96 96 96 96.96 96.96 char96 96
|
||||
97 2017-10-01 2017-10-01T00:00 Beijing 97 97 true 97 97 97 97.97 97.97 char97 97
|
||||
98 2017-10-01 2017-10-01T00:00 Beijing 98 98 true 98 98 98 98.98 98.98 char98 98
|
||||
99 2017-10-01 2017-10-01T00:00 Beijing 99 99 true 99 99 99 99.99 99.99 char99 99
|
||||
100 2017-10-01 2017-10-01T00:00 \N \N \N \N \N \N \N \N \N \N \N
|
||||
|
||||
-- !select_load2 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
|
||||
-- !select_load3 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
|
||||
-- !select_load4 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10

regression-test/data/export_p0/test_export_data_types.out (new file, 36 lines)
@@ -0,0 +1,36 @@
-- This file is automatically generated. You should know what you did if you want to edit this
|
||||
-- !select_export1 --
|
||||
1 2023-04-20 2023-04-20 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 1 1 true 1 1 1 1.1 1.1 char1 1 1 1 0.1 1.00000000 1.0000000000 1 1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
|
||||
2 9999-12-31 9999-12-31 9999-12-31T23:59:59 9999-12-31T23:59:59 2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400 Haidian -32768 -128 true -2147483648 -9223372036854775808 2 1.4E-45 4.9E-324 char2 100000000 100000000 4 0.1 0.99999999 9999999999.9999999999 99999999999999999999999999999999999999 9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
|
||||
3 2023-04-21 2023-04-21 2023-04-20T12:34:56 2023-04-20T00:00 2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456 Beijing 32767 127 true 2147483647 9223372036854775807 3 3.4028235e+38 1.7976931348623157E308 char3 999999999 999999999 9 0.9 9.99999999 1234567890.0123456789 12345678901234567890123456789012345678 1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
|
||||
4 0000-01-01 0000-01-01 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 4 4 true 4 4 4 4.4 4.4 char4 4 4 4 0.4 4.00000000 4.0000000000 4 4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
|
||||
|
||||
-- !select_load1 --
|
||||
1 2023-04-20 2023-04-20 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 1 1 true 1 1 1 1.1 1.1 char1 1 1 1 0.1 1.00000000 1.0000000000 1 1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
|
||||
2 9999-12-31 9999-12-31 9999-12-31T23:59:59 9999-12-31T23:59:59 2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400 Haidian -32768 -128 true -2147483648 -9223372036854775808 2 1.4E-45 4.9E-324 char2 100000000 100000000 4 0.1 0.99999999 9999999999.9999999999 99999999999999999999999999999999999999 9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
|
||||
3 2023-04-21 2023-04-21 2023-04-20T12:34:56 2023-04-20T00:00 2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456 Beijing 32767 127 true 2147483647 9223372036854775807 3 3.4028235e+38 1.7976931348623157E308 char3 999999999 999999999 9 0.9 9.99999999 1234567890.0123456789 12345678901234567890123456789012345678 1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
|
||||
4 0000-01-01 0000-01-01 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 4 4 true 4 4 4 4.4 4.4 char4 4 4 4 0.4 4.00000000 4.0000000000 4 4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
|
||||
|
||||
-- !select_load2 --
|
||||
1 2023-04-20 2023-04-20 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 1 1 true 1 1 1.1 1.1 char1 1 1 1 0.1 1.00000000 1.0000000000 1 1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
|
||||
2 9999-12-31 9999-12-31 9999-12-31T23:59:59 9999-12-31T23:59:59 2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400 Haidian -32768 -128 true -2147483648 -9223372036854775808 1.4E-45 4.9E-324 char2 100000000 100000000 4 0.1 0.99999999 9999999999.9999999999 99999999999999999999999999999999999999 9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
|
||||
3 2023-04-21 2023-04-21 2023-04-20T12:34:56 2023-04-20T00:00 2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456 Beijing 32767 127 true 2147483647 9223372036854775807 3.4028235e+38 1.7976931348623157E308 char3 999999999 999999999 9 0.9 9.99999999 1234567890.0123456789 12345678901234567890123456789012345678 1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
|
||||
|
||||
-- !select_load3 --
|
||||
1 2023-04-20 Beijing Haidian 1 1 true 1 1 1.1 1.1 char1 1
|
||||
2 9999-12-31 Haidian -32768 -128 true -2147483648 -9223372036854775808 1.4E-45 4.9E-324 char2 100000000
|
||||
3 2023-04-21 Beijing 32767 127 true 2147483647 9223372036854775807 3.4028235e+38 1.7976931348623157E308 char3 999999999
|
||||
4 0000-01-01 Beijing Haidian 4 4 true 4 4 4.4 4.4 char4 4
|
||||
|
||||
-- !select_load4 --
|
||||
1 2023-04-20 2023-04-20 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 1 1 true 1 1 1 1.1 1.1 char1 1 1 1 0.1 1.00000000 1.0000000000 1 1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
|
||||
2 9999-12-31 9999-12-31 9999-12-31T23:59:59 9999-12-31T23:59:59 2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400 Haidian -32768 -128 true -2147483648 -9223372036854775808 2 1.4E-45 4.9E-324 char2 100000000 100000000 4 0.1 0.99999999 9999999999.9999999999 99999999999999999999999999999999999999 9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
|
||||
3 2023-04-21 2023-04-21 2023-04-20T12:34:56 2023-04-20T00:00 2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456 Beijing 32767 127 true 2147483647 9223372036854775807 3 3.4028235e+38 1.7976931348623157E308 char3 999999999 999999999 9 0.9 9.99999999 1234567890.0123456789 12345678901234567890123456789012345678 1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
|
||||
4 0000-01-01 0000-01-01 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 4 4 true 4 4 4 4.4 4.4 char4 4 4 4 0.4 4.00000000 4.0000000000 4 4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000
|
||||
|
||||
-- !select_load5 --
|
||||
1 2023-04-20 2023-04-20 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 1 1 true 1 1 1 1.1 1.1 char1 1 1 1 0.1 1.00000000 1.0000000000 1 1.0000000000000000000000000000000000000 0.10000000000000000000000000000000000000
|
||||
2 9999-12-31 9999-12-31 9999-12-31T23:59:59 9999-12-31T23:59:59 2023-04-20T00:00:00.120 2023-04-20T00:00:00.334400 Haidian -32768 -128 true -2147483648 -9223372036854775808 2 1.4E-45 4.9E-324 char2 100000000 100000000 4 0.1 0.99999999 9999999999.9999999999 99999999999999999999999999999999999999 9.9999999999999999999999999999999999999 0.99999999999999999999999999999999999999
|
||||
3 2023-04-21 2023-04-21 2023-04-20T12:34:56 2023-04-20T00:00 2023-04-20T00:00:00.123 2023-04-20T00:00:00.123456 Beijing 32767 127 true 2147483647 9223372036854775807 3 3.4028235e+38 1.7976931348623157E308 char3 999999999 999999999 9 0.9 9.99999999 1234567890.0123456789 12345678901234567890123456789012345678 1.2345678901234567890123456789012345678 0.12345678901234567890123456789012345678
|
||||
4 0000-01-01 0000-01-01 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 2023-04-20T00:00 Beijing Haidian 4 4 true 4 4 4 4.4 4.4 char4 4 4 4 0.4 4.00000000 4.0000000000 4 4.0000000000000000000000000000000000000 0.40000000000000000000000000000000000000

regression-test/data/export_p0/test_export_empty_table.out (new file, 13 lines)
@@ -0,0 +1,13 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select_export1 --

-- !select_load1 --

-- !select_load2 --

-- !select_load3 --

-- !select_load4 --

-- !select_load5 --

regression-test/data/export_p0/test_export_orc.out (new file, 205 lines)
@@ -0,0 +1,205 @@
-- This file is automatically generated. You should know what you did if you want to edit this
|
||||
-- !select_export1 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
11 2017-10-01 2017-10-01T00:00 Beijing 11 11 true 11 11 11 11.11 11.11 char11 11
|
||||
12 2017-10-01 2017-10-01T00:00 Beijing 12 12 true 12 12 12 12.12 12.12 char12 12
|
||||
13 2017-10-01 2017-10-01T00:00 Beijing 13 13 true 13 13 13 13.13 13.13 char13 13
|
||||
14 2017-10-01 2017-10-01T00:00 Beijing 14 14 true 14 14 14 14.14 14.14 char14 14
|
||||
15 2017-10-01 2017-10-01T00:00 Beijing 15 15 true 15 15 15 15.15 15.15 char15 15
|
||||
16 2017-10-01 2017-10-01T00:00 Beijing 16 16 true 16 16 16 16.16 16.16 char16 16
|
||||
17 2017-10-01 2017-10-01T00:00 Beijing 17 17 true 17 17 17 17.17 17.17 char17 17
|
||||
18 2017-10-01 2017-10-01T00:00 Beijing 18 18 true 18 18 18 18.18 18.18 char18 18
|
||||
19 2017-10-01 2017-10-01T00:00 Beijing 19 19 true 19 19 19 19.19 19.19 char19 19
|
||||
20 2017-10-01 2017-10-01T00:00 Beijing 20 20 true 20 20 20 20.2 20.2 char20 20
|
||||
21 2017-10-01 2017-10-01T00:00 Beijing 21 21 true 21 21 21 21.21 21.21 char21 21
|
||||
22 2017-10-01 2017-10-01T00:00 Beijing 22 22 true 22 22 22 22.22 22.22 char22 22
|
||||
23 2017-10-01 2017-10-01T00:00 Beijing 23 23 true 23 23 23 23.23 23.23 char23 23
|
||||
24 2017-10-01 2017-10-01T00:00 Beijing 24 24 true 24 24 24 24.24 24.24 char24 24
|
||||
25 2017-10-01 2017-10-01T00:00 Beijing 25 25 true 25 25 25 25.25 25.25 char25 25
|
||||
26 2017-10-01 2017-10-01T00:00 Beijing 26 26 true 26 26 26 26.26 26.26 char26 26
|
||||
27 2017-10-01 2017-10-01T00:00 Beijing 27 27 true 27 27 27 27.27 27.27 char27 27
|
||||
28 2017-10-01 2017-10-01T00:00 Beijing 28 28 true 28 28 28 28.28 28.28 char28 28
|
||||
29 2017-10-01 2017-10-01T00:00 Beijing 29 29 true 29 29 29 29.29 29.29 char29 29
|
||||
30 2017-10-01 2017-10-01T00:00 Beijing 30 30 true 30 30 30 30.3 30.3 char30 30
|
||||
31 2017-10-01 2017-10-01T00:00 Beijing 31 31 true 31 31 31 31.31 31.31 char31 31
|
||||
32 2017-10-01 2017-10-01T00:00 Beijing 32 32 true 32 32 32 32.32 32.32 char32 32
|
||||
33 2017-10-01 2017-10-01T00:00 Beijing 33 33 true 33 33 33 33.33 33.33 char33 33
|
||||
34 2017-10-01 2017-10-01T00:00 Beijing 34 34 true 34 34 34 34.34 34.34 char34 34
|
||||
35 2017-10-01 2017-10-01T00:00 Beijing 35 35 true 35 35 35 35.35 35.35 char35 35
|
||||
36 2017-10-01 2017-10-01T00:00 Beijing 36 36 true 36 36 36 36.36 36.36 char36 36
|
||||
37 2017-10-01 2017-10-01T00:00 Beijing 37 37 true 37 37 37 37.37 37.37 char37 37
|
||||
38 2017-10-01 2017-10-01T00:00 Beijing 38 38 true 38 38 38 38.38 38.38 char38 38
|
||||
39 2017-10-01 2017-10-01T00:00 Beijing 39 39 true 39 39 39 39.39 39.39 char39 39
|
||||
40 2017-10-01 2017-10-01T00:00 Beijing 40 40 true 40 40 40 40.4 40.4 char40 40
|
||||
41 2017-10-01 2017-10-01T00:00 Beijing 41 41 true 41 41 41 41.41 41.41 char41 41
|
||||
42 2017-10-01 2017-10-01T00:00 Beijing 42 42 true 42 42 42 42.42 42.42 char42 42
|
||||
43 2017-10-01 2017-10-01T00:00 Beijing 43 43 true 43 43 43 43.43 43.43 char43 43
|
||||
44 2017-10-01 2017-10-01T00:00 Beijing 44 44 true 44 44 44 44.44 44.44 char44 44
|
||||
45 2017-10-01 2017-10-01T00:00 Beijing 45 45 true 45 45 45 45.45 45.45 char45 45
|
||||
46 2017-10-01 2017-10-01T00:00 Beijing 46 46 true 46 46 46 46.46 46.46 char46 46
|
||||
47 2017-10-01 2017-10-01T00:00 Beijing 47 47 true 47 47 47 47.47 47.47 char47 47
|
||||
48 2017-10-01 2017-10-01T00:00 Beijing 48 48 true 48 48 48 48.48 48.48 char48 48
|
||||
49 2017-10-01 2017-10-01T00:00 Beijing 49 49 true 49 49 49 49.49 49.49 char49 49
|
||||
50 2017-10-01 2017-10-01T00:00 Beijing 50 50 true 50 50 50 50.5 50.5 char50 50
|
||||
51 2017-10-01 2017-10-01T00:00 Beijing 51 51 true 51 51 51 51.51 51.51 char51 51
|
||||
52 2017-10-01 2017-10-01T00:00 Beijing 52 52 true 52 52 52 52.52 52.52 char52 52
|
||||
53 2017-10-01 2017-10-01T00:00 Beijing 53 53 true 53 53 53 53.53 53.53 char53 53
|
||||
54 2017-10-01 2017-10-01T00:00 Beijing 54 54 true 54 54 54 54.54 54.54 char54 54
|
||||
55 2017-10-01 2017-10-01T00:00 Beijing 55 55 true 55 55 55 55.55 55.55 char55 55
|
||||
56 2017-10-01 2017-10-01T00:00 Beijing 56 56 true 56 56 56 56.56 56.56 char56 56
|
||||
57 2017-10-01 2017-10-01T00:00 Beijing 57 57 true 57 57 57 57.57 57.57 char57 57
|
||||
58 2017-10-01 2017-10-01T00:00 Beijing 58 58 true 58 58 58 58.58 58.58 char58 58
|
||||
59 2017-10-01 2017-10-01T00:00 Beijing 59 59 true 59 59 59 59.59 59.59 char59 59
|
||||
60 2017-10-01 2017-10-01T00:00 Beijing 60 60 true 60 60 60 60.6 60.6 char60 60
|
||||
61 2017-10-01 2017-10-01T00:00 Beijing 61 61 true 61 61 61 61.61 61.61 char61 61
|
||||
62 2017-10-01 2017-10-01T00:00 Beijing 62 62 true 62 62 62 62.62 62.62 char62 62
|
||||
63 2017-10-01 2017-10-01T00:00 Beijing 63 63 true 63 63 63 63.63 63.63 char63 63
|
||||
64 2017-10-01 2017-10-01T00:00 Beijing 64 64 true 64 64 64 64.64 64.64 char64 64
|
||||
65 2017-10-01 2017-10-01T00:00 Beijing 65 65 true 65 65 65 65.65 65.65 char65 65
|
||||
66 2017-10-01 2017-10-01T00:00 Beijing 66 66 true 66 66 66 66.66 66.66 char66 66
|
||||
67 2017-10-01 2017-10-01T00:00 Beijing 67 67 true 67 67 67 67.67 67.67 char67 67
|
||||
68 2017-10-01 2017-10-01T00:00 Beijing 68 68 true 68 68 68 68.68 68.68 char68 68
|
||||
69 2017-10-01 2017-10-01T00:00 Beijing 69 69 true 69 69 69 69.69 69.69 char69 69
|
||||
70 2017-10-01 2017-10-01T00:00 Beijing 70 70 true 70 70 70 70.7 70.7 char70 70
|
||||
71 2017-10-01 2017-10-01T00:00 Beijing 71 71 true 71 71 71 71.71 71.71 char71 71
|
||||
72 2017-10-01 2017-10-01T00:00 Beijing 72 72 true 72 72 72 72.72 72.72 char72 72
|
||||
73 2017-10-01 2017-10-01T00:00 Beijing 73 73 true 73 73 73 73.73 73.73 char73 73
|
||||
74 2017-10-01 2017-10-01T00:00 Beijing 74 74 true 74 74 74 74.74 74.74 char74 74
|
||||
75 2017-10-01 2017-10-01T00:00 Beijing 75 75 true 75 75 75 75.75 75.75 char75 75
|
||||
76 2017-10-01 2017-10-01T00:00 Beijing 76 76 true 76 76 76 76.76 76.76 char76 76
|
||||
77 2017-10-01 2017-10-01T00:00 Beijing 77 77 true 77 77 77 77.77 77.77 char77 77
|
||||
78 2017-10-01 2017-10-01T00:00 Beijing 78 78 true 78 78 78 78.78 78.78 char78 78
|
||||
79 2017-10-01 2017-10-01T00:00 Beijing 79 79 true 79 79 79 79.79 79.79 char79 79
|
||||
80 2017-10-01 2017-10-01T00:00 Beijing 80 80 true 80 80 80 80.8 80.8 char80 80
|
||||
81 2017-10-01 2017-10-01T00:00 Beijing 81 81 true 81 81 81 81.81 81.81 char81 81
|
||||
82 2017-10-01 2017-10-01T00:00 Beijing 82 82 true 82 82 82 82.82 82.82 char82 82
|
||||
83 2017-10-01 2017-10-01T00:00 Beijing 83 83 true 83 83 83 83.83 83.83 char83 83
|
||||
84 2017-10-01 2017-10-01T00:00 Beijing 84 84 true 84 84 84 84.84 84.84 char84 84
|
||||
85 2017-10-01 2017-10-01T00:00 Beijing 85 85 true 85 85 85 85.85 85.85 char85 85
|
||||
86 2017-10-01 2017-10-01T00:00 Beijing 86 86 true 86 86 86 86.86 86.86 char86 86
|
||||
87 2017-10-01 2017-10-01T00:00 Beijing 87 87 true 87 87 87 87.87 87.87 char87 87
|
||||
88 2017-10-01 2017-10-01T00:00 Beijing 88 88 true 88 88 88 88.88 88.88 char88 88
|
||||
89 2017-10-01 2017-10-01T00:00 Beijing 89 89 true 89 89 89 89.89 89.89 char89 89
|
||||
90 2017-10-01 2017-10-01T00:00 Beijing 90 90 true 90 90 90 90.9 90.9 char90 90
|
||||
91 2017-10-01 2017-10-01T00:00 Beijing 91 91 true 91 91 91 91.91 91.91 char91 91
|
||||
92 2017-10-01 2017-10-01T00:00 Beijing 92 92 true 92 92 92 92.92 92.92 char92 92
|
||||
93 2017-10-01 2017-10-01T00:00 Beijing 93 93 true 93 93 93 93.93 93.93 char93 93
|
||||
94 2017-10-01 2017-10-01T00:00 Beijing 94 94 true 94 94 94 94.94 94.94 char94 94
|
||||
95 2017-10-01 2017-10-01T00:00 Beijing 95 95 true 95 95 95 95.95 95.95 char95 95
|
||||
96 2017-10-01 2017-10-01T00:00 Beijing 96 96 true 96 96 96 96.96 96.96 char96 96
|
||||
97 2017-10-01 2017-10-01T00:00 Beijing 97 97 true 97 97 97 97.97 97.97 char97 97
|
||||
98 2017-10-01 2017-10-01T00:00 Beijing 98 98 true 98 98 98 98.98 98.98 char98 98
|
||||
99 2017-10-01 2017-10-01T00:00 Beijing 99 99 true 99 99 99 99.99 99.99 char99 99
|
||||
100 2017-10-01 2017-10-01T00:00 \N \N \N \N \N \N \N \N \N \N \N
|
||||
|
||||
-- !select_load1 --
|
||||
1 Beijing 1 1 true 1 1 1.1 1.1 char1 1
|
||||
2 Beijing 2 2 true 2 2 2.2 2.2 char2 2
|
||||
3 Beijing 3 3 true 3 3 3.3 3.3 char3 3
|
||||
4 Beijing 4 4 true 4 4 4.4 4.4 char4 4
|
||||
5 Beijing 5 5 true 5 5 5.5 5.5 char5 5
|
||||
6 Beijing 6 6 true 6 6 6.6 6.6 char6 6
|
||||
7 Beijing 7 7 true 7 7 7.7 7.7 char7 7
|
||||
8 Beijing 8 8 true 8 8 8.8 8.8 char8 8
|
||||
9 Beijing 9 9 true 9 9 9.9 9.9 char9 9
|
||||
10 Beijing 10 10 true 10 10 10.1 10.1 char10 10
|
||||
11 Beijing 11 11 true 11 11 11.11 11.11 char11 11
|
||||
12 Beijing 12 12 true 12 12 12.12 12.12 char12 12
|
||||
13 Beijing 13 13 true 13 13 13.13 13.13 char13 13
|
||||
14 Beijing 14 14 true 14 14 14.14 14.14 char14 14
|
||||
15 Beijing 15 15 true 15 15 15.15 15.15 char15 15
|
||||
16 Beijing 16 16 true 16 16 16.16 16.16 char16 16
|
||||
17 Beijing 17 17 true 17 17 17.17 17.17 char17 17
|
||||
18 Beijing 18 18 true 18 18 18.18 18.18 char18 18
|
||||
19 Beijing 19 19 true 19 19 19.19 19.19 char19 19
|
||||
20 Beijing 20 20 true 20 20 20.2 20.2 char20 20
|
||||
21 Beijing 21 21 true 21 21 21.21 21.21 char21 21
|
||||
22 Beijing 22 22 true 22 22 22.22 22.22 char22 22
|
||||
23 Beijing 23 23 true 23 23 23.23 23.23 char23 23
|
||||
24 Beijing 24 24 true 24 24 24.24 24.24 char24 24
|
||||
25 Beijing 25 25 true 25 25 25.25 25.25 char25 25
|
||||
26 Beijing 26 26 true 26 26 26.26 26.26 char26 26
|
||||
27 Beijing 27 27 true 27 27 27.27 27.27 char27 27
|
||||
28 Beijing 28 28 true 28 28 28.28 28.28 char28 28
|
||||
29 Beijing 29 29 true 29 29 29.29 29.29 char29 29
|
||||
30 Beijing 30 30 true 30 30 30.3 30.3 char30 30
|
||||
31 Beijing 31 31 true 31 31 31.31 31.31 char31 31
|
||||
32 Beijing 32 32 true 32 32 32.32 32.32 char32 32
|
||||
33 Beijing 33 33 true 33 33 33.33 33.33 char33 33
|
||||
34 Beijing 34 34 true 34 34 34.34 34.34 char34 34
|
||||
35 Beijing 35 35 true 35 35 35.35 35.35 char35 35
|
||||
36 Beijing 36 36 true 36 36 36.36 36.36 char36 36
|
||||
37 Beijing 37 37 true 37 37 37.37 37.37 char37 37
|
||||
38 Beijing 38 38 true 38 38 38.38 38.38 char38 38
|
||||
39 Beijing 39 39 true 39 39 39.39 39.39 char39 39
|
||||
40 Beijing 40 40 true 40 40 40.4 40.4 char40 40
|
||||
41 Beijing 41 41 true 41 41 41.41 41.41 char41 41
|
||||
42 Beijing 42 42 true 42 42 42.42 42.42 char42 42
|
||||
43 Beijing 43 43 true 43 43 43.43 43.43 char43 43
|
||||
44 Beijing 44 44 true 44 44 44.44 44.44 char44 44
|
||||
45 Beijing 45 45 true 45 45 45.45 45.45 char45 45
|
||||
46 Beijing 46 46 true 46 46 46.46 46.46 char46 46
|
||||
47 Beijing 47 47 true 47 47 47.47 47.47 char47 47
|
||||
48 Beijing 48 48 true 48 48 48.48 48.48 char48 48
|
||||
49 Beijing 49 49 true 49 49 49.49 49.49 char49 49
|
||||
50 Beijing 50 50 true 50 50 50.5 50.5 char50 50
|
||||
51 Beijing 51 51 true 51 51 51.51 51.51 char51 51
|
||||
52 Beijing 52 52 true 52 52 52.52 52.52 char52 52
|
||||
53 Beijing 53 53 true 53 53 53.53 53.53 char53 53
|
||||
54 Beijing 54 54 true 54 54 54.54 54.54 char54 54
|
||||
55 Beijing 55 55 true 55 55 55.55 55.55 char55 55
|
||||
56 Beijing 56 56 true 56 56 56.56 56.56 char56 56
|
||||
57 Beijing 57 57 true 57 57 57.57 57.57 char57 57
|
||||
58 Beijing 58 58 true 58 58 58.58 58.58 char58 58
|
||||
59 Beijing 59 59 true 59 59 59.59 59.59 char59 59
|
||||
60 Beijing 60 60 true 60 60 60.6 60.6 char60 60
|
||||
61 Beijing 61 61 true 61 61 61.61 61.61 char61 61
|
||||
62 Beijing 62 62 true 62 62 62.62 62.62 char62 62
|
||||
63 Beijing 63 63 true 63 63 63.63 63.63 char63 63
|
||||
64 Beijing 64 64 true 64 64 64.64 64.64 char64 64
|
||||
65 Beijing 65 65 true 65 65 65.65 65.65 char65 65
|
||||
66 Beijing 66 66 true 66 66 66.66 66.66 char66 66
|
||||
67 Beijing 67 67 true 67 67 67.67 67.67 char67 67
|
||||
68 Beijing 68 68 true 68 68 68.68 68.68 char68 68
|
||||
69 Beijing 69 69 true 69 69 69.69 69.69 char69 69
|
||||
70 Beijing 70 70 true 70 70 70.7 70.7 char70 70
|
||||
71 Beijing 71 71 true 71 71 71.71 71.71 char71 71
|
||||
72 Beijing 72 72 true 72 72 72.72 72.72 char72 72
|
||||
73 Beijing 73 73 true 73 73 73.73 73.73 char73 73
|
||||
74 Beijing 74 74 true 74 74 74.74 74.74 char74 74
|
||||
75 Beijing 75 75 true 75 75 75.75 75.75 char75 75
|
||||
76 Beijing 76 76 true 76 76 76.76 76.76 char76 76
|
||||
77 Beijing 77 77 true 77 77 77.77 77.77 char77 77
|
||||
78 Beijing 78 78 true 78 78 78.78 78.78 char78 78
|
||||
79 Beijing 79 79 true 79 79 79.79 79.79 char79 79
|
||||
80 Beijing 80 80 true 80 80 80.8 80.8 char80 80
|
||||
81 Beijing 81 81 true 81 81 81.81 81.81 char81 81
|
||||
82 Beijing 82 82 true 82 82 82.82 82.82 char82 82
|
||||
83 Beijing 83 83 true 83 83 83.83 83.83 char83 83
|
||||
84 Beijing 84 84 true 84 84 84.84 84.84 char84 84
|
||||
85 Beijing 85 85 true 85 85 85.85 85.85 char85 85
|
||||
86 Beijing 86 86 true 86 86 86.86 86.86 char86 86
|
||||
87 Beijing 87 87 true 87 87 87.87 87.87 char87 87
|
||||
88 Beijing 88 88 true 88 88 88.88 88.88 char88 88
|
||||
89 Beijing 89 89 true 89 89 89.89 89.89 char89 89
|
||||
90 Beijing 90 90 true 90 90 90.9 90.9 char90 90
|
||||
91 Beijing 91 91 true 91 91 91.91 91.91 char91 91
|
||||
92 Beijing 92 92 true 92 92 92.92 92.92 char92 92
|
||||
93 Beijing 93 93 true 93 93 93.93 93.93 char93 93
|
||||
94 Beijing 94 94 true 94 94 94.94 94.94 char94 94
|
||||
95 Beijing 95 95 true 95 95 95.95 95.95 char95 95
|
||||
96 Beijing 96 96 true 96 96 96.96 96.96 char96 96
|
||||
97 Beijing 97 97 true 97 97 97.97 97.97 char97 97
|
||||
98 Beijing 98 98 true 98 98 98.98 98.98 char98 98
|
||||
99 Beijing 99 99 true 99 99 99.99 99.99 char99 99
|
||||
100 \N \N \N \N \N \N \N \N \N \N

regression-test/data/export_p0/test_export_parquet.out (new file, 205 lines)
@@ -0,0 +1,205 @@
-- This file is automatically generated. You should know what you did if you want to edit this
|
||||
-- !select_export1 --
|
||||
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 1 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 2 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 3 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 4 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 5 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 6 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 7 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 8 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 9 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 10 10.1 10.1 char10 10
|
||||
11 2017-10-01 2017-10-01T00:00 Beijing 11 11 true 11 11 11 11.11 11.11 char11 11
|
||||
12 2017-10-01 2017-10-01T00:00 Beijing 12 12 true 12 12 12 12.12 12.12 char12 12
|
||||
13 2017-10-01 2017-10-01T00:00 Beijing 13 13 true 13 13 13 13.13 13.13 char13 13
|
||||
14 2017-10-01 2017-10-01T00:00 Beijing 14 14 true 14 14 14 14.14 14.14 char14 14
|
||||
15 2017-10-01 2017-10-01T00:00 Beijing 15 15 true 15 15 15 15.15 15.15 char15 15
|
||||
16 2017-10-01 2017-10-01T00:00 Beijing 16 16 true 16 16 16 16.16 16.16 char16 16
|
||||
17 2017-10-01 2017-10-01T00:00 Beijing 17 17 true 17 17 17 17.17 17.17 char17 17
|
||||
18 2017-10-01 2017-10-01T00:00 Beijing 18 18 true 18 18 18 18.18 18.18 char18 18
|
||||
19 2017-10-01 2017-10-01T00:00 Beijing 19 19 true 19 19 19 19.19 19.19 char19 19
|
||||
20 2017-10-01 2017-10-01T00:00 Beijing 20 20 true 20 20 20 20.2 20.2 char20 20
|
||||
21 2017-10-01 2017-10-01T00:00 Beijing 21 21 true 21 21 21 21.21 21.21 char21 21
|
||||
22 2017-10-01 2017-10-01T00:00 Beijing 22 22 true 22 22 22 22.22 22.22 char22 22
|
||||
23 2017-10-01 2017-10-01T00:00 Beijing 23 23 true 23 23 23 23.23 23.23 char23 23
|
||||
24 2017-10-01 2017-10-01T00:00 Beijing 24 24 true 24 24 24 24.24 24.24 char24 24
|
||||
25 2017-10-01 2017-10-01T00:00 Beijing 25 25 true 25 25 25 25.25 25.25 char25 25
|
||||
26 2017-10-01 2017-10-01T00:00 Beijing 26 26 true 26 26 26 26.26 26.26 char26 26
|
||||
27 2017-10-01 2017-10-01T00:00 Beijing 27 27 true 27 27 27 27.27 27.27 char27 27
|
||||
28 2017-10-01 2017-10-01T00:00 Beijing 28 28 true 28 28 28 28.28 28.28 char28 28
|
||||
29 2017-10-01 2017-10-01T00:00 Beijing 29 29 true 29 29 29 29.29 29.29 char29 29
|
||||
30 2017-10-01 2017-10-01T00:00 Beijing 30 30 true 30 30 30 30.3 30.3 char30 30
|
||||
31 2017-10-01 2017-10-01T00:00 Beijing 31 31 true 31 31 31 31.31 31.31 char31 31
|
||||
32 2017-10-01 2017-10-01T00:00 Beijing 32 32 true 32 32 32 32.32 32.32 char32 32
|
||||
33 2017-10-01 2017-10-01T00:00 Beijing 33 33 true 33 33 33 33.33 33.33 char33 33
|
||||
34 2017-10-01 2017-10-01T00:00 Beijing 34 34 true 34 34 34 34.34 34.34 char34 34
|
||||
35 2017-10-01 2017-10-01T00:00 Beijing 35 35 true 35 35 35 35.35 35.35 char35 35
|
||||
36 2017-10-01 2017-10-01T00:00 Beijing 36 36 true 36 36 36 36.36 36.36 char36 36
|
||||
37 2017-10-01 2017-10-01T00:00 Beijing 37 37 true 37 37 37 37.37 37.37 char37 37
|
||||
38 2017-10-01 2017-10-01T00:00 Beijing 38 38 true 38 38 38 38.38 38.38 char38 38
|
||||
39 2017-10-01 2017-10-01T00:00 Beijing 39 39 true 39 39 39 39.39 39.39 char39 39
|
||||
40 2017-10-01 2017-10-01T00:00 Beijing 40 40 true 40 40 40 40.4 40.4 char40 40
|
||||
41 2017-10-01 2017-10-01T00:00 Beijing 41 41 true 41 41 41 41.41 41.41 char41 41
|
||||
42 2017-10-01 2017-10-01T00:00 Beijing 42 42 true 42 42 42 42.42 42.42 char42 42
|
||||
43 2017-10-01 2017-10-01T00:00 Beijing 43 43 true 43 43 43 43.43 43.43 char43 43
|
||||
44 2017-10-01 2017-10-01T00:00 Beijing 44 44 true 44 44 44 44.44 44.44 char44 44
|
||||
45 2017-10-01 2017-10-01T00:00 Beijing 45 45 true 45 45 45 45.45 45.45 char45 45
|
||||
46 2017-10-01 2017-10-01T00:00 Beijing 46 46 true 46 46 46 46.46 46.46 char46 46
|
||||
47 2017-10-01 2017-10-01T00:00 Beijing 47 47 true 47 47 47 47.47 47.47 char47 47
|
||||
48 2017-10-01 2017-10-01T00:00 Beijing 48 48 true 48 48 48 48.48 48.48 char48 48
|
||||
49 2017-10-01 2017-10-01T00:00 Beijing 49 49 true 49 49 49 49.49 49.49 char49 49
|
||||
50 2017-10-01 2017-10-01T00:00 Beijing 50 50 true 50 50 50 50.5 50.5 char50 50
|
||||
51 2017-10-01 2017-10-01T00:00 Beijing 51 51 true 51 51 51 51.51 51.51 char51 51
|
||||
52 2017-10-01 2017-10-01T00:00 Beijing 52 52 true 52 52 52 52.52 52.52 char52 52
|
||||
53 2017-10-01 2017-10-01T00:00 Beijing 53 53 true 53 53 53 53.53 53.53 char53 53
|
||||
54 2017-10-01 2017-10-01T00:00 Beijing 54 54 true 54 54 54 54.54 54.54 char54 54
|
||||
55 2017-10-01 2017-10-01T00:00 Beijing 55 55 true 55 55 55 55.55 55.55 char55 55
|
||||
56 2017-10-01 2017-10-01T00:00 Beijing 56 56 true 56 56 56 56.56 56.56 char56 56
|
||||
57 2017-10-01 2017-10-01T00:00 Beijing 57 57 true 57 57 57 57.57 57.57 char57 57
|
||||
58 2017-10-01 2017-10-01T00:00 Beijing 58 58 true 58 58 58 58.58 58.58 char58 58
|
||||
59 2017-10-01 2017-10-01T00:00 Beijing 59 59 true 59 59 59 59.59 59.59 char59 59
|
||||
60 2017-10-01 2017-10-01T00:00 Beijing 60 60 true 60 60 60 60.6 60.6 char60 60
|
||||
61 2017-10-01 2017-10-01T00:00 Beijing 61 61 true 61 61 61 61.61 61.61 char61 61
|
||||
62 2017-10-01 2017-10-01T00:00 Beijing 62 62 true 62 62 62 62.62 62.62 char62 62
|
||||
63 2017-10-01 2017-10-01T00:00 Beijing 63 63 true 63 63 63 63.63 63.63 char63 63
|
||||
64 2017-10-01 2017-10-01T00:00 Beijing 64 64 true 64 64 64 64.64 64.64 char64 64
|
||||
65 2017-10-01 2017-10-01T00:00 Beijing 65 65 true 65 65 65 65.65 65.65 char65 65
|
||||
66 2017-10-01 2017-10-01T00:00 Beijing 66 66 true 66 66 66 66.66 66.66 char66 66
|
||||
67 2017-10-01 2017-10-01T00:00 Beijing 67 67 true 67 67 67 67.67 67.67 char67 67
|
||||
68 2017-10-01 2017-10-01T00:00 Beijing 68 68 true 68 68 68 68.68 68.68 char68 68
|
||||
69 2017-10-01 2017-10-01T00:00 Beijing 69 69 true 69 69 69 69.69 69.69 char69 69
|
||||
70 2017-10-01 2017-10-01T00:00 Beijing 70 70 true 70 70 70 70.7 70.7 char70 70
|
||||
71 2017-10-01 2017-10-01T00:00 Beijing 71 71 true 71 71 71 71.71 71.71 char71 71
|
||||
72 2017-10-01 2017-10-01T00:00 Beijing 72 72 true 72 72 72 72.72 72.72 char72 72
|
||||
73 2017-10-01 2017-10-01T00:00 Beijing 73 73 true 73 73 73 73.73 73.73 char73 73
|
||||
74 2017-10-01 2017-10-01T00:00 Beijing 74 74 true 74 74 74 74.74 74.74 char74 74
|
||||
75 2017-10-01 2017-10-01T00:00 Beijing 75 75 true 75 75 75 75.75 75.75 char75 75
|
||||
76 2017-10-01 2017-10-01T00:00 Beijing 76 76 true 76 76 76 76.76 76.76 char76 76
|
||||
77 2017-10-01 2017-10-01T00:00 Beijing 77 77 true 77 77 77 77.77 77.77 char77 77
|
||||
78 2017-10-01 2017-10-01T00:00 Beijing 78 78 true 78 78 78 78.78 78.78 char78 78
|
||||
79 2017-10-01 2017-10-01T00:00 Beijing 79 79 true 79 79 79 79.79 79.79 char79 79
|
||||
80 2017-10-01 2017-10-01T00:00 Beijing 80 80 true 80 80 80 80.8 80.8 char80 80
|
||||
81 2017-10-01 2017-10-01T00:00 Beijing 81 81 true 81 81 81 81.81 81.81 char81 81
|
||||
82 2017-10-01 2017-10-01T00:00 Beijing 82 82 true 82 82 82 82.82 82.82 char82 82
|
||||
83 2017-10-01 2017-10-01T00:00 Beijing 83 83 true 83 83 83 83.83 83.83 char83 83
|
||||
84 2017-10-01 2017-10-01T00:00 Beijing 84 84 true 84 84 84 84.84 84.84 char84 84
|
||||
85 2017-10-01 2017-10-01T00:00 Beijing 85 85 true 85 85 85 85.85 85.85 char85 85
|
||||
86 2017-10-01 2017-10-01T00:00 Beijing 86 86 true 86 86 86 86.86 86.86 char86 86
|
||||
87 2017-10-01 2017-10-01T00:00 Beijing 87 87 true 87 87 87 87.87 87.87 char87 87
|
||||
88 2017-10-01 2017-10-01T00:00 Beijing 88 88 true 88 88 88 88.88 88.88 char88 88
|
||||
89 2017-10-01 2017-10-01T00:00 Beijing 89 89 true 89 89 89 89.89 89.89 char89 89
|
||||
90 2017-10-01 2017-10-01T00:00 Beijing 90 90 true 90 90 90 90.9 90.9 char90 90
|
||||
91 2017-10-01 2017-10-01T00:00 Beijing 91 91 true 91 91 91 91.91 91.91 char91 91
|
||||
92 2017-10-01 2017-10-01T00:00 Beijing 92 92 true 92 92 92 92.92 92.92 char92 92
|
||||
93 2017-10-01 2017-10-01T00:00 Beijing 93 93 true 93 93 93 93.93 93.93 char93 93
|
||||
94 2017-10-01 2017-10-01T00:00 Beijing 94 94 true 94 94 94 94.94 94.94 char94 94
|
||||
95 2017-10-01 2017-10-01T00:00 Beijing 95 95 true 95 95 95 95.95 95.95 char95 95
|
||||
96 2017-10-01 2017-10-01T00:00 Beijing 96 96 true 96 96 96 96.96 96.96 char96 96
|
||||
97 2017-10-01 2017-10-01T00:00 Beijing 97 97 true 97 97 97 97.97 97.97 char97 97
|
||||
98 2017-10-01 2017-10-01T00:00 Beijing 98 98 true 98 98 98 98.98 98.98 char98 98
|
||||
99 2017-10-01 2017-10-01T00:00 Beijing 99 99 true 99 99 99 99.99 99.99 char99 99
|
||||
100 2017-10-01 2017-10-01T00:00 \N \N \N \N \N \N \N \N \N \N \N

-- !select_load1 --
1 2017-10-01 2017-10-01T00:00 Beijing 1 1 true 1 1 \N 1.1 1.1 char1 1
|
||||
2 2017-10-01 2017-10-01T00:00 Beijing 2 2 true 2 2 \N 2.2 2.2 char2 2
|
||||
3 2017-10-01 2017-10-01T00:00 Beijing 3 3 true 3 3 \N 3.3 3.3 char3 3
|
||||
4 2017-10-01 2017-10-01T00:00 Beijing 4 4 true 4 4 \N 4.4 4.4 char4 4
|
||||
5 2017-10-01 2017-10-01T00:00 Beijing 5 5 true 5 5 \N 5.5 5.5 char5 5
|
||||
6 2017-10-01 2017-10-01T00:00 Beijing 6 6 true 6 6 \N 6.6 6.6 char6 6
|
||||
7 2017-10-01 2017-10-01T00:00 Beijing 7 7 true 7 7 \N 7.7 7.7 char7 7
|
||||
8 2017-10-01 2017-10-01T00:00 Beijing 8 8 true 8 8 \N 8.8 8.8 char8 8
|
||||
9 2017-10-01 2017-10-01T00:00 Beijing 9 9 true 9 9 \N 9.9 9.9 char9 9
|
||||
10 2017-10-01 2017-10-01T00:00 Beijing 10 10 true 10 10 \N 10.1 10.1 char10 10
|
||||
11 2017-10-01 2017-10-01T00:00 Beijing 11 11 true 11 11 \N 11.11 11.11 char11 11
|
||||
12 2017-10-01 2017-10-01T00:00 Beijing 12 12 true 12 12 \N 12.12 12.12 char12 12
|
||||
13 2017-10-01 2017-10-01T00:00 Beijing 13 13 true 13 13 \N 13.13 13.13 char13 13
|
||||
14 2017-10-01 2017-10-01T00:00 Beijing 14 14 true 14 14 \N 14.14 14.14 char14 14
|
||||
15 2017-10-01 2017-10-01T00:00 Beijing 15 15 true 15 15 \N 15.15 15.15 char15 15
|
||||
16 2017-10-01 2017-10-01T00:00 Beijing 16 16 true 16 16 \N 16.16 16.16 char16 16
|
||||
17 2017-10-01 2017-10-01T00:00 Beijing 17 17 true 17 17 \N 17.17 17.17 char17 17
|
||||
18 2017-10-01 2017-10-01T00:00 Beijing 18 18 true 18 18 \N 18.18 18.18 char18 18
|
||||
19 2017-10-01 2017-10-01T00:00 Beijing 19 19 true 19 19 \N 19.19 19.19 char19 19
|
||||
20 2017-10-01 2017-10-01T00:00 Beijing 20 20 true 20 20 \N 20.2 20.2 char20 20
|
||||
21 2017-10-01 2017-10-01T00:00 Beijing 21 21 true 21 21 \N 21.21 21.21 char21 21
|
||||
22 2017-10-01 2017-10-01T00:00 Beijing 22 22 true 22 22 \N 22.22 22.22 char22 22
|
||||
23 2017-10-01 2017-10-01T00:00 Beijing 23 23 true 23 23 \N 23.23 23.23 char23 23
|
||||
24 2017-10-01 2017-10-01T00:00 Beijing 24 24 true 24 24 \N 24.24 24.24 char24 24
|
||||
25 2017-10-01 2017-10-01T00:00 Beijing 25 25 true 25 25 \N 25.25 25.25 char25 25
|
||||
26 2017-10-01 2017-10-01T00:00 Beijing 26 26 true 26 26 \N 26.26 26.26 char26 26
|
||||
27 2017-10-01 2017-10-01T00:00 Beijing 27 27 true 27 27 \N 27.27 27.27 char27 27
|
||||
28 2017-10-01 2017-10-01T00:00 Beijing 28 28 true 28 28 \N 28.28 28.28 char28 28
|
||||
29 2017-10-01 2017-10-01T00:00 Beijing 29 29 true 29 29 \N 29.29 29.29 char29 29
|
||||
30 2017-10-01 2017-10-01T00:00 Beijing 30 30 true 30 30 \N 30.3 30.3 char30 30
|
||||
31 2017-10-01 2017-10-01T00:00 Beijing 31 31 true 31 31 \N 31.31 31.31 char31 31
|
||||
32 2017-10-01 2017-10-01T00:00 Beijing 32 32 true 32 32 \N 32.32 32.32 char32 32
|
||||
33 2017-10-01 2017-10-01T00:00 Beijing 33 33 true 33 33 \N 33.33 33.33 char33 33
|
||||
34 2017-10-01 2017-10-01T00:00 Beijing 34 34 true 34 34 \N 34.34 34.34 char34 34
|
||||
35 2017-10-01 2017-10-01T00:00 Beijing 35 35 true 35 35 \N 35.35 35.35 char35 35
|
||||
36 2017-10-01 2017-10-01T00:00 Beijing 36 36 true 36 36 \N 36.36 36.36 char36 36
|
||||
37 2017-10-01 2017-10-01T00:00 Beijing 37 37 true 37 37 \N 37.37 37.37 char37 37
|
||||
38 2017-10-01 2017-10-01T00:00 Beijing 38 38 true 38 38 \N 38.38 38.38 char38 38
|
||||
39 2017-10-01 2017-10-01T00:00 Beijing 39 39 true 39 39 \N 39.39 39.39 char39 39
|
||||
40 2017-10-01 2017-10-01T00:00 Beijing 40 40 true 40 40 \N 40.4 40.4 char40 40
|
||||
41 2017-10-01 2017-10-01T00:00 Beijing 41 41 true 41 41 \N 41.41 41.41 char41 41
|
||||
42 2017-10-01 2017-10-01T00:00 Beijing 42 42 true 42 42 \N 42.42 42.42 char42 42
|
||||
43 2017-10-01 2017-10-01T00:00 Beijing 43 43 true 43 43 \N 43.43 43.43 char43 43
|
||||
44 2017-10-01 2017-10-01T00:00 Beijing 44 44 true 44 44 \N 44.44 44.44 char44 44
|
||||
45 2017-10-01 2017-10-01T00:00 Beijing 45 45 true 45 45 \N 45.45 45.45 char45 45
|
||||
46 2017-10-01 2017-10-01T00:00 Beijing 46 46 true 46 46 \N 46.46 46.46 char46 46
|
||||
47 2017-10-01 2017-10-01T00:00 Beijing 47 47 true 47 47 \N 47.47 47.47 char47 47
|
||||
48 2017-10-01 2017-10-01T00:00 Beijing 48 48 true 48 48 \N 48.48 48.48 char48 48
|
||||
49 2017-10-01 2017-10-01T00:00 Beijing 49 49 true 49 49 \N 49.49 49.49 char49 49
|
||||
50 2017-10-01 2017-10-01T00:00 Beijing 50 50 true 50 50 \N 50.5 50.5 char50 50
|
||||
51 2017-10-01 2017-10-01T00:00 Beijing 51 51 true 51 51 \N 51.51 51.51 char51 51
|
||||
52 2017-10-01 2017-10-01T00:00 Beijing 52 52 true 52 52 \N 52.52 52.52 char52 52
|
||||
53 2017-10-01 2017-10-01T00:00 Beijing 53 53 true 53 53 \N 53.53 53.53 char53 53
|
||||
54 2017-10-01 2017-10-01T00:00 Beijing 54 54 true 54 54 \N 54.54 54.54 char54 54
|
||||
55 2017-10-01 2017-10-01T00:00 Beijing 55 55 true 55 55 \N 55.55 55.55 char55 55
|
||||
56 2017-10-01 2017-10-01T00:00 Beijing 56 56 true 56 56 \N 56.56 56.56 char56 56
|
||||
57 2017-10-01 2017-10-01T00:00 Beijing 57 57 true 57 57 \N 57.57 57.57 char57 57
|
||||
58 2017-10-01 2017-10-01T00:00 Beijing 58 58 true 58 58 \N 58.58 58.58 char58 58
|
||||
59 2017-10-01 2017-10-01T00:00 Beijing 59 59 true 59 59 \N 59.59 59.59 char59 59
|
||||
60 2017-10-01 2017-10-01T00:00 Beijing 60 60 true 60 60 \N 60.6 60.6 char60 60
|
||||
61 2017-10-01 2017-10-01T00:00 Beijing 61 61 true 61 61 \N 61.61 61.61 char61 61
|
||||
62 2017-10-01 2017-10-01T00:00 Beijing 62 62 true 62 62 \N 62.62 62.62 char62 62
|
||||
63 2017-10-01 2017-10-01T00:00 Beijing 63 63 true 63 63 \N 63.63 63.63 char63 63
|
||||
64 2017-10-01 2017-10-01T00:00 Beijing 64 64 true 64 64 \N 64.64 64.64 char64 64
|
||||
65 2017-10-01 2017-10-01T00:00 Beijing 65 65 true 65 65 \N 65.65 65.65 char65 65
|
||||
66 2017-10-01 2017-10-01T00:00 Beijing 66 66 true 66 66 \N 66.66 66.66 char66 66
|
||||
67 2017-10-01 2017-10-01T00:00 Beijing 67 67 true 67 67 \N 67.67 67.67 char67 67
|
||||
68 2017-10-01 2017-10-01T00:00 Beijing 68 68 true 68 68 \N 68.68 68.68 char68 68
|
||||
69 2017-10-01 2017-10-01T00:00 Beijing 69 69 true 69 69 \N 69.69 69.69 char69 69
|
||||
70 2017-10-01 2017-10-01T00:00 Beijing 70 70 true 70 70 \N 70.7 70.7 char70 70
|
||||
71 2017-10-01 2017-10-01T00:00 Beijing 71 71 true 71 71 \N 71.71 71.71 char71 71
|
||||
72 2017-10-01 2017-10-01T00:00 Beijing 72 72 true 72 72 \N 72.72 72.72 char72 72
|
||||
73 2017-10-01 2017-10-01T00:00 Beijing 73 73 true 73 73 \N 73.73 73.73 char73 73
|
||||
74 2017-10-01 2017-10-01T00:00 Beijing 74 74 true 74 74 \N 74.74 74.74 char74 74
|
||||
75 2017-10-01 2017-10-01T00:00 Beijing 75 75 true 75 75 \N 75.75 75.75 char75 75
|
||||
76 2017-10-01 2017-10-01T00:00 Beijing 76 76 true 76 76 \N 76.76 76.76 char76 76
|
||||
77 2017-10-01 2017-10-01T00:00 Beijing 77 77 true 77 77 \N 77.77 77.77 char77 77
|
||||
78 2017-10-01 2017-10-01T00:00 Beijing 78 78 true 78 78 \N 78.78 78.78 char78 78
|
||||
79 2017-10-01 2017-10-01T00:00 Beijing 79 79 true 79 79 \N 79.79 79.79 char79 79
|
||||
80 2017-10-01 2017-10-01T00:00 Beijing 80 80 true 80 80 \N 80.8 80.8 char80 80
|
||||
81 2017-10-01 2017-10-01T00:00 Beijing 81 81 true 81 81 \N 81.81 81.81 char81 81
|
||||
82 2017-10-01 2017-10-01T00:00 Beijing 82 82 true 82 82 \N 82.82 82.82 char82 82
|
||||
83 2017-10-01 2017-10-01T00:00 Beijing 83 83 true 83 83 \N 83.83 83.83 char83 83
|
||||
84 2017-10-01 2017-10-01T00:00 Beijing 84 84 true 84 84 \N 84.84 84.84 char84 84
|
||||
85 2017-10-01 2017-10-01T00:00 Beijing 85 85 true 85 85 \N 85.85 85.85 char85 85
|
||||
86 2017-10-01 2017-10-01T00:00 Beijing 86 86 true 86 86 \N 86.86 86.86 char86 86
|
||||
87 2017-10-01 2017-10-01T00:00 Beijing 87 87 true 87 87 \N 87.87 87.87 char87 87
|
||||
88 2017-10-01 2017-10-01T00:00 Beijing 88 88 true 88 88 \N 88.88 88.88 char88 88
|
||||
89 2017-10-01 2017-10-01T00:00 Beijing 89 89 true 89 89 \N 89.89 89.89 char89 89
|
||||
90 2017-10-01 2017-10-01T00:00 Beijing 90 90 true 90 90 \N 90.9 90.9 char90 90
|
||||
91 2017-10-01 2017-10-01T00:00 Beijing 91 91 true 91 91 \N 91.91 91.91 char91 91
|
||||
92 2017-10-01 2017-10-01T00:00 Beijing 92 92 true 92 92 \N 92.92 92.92 char92 92
|
||||
93 2017-10-01 2017-10-01T00:00 Beijing 93 93 true 93 93 \N 93.93 93.93 char93 93
|
||||
94 2017-10-01 2017-10-01T00:00 Beijing 94 94 true 94 94 \N 94.94 94.94 char94 94
|
||||
95 2017-10-01 2017-10-01T00:00 Beijing 95 95 true 95 95 \N 95.95 95.95 char95 95
|
||||
96 2017-10-01 2017-10-01T00:00 Beijing 96 96 true 96 96 \N 96.96 96.96 char96 96
|
||||
97 2017-10-01 2017-10-01T00:00 Beijing 97 97 true 97 97 \N 97.97 97.97 char97 97
|
||||
98 2017-10-01 2017-10-01T00:00 Beijing 98 98 true 98 98 \N 98.98 98.98 char98 98
|
||||
99 2017-10-01 2017-10-01T00:00 Beijing 99 99 true 99 99 \N 99.99 99.99 char99 99
|
||||
100 2017-10-01 2017-10-01T00:00 \N \N \N \N \N \N \N \N \N \N \N

@ -1,21 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_export") {
    // todo: test export, select into outfile
    sql "show export"
}
387
regression-test/suites/export_p0/test_export_basic.groovy
Normal file
@ -0,0 +1,387 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_basic", "p0") {
|
||||
// check whether the FE config 'enable_outfile_to_local' is true
|
||||
StringBuilder strBuilder = new StringBuilder()
|
||||
strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
|
||||
strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")
|
||||
|
||||
String command = strBuilder.toString()
|
||||
def process = command.toString().execute()
|
||||
def code = process.waitFor()
|
||||
def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
|
||||
def out = process.getText()
|
||||
logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
|
||||
assertEquals(code, 0)
|
||||
def response = parseJson(out.trim())
|
||||
assertEquals(response.code, 0)
|
||||
assertEquals(response.msg, "success")
|
||||
def configJson = response.data.rows
|
||||
boolean enableOutfileToLocal = false
|
||||
for (Object conf: configJson) {
|
||||
assert conf instanceof Map
|
||||
if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
|
||||
enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
|
||||
}
|
||||
}
|
||||
if (!enableOutfileToLocal) {
|
||||
logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
def table_export_name = "test_export_basic"
|
||||
def table_load_name = "test_load_basic"
|
||||
def outfile_path_prefix = """/tmp/test_export"""
|
||||
|
||||
// create table and insert
|
||||
sql """ DROP TABLE IF EXISTS ${table_export_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_export_name} (
|
||||
`id` int(11) NULL,
|
||||
`name` string NULL,
|
||||
`age` int(11) NULL
|
||||
)
|
||||
PARTITION BY RANGE(id)
|
||||
(
|
||||
PARTITION less_than_20 VALUES LESS THAN ("20"),
|
||||
PARTITION between_20_70 VALUES [("20"),("70")),
|
||||
PARTITION more_than_70 VALUES LESS THAN ("151")
|
||||
)
|
||||
DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
StringBuilder sb = new StringBuilder()
|
||||
int i = 1
|
||||
for (; i < 150; i ++) {
|
||||
sb.append("""
|
||||
(${i}, 'ftw-${i}', ${i + 18}),
|
||||
""")
|
||||
}
|
||||
sb.append("""
|
||||
(${i}, NULL, NULL)
|
||||
""")
|
||||
sql """ INSERT INTO ${table_export_name} VALUES
|
||||
${sb.toString()}
|
||||
"""
|
||||
qt_select_export """ SELECT * FROM ${table_export_name} t ORDER BY id; """
|
||||
|
||||
|
||||
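// helper: create the export directory if it is missing, and fail if it already exists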
def check_path_exists = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (!path.exists()) {
|
||||
assert path.mkdirs()
|
||||
} else {
|
||||
throw new IllegalStateException("""${dir_path} already exists! """)
|
||||
}
|
||||
}
|
||||
|
||||
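// helper: assert the export directory contains exactly the expected number of files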
def check_file_amounts = { dir_path, amount ->
|
||||
File path = new File(dir_path)
|
||||
File[] files = path.listFiles()
|
||||
assert files.length == amount
|
||||
}
|
||||
|
||||
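// helper: remove all exported files and then the directory itself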
def delete_files = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (path.exists()) {
|
||||
for (File f: path.listFiles()) {
|
||||
f.delete();
|
||||
}
|
||||
path.delete();
|
||||
}
|
||||
}
|
||||
|
||||
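// helper: poll "show export where label = ..." every 5s until the job is FINISHED, failing fast if it is CANCELLED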
def waiting_export = { export_label ->
|
||||
while (true) {
|
||||
def res = sql """ show export where label = "${export_label}" """
|
||||
logger.info("export state: " + res[0][2])
|
||||
if (res[0][2] == "FINISHED") {
|
||||
break;
|
||||
} else if (res[0][2] == "CANCELLED") {
|
||||
throw new IllegalStateException("""export failed: ${res[0][10]}""")
|
||||
} else {
|
||||
sleep(5000)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 1. basic test
|
||||
def uuid = UUID.randomUUID().toString()
|
||||
def outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
def label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`id` int(11) NULL,
|
||||
`name` string NULL,
|
||||
`age` int(11) NULL
|
||||
)
|
||||
DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', ','
|
||||
set 'columns', 'id, name, age'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(150, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
// 2. test partition1
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} PARTITION (less_than_20)
|
||||
TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`id` int(11) NULL,
|
||||
`name` string NULL,
|
||||
`age` int(11) NULL
|
||||
)
|
||||
DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', ','
|
||||
set 'columns', 'id, name, age'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(19, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load2 """ SELECT * FROM ${table_load_name} t ORDER BY id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
// 3. test partition2
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} PARTITION (between_20_70)
|
||||
TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`id` int(11) NULL,
|
||||
`name` string NULL,
|
||||
`age` int(11) NULL
|
||||
)
|
||||
DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', ','
|
||||
set 'columns', 'id, name, age'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(50, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load3 """ SELECT * FROM ${table_load_name} t ORDER BY id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
// 4. test partition3 and where clause
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} PARTITION (more_than_70) where id >100
|
||||
TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`id` int(11) NULL,
|
||||
`name` string NULL,
|
||||
`age` int(11) NULL
|
||||
)
|
||||
DISTRIBUTED BY HASH(id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', ','
|
||||
set 'columns', 'id, name, age'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(50, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load3 """ SELECT * FROM ${table_load_name} t ORDER BY id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
}
443
regression-test/suites/export_p0/test_export_csv.groovy
Normal file
@ -0,0 +1,443 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_csv", "p0") {
|
||||
// check whether the FE config 'enable_outfile_to_local' is true
|
||||
StringBuilder strBuilder = new StringBuilder()
|
||||
strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
|
||||
strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")
|
||||
|
||||
String command = strBuilder.toString()
|
||||
def process = command.toString().execute()
|
||||
def code = process.waitFor()
|
||||
def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
|
||||
def out = process.getText()
|
||||
logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
|
||||
assertEquals(code, 0)
|
||||
def response = parseJson(out.trim())
|
||||
assertEquals(response.code, 0)
|
||||
assertEquals(response.msg, "success")
|
||||
def configJson = response.data.rows
|
||||
boolean enableOutfileToLocal = false
|
||||
for (Object conf: configJson) {
|
||||
assert conf instanceof Map
|
||||
if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
|
||||
enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
|
||||
}
|
||||
}
|
||||
if (!enableOutfileToLocal) {
|
||||
logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
|
||||
return
|
||||
}
|
||||
|
||||
def table_export_name = "test_export_csv"
|
||||
def table_load_name = "test_load_csv"
|
||||
def outfile_path_prefix = """/tmp/test_export"""
|
||||
|
||||
// create table and insert
|
||||
sql """ DROP TABLE IF EXISTS ${table_export_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_export_name} (
|
||||
`user_id` LARGEINT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
StringBuilder sb = new StringBuilder()
|
||||
int i = 1
|
||||
for (; i < 100; i ++) {
|
||||
sb.append("""
|
||||
(${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}),
|
||||
""")
|
||||
}
|
||||
sb.append("""
|
||||
(${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)
|
||||
""")
|
||||
sql """ INSERT INTO ${table_export_name} VALUES
|
||||
${sb.toString()}
|
||||
"""
|
||||
qt_select_export1 """ SELECT * FROM ${table_export_name} t ORDER BY user_id; """
|
||||
|
||||
|
||||
def check_path_exists = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (!path.exists()) {
|
||||
assert path.mkdirs()
|
||||
} else {
|
||||
throw new IllegalStateException("""${dir_path} already exists! """)
|
||||
}
|
||||
}
|
||||
|
||||
def check_file_amounts = { dir_path, amount ->
|
||||
File path = new File(dir_path)
|
||||
File[] files = path.listFiles()
|
||||
assert files.length == amount
|
||||
}
|
||||
|
||||
def delete_files = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (path.exists()) {
|
||||
for (File f: path.listFiles()) {
|
||||
f.delete();
|
||||
}
|
||||
path.delete();
|
||||
}
|
||||
}
|
||||
|
||||
def waiting_export = { export_label ->
|
||||
while (true) {
|
||||
def res = sql """ show export where label = "${export_label}" """
|
||||
logger.info("export state: " + res[0][2])
|
||||
if (res[0][2] == "FINISHED") {
|
||||
break;
|
||||
} else if (res[0][2] == "CANCELLED") {
|
||||
throw new IllegalStateException("""export failed: ${res[0][10]}""")
|
||||
} else {
|
||||
sleep(5000)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 1. test more data types
|
||||
def uuid = UUID.randomUUID().toString()
|
||||
def outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
def label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`user_id` LARGEINT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', ','
|
||||
set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, largeint_col, float_col, double_col, char_col, decimal_col'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(100, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
|
||||
// 2. test csv column_separator and line_delimiter
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} where user_id <11 TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"="ab",
|
||||
"line_delimiter" = "cc"
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`user_id` LARGEINT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', 'ab'
|
||||
set 'line_delimiter', 'cc'
|
||||
set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, largeint_col, float_col, double_col, char_col, decimal_col'
|
||||
set 'strict_mode', 'true'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(10, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load2 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
// 3. test csv_with_names
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} where user_id <11 TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv_with_names",
|
||||
"column_separator"="ab",
|
||||
"line_delimiter" = "cc"
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`user_id` LARGEINT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', 'ab'
|
||||
set 'line_delimiter', 'cc'
|
||||
set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, largeint_col, float_col, double_col, char_col, decimal_col'
|
||||
set 'strict_mode', 'true'
|
||||
set 'format', 'csv_with_names'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(10, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load3 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """
|
||||
|
||||
} finally {
|
||||
// try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
// delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
// 4. test csv_with_names_and_types
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} where user_id <11 TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv_with_names_and_types",
|
||||
"column_separator"="ab",
|
||||
"line_delimiter" = "cc"
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`user_id` LARGEINT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'column_separator', 'ab'
|
||||
set 'line_delimiter', 'cc'
|
||||
set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, largeint_col, float_col, double_col, char_col, decimal_col'
|
||||
set 'strict_mode', 'true'
|
||||
set 'format', 'csv_with_names_and_types'
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(10, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load4 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """
|
||||
|
||||
} finally {
|
||||
// try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
// delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
try_sql("DROP TABLE IF EXISTS ${table_export_name}")
|
||||
}
594
regression-test/suites/export_p0/test_export_data_types.groovy
Normal file
@ -0,0 +1,594 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_data_types", "p0") {
|
||||
// check whether the FE config 'enable_outfile_to_local' is true
|
||||
StringBuilder strBuilder = new StringBuilder()
|
||||
strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
|
||||
strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")
|
||||
|
||||
String command = strBuilder.toString()
|
||||
def process = command.toString().execute()
|
||||
def code = process.waitFor()
|
||||
def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
|
||||
def out = process.getText()
|
||||
logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
|
||||
assertEquals(code, 0)
|
||||
def response = parseJson(out.trim())
|
||||
assertEquals(response.code, 0)
|
||||
assertEquals(response.msg, "success")
|
||||
def configJson = response.data.rows
|
||||
boolean enableOutfileToLocal = false
|
||||
for (Object conf: configJson) {
|
||||
assert conf instanceof Map
|
||||
if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
|
||||
enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
|
||||
}
|
||||
}
|
||||
if (!enableOutfileToLocal) {
|
||||
logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
|
||||
return
|
||||
}
|
||||
|
||||
def table_export_name = "test_export_data_types"
|
||||
def table_load_name = "test_load_data_types"
|
||||
def outfile_path_prefix = """/tmp/test_export"""
|
||||
|
||||
// create table
|
||||
sql """ DROP TABLE IF EXISTS ${table_export_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_export_name} (
|
||||
`user_id` INT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间2",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_1` DATETIMEV2 NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`street` STRING COMMENT "用户所在街道",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT "",
|
||||
`decimalv3_col` decimalv3 COMMENT "",
|
||||
`decimalv3_col2` decimalv3(1,0) COMMENT "",
|
||||
`decimalv3_col3` decimalv3(1,1) COMMENT "",
|
||||
`decimalv3_col4` decimalv3(9,8) COMMENT "",
|
||||
`decimalv3_col5` decimalv3(20,10) COMMENT "",
|
||||
`decimalv3_col6` decimalv3(38,0) COMMENT "",
|
||||
`decimalv3_col7` decimalv3(38,37) COMMENT "",
|
||||
`decimalv3_col8` decimalv3(38,38) COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
StringBuilder sb = new StringBuilder()
|
||||
int i = 1
|
||||
sb.append("""
|
||||
(${i}, '2023-04-20', '2023-04-20', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
|
||||
'Beijing', 'Haidian',
|
||||
${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
|
||||
${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i}),
|
||||
""")
|
||||
|
||||
sb.append("""
|
||||
(${++i}, '9999-12-31', '9999-12-31', '9999-12-31 23:59:59', '9999-12-31 23:59:59', '2023-04-20 00:00:00.12', '2023-04-20 00:00:00.3344',
|
||||
'', 'Haidian',
|
||||
${Short.MIN_VALUE}, ${Byte.MIN_VALUE}, true, ${Integer.MIN_VALUE}, ${Long.MIN_VALUE}, ${i}, ${Float.MIN_VALUE}, ${Double.MIN_VALUE}, 'char${i}',
|
||||
100000000, 100000000, 4, 0.1, 0.99999999, 9999999999.9999999999, 99999999999999999999999999999999999999, 9.9999999999999999999999999999999999999, 0.99999999999999999999999999999999999999),
|
||||
""")
|
||||
|
||||
sb.append("""
|
||||
(${++i}, '2023-04-21', '2023-04-21', '2023-04-20 12:34:56', '2023-04-20 00:00:00', '2023-04-20 00:00:00.123', '2023-04-20 00:00:00.123456',
|
||||
'Beijing', '',
|
||||
${Short.MAX_VALUE}, ${Byte.MAX_VALUE}, true, ${Integer.MAX_VALUE}, ${Long.MAX_VALUE}, ${i}, ${Float.MAX_VALUE}, ${Double.MAX_VALUE}, 'char${i}',
|
||||
999999999, 999999999, 9, 0.9, 9.99999999, 1234567890.0123456789, 12345678901234567890123456789012345678, 1.2345678901234567890123456789012345678, 0.12345678901234567890123456789012345678),
|
||||
""")
|
||||
|
||||
sb.append("""
|
||||
(${++i}, '0000-01-01', '0000-01-01', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00', '2023-04-20 00:00:00',
|
||||
'Beijing', 'Haidian',
|
||||
${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}',
|
||||
${i}, ${i}, ${i}, 0.${i}, ${i}, ${i}, ${i}, ${i}, 0.${i})
|
||||
""")
|
||||
|
||||
|
||||
sql """ INSERT INTO ${table_export_name} VALUES
|
||||
${sb.toString()}
|
||||
"""
|
||||
|
||||
qt_select_export1 """ SELECT * FROM ${table_export_name} t ORDER BY user_id; """
|
||||
|
||||
def check_path_exists = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (!path.exists()) {
|
||||
assert path.mkdirs()
|
||||
} else {
|
||||
throw new IllegalStateException("""${dir_path} already exists! """)
|
||||
}
|
||||
}
|
||||
|
||||
def check_file_amounts = { dir_path, amount ->
|
||||
File path = new File(dir_path)
|
||||
File[] files = path.listFiles()
|
||||
assert files.length == amount
|
||||
}
|
||||
|
||||
def delete_files = { dir_path ->
|
||||
File path = new File(dir_path)
|
||||
if (path.exists()) {
|
||||
for (File f: path.listFiles()) {
|
||||
f.delete();
|
||||
}
|
||||
path.delete();
|
||||
}
|
||||
}
|
||||
|
||||
def waiting_export = { export_label ->
|
||||
while (true) {
|
||||
def res = sql """ show export where label = "${export_label}" """
|
||||
logger.info("export state: " + res[0][2])
|
||||
if (res[0][2] == "FINISHED") {
|
||||
break;
|
||||
} else if (res[0][2] == "CANCELLED") {
|
||||
throw new IllegalStateException("""export failed: ${res[0][10]}""")
|
||||
} else {
|
||||
sleep(5000)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 1. test csv
|
||||
def uuid = UUID.randomUUID().toString()
|
||||
def outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
def label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
|
||||
"format" = "csv",
|
||||
"column_separator"=","
|
||||
);
|
||||
"""
|
||||
waiting_export.call(label)
|
||||
|
||||
// check file amounts
|
||||
check_file_amounts.call("${outFilePath}", 1)
|
||||
|
||||
// check data correctness
|
||||
sql """ DROP TABLE IF EXISTS ${table_load_name} """
|
||||
sql """
|
||||
CREATE TABLE IF NOT EXISTS ${table_load_name} (
|
||||
`user_id` INT NOT NULL COMMENT "用户id",
|
||||
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间2",
|
||||
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_1` DATETIMEV2 NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间",
|
||||
`datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间",
|
||||
`city` VARCHAR(20) COMMENT "用户所在城市",
|
||||
`street` STRING COMMENT "用户所在街道",
|
||||
`age` SMALLINT COMMENT "用户年龄",
|
||||
`sex` TINYINT COMMENT "用户性别",
|
||||
`bool_col` boolean COMMENT "",
|
||||
`int_col` int COMMENT "",
|
||||
`bigint_col` bigint COMMENT "",
|
||||
`largeint_col` largeint COMMENT "",
|
||||
`float_col` float COMMENT "",
|
||||
`double_col` double COMMENT "",
|
||||
`char_col` CHAR(10) COMMENT "",
|
||||
`decimal_col` decimal COMMENT "",
|
||||
`decimalv3_col` decimalv3 COMMENT "",
|
||||
`decimalv3_col2` decimalv3(1,0) COMMENT "",
|
||||
`decimalv3_col3` decimalv3(1,1) COMMENT "",
|
||||
`decimalv3_col4` decimalv3(9,8) COMMENT "",
|
||||
`decimalv3_col5` decimalv3(20,10) COMMENT "",
|
||||
`decimalv3_col6` decimalv3(38,0) COMMENT "",
|
||||
`decimalv3_col7` decimalv3(38,37) COMMENT "",
|
||||
`decimalv3_col8` decimalv3(38,38) COMMENT ""
|
||||
)
|
||||
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
|
||||
"""
|
||||
|
||||
File[] files = new File("${outFilePath}").listFiles()
|
||||
String file_path = files[0].getAbsolutePath()
|
||||
streamLoad {
|
||||
table "${table_load_name}"
|
||||
|
||||
set 'strict_mode', 'true'
|
||||
set 'format', 'csv'
|
||||
set 'column_separator', ','
|
||||
|
||||
file "${file_path}"
|
||||
time 10000 // limit inflight 10s
|
||||
|
||||
check { result, exception, startTime, endTime ->
|
||||
if (exception != null) {
|
||||
throw exception
|
||||
}
|
||||
log.info("Stream load result: ${result}".toString())
|
||||
def json = parseJson(result)
|
||||
assertEquals("success", json.Status.toLowerCase())
|
||||
assertEquals(4, json.NumberTotalRows)
|
||||
assertEquals(0, json.NumberFilteredRows)
|
||||
}
|
||||
}
|
||||
|
||||
qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """
|
||||
|
||||
} finally {
|
||||
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
|
||||
delete_files.call("${outFilePath}")
|
||||
}
|
||||
|
||||
|
||||
// 2. test parquet
|
||||
uuid = UUID.randomUUID().toString()
|
||||
outFilePath = """${outfile_path_prefix}_${uuid}"""
|
||||
label = "label_${uuid}"
|
||||
try {
|
||||
// check export path
|
||||
check_path_exists.call("${outFilePath}")
|
||||
|
||||
// exec export
|
||||
sql """
|
||||
EXPORT TABLE ${table_export_name} where user_id<4 TO "file://${outFilePath}/"
|
||||
PROPERTIES(
|
||||
"label" = "${label}",
"format" = "parquet",
"columns" = "user_id, date, datev2, datetime, datetimev2_1, datetimev2_2, datetimev2_3, city, street, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col, decimalv3_col, decimalv3_col2, decimalv3_col3, decimalv3_col4, decimalv3_col5, decimalv3_col6, decimalv3_col7, decimalv3_col8"
);
"""
waiting_export.call(label)

// check file amounts
check_file_amounts.call("${outFilePath}", 1)

// check data correctness
sql """ DROP TABLE IF EXISTS ${table_load_name} """
sql """
CREATE TABLE IF NOT EXISTS ${table_load_name} (
`user_id` INT NOT NULL COMMENT "用户id",
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
`datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间2",
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_1` DATETIMEV2 NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间",
`city` VARCHAR(20) COMMENT "用户所在城市",
`street` STRING COMMENT "用户所在街道",
`age` SMALLINT COMMENT "用户年龄",
`sex` TINYINT COMMENT "用户性别",
`bool_col` boolean COMMENT "",
`int_col` int COMMENT "",
`bigint_col` bigint COMMENT "",
`float_col` float COMMENT "",
`double_col` double COMMENT "",
`char_col` CHAR(10) COMMENT "",
`decimal_col` decimal COMMENT "",
`decimalv3_col` decimalv3 COMMENT "",
`decimalv3_col2` decimalv3(1,0) COMMENT "",
`decimalv3_col3` decimalv3(1,1) COMMENT "",
`decimalv3_col4` decimalv3(9,8) COMMENT "",
`decimalv3_col5` decimalv3(20,10) COMMENT "",
`decimalv3_col6` decimalv3(38,0) COMMENT "",
`decimalv3_col7` decimalv3(38,37) COMMENT "",
`decimalv3_col8` decimalv3(38,38) COMMENT ""
)
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
"""

File[] files = new File("${outFilePath}").listFiles()
String file_path = files[0].getAbsolutePath()
streamLoad {
table "${table_load_name}"

set 'strict_mode', 'true'
set 'format', 'parquet'

file "${file_path}"
time 10000 // limit inflight 10s

check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(3, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
}
}

qt_select_load2 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

} finally {
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
delete_files.call("${outFilePath}")
}

// 3. test orc
uuid = UUID.randomUUID().toString()
outFilePath = """${outfile_path_prefix}_${uuid}"""
label = "label_${uuid}"
try {
// check export path
check_path_exists.call("${outFilePath}")

// exec export
sql """
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
PROPERTIES(
"label" = "${label}",
"format" = "orc",
"columns" = "user_id, date, city, street, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col"
);
"""
waiting_export.call(label)

// check file amounts
check_file_amounts.call("${outFilePath}", 1)

// check data correctness
sql """ DROP TABLE IF EXISTS ${table_load_name} """
sql """
CREATE TABLE IF NOT EXISTS ${table_load_name} (
`user_id` INT NOT NULL COMMENT "用户id",
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
`city` VARCHAR(20) COMMENT "用户所在城市",
`street` STRING COMMENT "用户所在街道",
`age` SMALLINT COMMENT "用户年龄",
`sex` TINYINT COMMENT "用户性别",
`bool_col` boolean COMMENT "",
`int_col` int COMMENT "",
`bigint_col` bigint COMMENT "",
`float_col` float COMMENT "",
`double_col` double COMMENT "",
`char_col` CHAR(10) COMMENT "",
`decimal_col` decimal COMMENT ""
)
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
"""

File[] files = new File("${outFilePath}").listFiles()
String file_path = files[0].getAbsolutePath()
streamLoad {
table "${table_load_name}"

set 'strict_mode', 'true'
set 'format', 'orc'

file "${file_path}"
time 10000 // limit inflight 10s

check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(4, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
}
}

qt_select_load3 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

} finally {
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
delete_files.call("${outFilePath}")
}

// 4. test csv_with_names
uuid = UUID.randomUUID().toString()
outFilePath = """${outfile_path_prefix}_${uuid}"""
label = "label_${uuid}"
try {
// check export path
check_path_exists.call("${outFilePath}")

// exec export
sql """
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
PROPERTIES(
"label" = "${label}",
"format" = "csv_with_names",
"column_separator"=","
);
"""
waiting_export.call(label)

// check file amounts
check_file_amounts.call("${outFilePath}", 1)

// check data correctness
sql """ DROP TABLE IF EXISTS ${table_load_name} """
sql """
CREATE TABLE IF NOT EXISTS ${table_load_name} (
`user_id` INT NOT NULL COMMENT "用户id",
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
`datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间2",
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_1` DATETIMEV2 NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间",
`city` VARCHAR(20) COMMENT "用户所在城市",
`street` STRING COMMENT "用户所在街道",
`age` SMALLINT COMMENT "用户年龄",
`sex` TINYINT COMMENT "用户性别",
`bool_col` boolean COMMENT "",
`int_col` int COMMENT "",
`bigint_col` bigint COMMENT "",
`largeint_col` largeint COMMENT "",
`float_col` float COMMENT "",
`double_col` double COMMENT "",
`char_col` CHAR(10) COMMENT "",
`decimal_col` decimal COMMENT "",
`decimalv3_col` decimalv3 COMMENT "",
`decimalv3_col2` decimalv3(1,0) COMMENT "",
`decimalv3_col3` decimalv3(1,1) COMMENT "",
`decimalv3_col4` decimalv3(9,8) COMMENT "",
`decimalv3_col5` decimalv3(20,10) COMMENT "",
`decimalv3_col6` decimalv3(38,0) COMMENT "",
`decimalv3_col7` decimalv3(38,37) COMMENT "",
`decimalv3_col8` decimalv3(38,38) COMMENT ""
)
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
"""

File[] files = new File("${outFilePath}").listFiles()
String file_path = files[0].getAbsolutePath()
streamLoad {
table "${table_load_name}"

set 'strict_mode', 'true'
set 'format', 'csv_with_names'
set 'column_separator', ','

file "${file_path}"
time 10000 // limit inflight 10s

check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(4, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
}
}

qt_select_load4 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

} finally {
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
delete_files.call("${outFilePath}")
}


// 5. test csv_with_names_and_types
uuid = UUID.randomUUID().toString()
outFilePath = """${outfile_path_prefix}_${uuid}"""
label = "label_${uuid}"
try {
// check export path
check_path_exists.call("${outFilePath}")

// exec export
sql """
EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
PROPERTIES(
"label" = "${label}",
"format" = "csv_with_names_and_types",
"column_separator"=","
);
"""
waiting_export.call(label)

// check file amounts
check_file_amounts.call("${outFilePath}", 1)

// check data correctness
sql """ DROP TABLE IF EXISTS ${table_load_name} """
sql """
CREATE TABLE IF NOT EXISTS ${table_load_name} (
`user_id` INT NOT NULL COMMENT "用户id",
`date` DATE NOT NULL COMMENT "数据灌入日期时间",
`datev2` DATEV2 NOT NULL COMMENT "数据灌入日期时间2",
`datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_1` DATETIMEV2 NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_2` DATETIMEV2(3) NOT NULL COMMENT "数据灌入日期时间",
`datetimev2_3` DATETIMEV2(6) NOT NULL COMMENT "数据灌入日期时间",
`city` VARCHAR(20) COMMENT "用户所在城市",
`street` STRING COMMENT "用户所在街道",
`age` SMALLINT COMMENT "用户年龄",
`sex` TINYINT COMMENT "用户性别",
`bool_col` boolean COMMENT "",
`int_col` int COMMENT "",
`bigint_col` bigint COMMENT "",
`largeint_col` largeint COMMENT "",
`float_col` float COMMENT "",
`double_col` double COMMENT "",
`char_col` CHAR(10) COMMENT "",
`decimal_col` decimal COMMENT "",
`decimalv3_col` decimalv3 COMMENT "",
`decimalv3_col2` decimalv3(1,0) COMMENT "",
`decimalv3_col3` decimalv3(1,1) COMMENT "",
`decimalv3_col4` decimalv3(9,8) COMMENT "",
`decimalv3_col5` decimalv3(20,10) COMMENT "",
`decimalv3_col6` decimalv3(38,0) COMMENT "",
`decimalv3_col7` decimalv3(38,37) COMMENT "",
`decimalv3_col8` decimalv3(38,38) COMMENT ""
)
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
"""

File[] files = new File("${outFilePath}").listFiles()
String file_path = files[0].getAbsolutePath()
streamLoad {
table "${table_load_name}"

set 'strict_mode', 'true'
set 'format', 'csv_with_names_and_types'
set 'column_separator', ','

file "${file_path}"
time 10000 // limit inflight 10s

check { result, exception, startTime, endTime ->
if (exception != null) {
throw exception
}
log.info("Stream load result: ${result}".toString())
def json = parseJson(result)
assertEquals("success", json.Status.toLowerCase())
assertEquals(4, json.NumberTotalRows)
assertEquals(0, json.NumberFilteredRows)
}
}

qt_select_load5 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

} finally {
try_sql("DROP TABLE IF EXISTS ${table_load_name}")
delete_files.call("${outFilePath}")
}

try_sql("DROP TABLE IF EXISTS ${table_export_name}")
}
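Each format case in test_export_basic.groovy above repeats the same export, wait, reload and compare steps. A condensed sketch of that round trip as a single closure follows; the helper name, its parameters and the map-based property handling are illustrative assumptions and not part of this commit, and the suite's existing tables and helper closures are assumed to be in scope.

// Hypothetical helper (not in this commit): one export/reload round trip for a given format.
// Assumes table_export_name, table_load_name, outfile_path_prefix and the closures
// check_path_exists, check_file_amounts, delete_files and waiting_export are already defined.
def export_and_reload = { String format, Map extraProps, Closure createLoadTable, int expectedRows ->
    def uuid = UUID.randomUUID().toString()
    def outFilePath = "${outfile_path_prefix}_${uuid}"
    def label = "label_${uuid}"
    try {
        check_path_exists.call(outFilePath)
        def props = ["label": label, "format": format] + extraProps   // extraProps may be [:]
        def propSql = props.collect { k, v -> "\"${k}\" = \"${v}\"" }.join(", ")
        sql """ EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/" PROPERTIES(${propSql}); """
        waiting_export.call(label)
        check_file_amounts.call(outFilePath, 1)
        createLoadTable.call()                                        // recreate the reload target table
        String file_path = new File(outFilePath).listFiles()[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"
            set 'strict_mode', 'true'
            set 'format', format
            file "${file_path}"
            time 10000
            check { result, exception, startTime, endTime ->
                if (exception != null) { throw exception }
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(expectedRows, json.NumberTotalRows)
            }
        }
    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call(outFilePath)
    }
}
// Usage sketch: export_and_reload("csv", ["column_separator": ","], { /* CREATE TABLE ${table_load_name} ... */ }, expected_row_count)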
493
regression-test/suites/export_p0/test_export_empty_table.groovy
Normal file
@ -0,0 +1,493 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_empty_table", "p0") {
    // check whether the FE config 'enable_outfile_to_local' is true
    StringBuilder strBuilder = new StringBuilder()
    strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
    strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")

    String command = strBuilder.toString()
    def process = command.toString().execute()
    def code = process.waitFor()
    def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
    def out = process.getText()
    logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
    assertEquals(code, 0)
    def response = parseJson(out.trim())
    assertEquals(response.code, 0)
    assertEquals(response.msg, "success")
    def configJson = response.data.rows
    boolean enableOutfileToLocal = false
    for (Object conf: configJson) {
        assert conf instanceof Map
        if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
            enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
        }
    }
    if (!enableOutfileToLocal) {
        logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
        return
    }

    def table_export_name = "test_export_empty_table"
    def table_load_name = "test_load_empty_table"
    def outfile_path_prefix = """/tmp/test_export"""

    // create table
    sql """ DROP TABLE IF EXISTS ${table_export_name} """
    sql """
        CREATE TABLE IF NOT EXISTS ${table_export_name} (
            `user_id` INT NOT NULL COMMENT "用户id",
            `date` DATE NOT NULL COMMENT "数据灌入日期时间",
            `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
            `city` VARCHAR(20) COMMENT "用户所在城市",
            `age` SMALLINT COMMENT "用户年龄",
            `sex` TINYINT COMMENT "用户性别",
            `bool_col` boolean COMMENT "",
            `int_col` int COMMENT "",
            `bigint_col` bigint COMMENT "",
            `float_col` float COMMENT "",
            `double_col` double COMMENT "",
            `char_col` CHAR(10) COMMENT "",
            `decimal_col` decimal COMMENT ""
        )
        DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
    """

    qt_select_export1 """ SELECT * FROM ${table_export_name} t ORDER BY user_id; """

    def check_path_exists = { dir_path ->
        File path = new File(dir_path)
        if (!path.exists()) {
            assert path.mkdirs()
        } else {
            throw new IllegalStateException("""${dir_path} already exists! """)
        }
    }

    def check_file_amounts = { dir_path, amount ->
        File path = new File(dir_path)
        File[] files = path.listFiles()
        assert files.length == amount
    }

    def delete_files = { dir_path ->
        File path = new File(dir_path)
        if (path.exists()) {
            for (File f: path.listFiles()) {
                f.delete();
            }
            path.delete();
        }
    }

    def waiting_export = { export_label ->
        while (true) {
            def res = sql """ show export where label = "${export_label}" """
            logger.info("export state: " + res[0][2])
            if (res[0][2] == "FINISHED") {
                break;
            } else if (res[0][2] == "CANCELLED") {
                throw new IllegalStateException("""export failed: ${res[0][10]}""")
            } else {
                sleep(5000)
            }
        }
    }

    // 1. test csv
    def uuid = UUID.randomUUID().toString()
    def outFilePath = """${outfile_path_prefix}_${uuid}"""
    def label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "csv",
                "column_separator"=","
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'csv'
            set 'column_separator', ','

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(0, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }


    // 2. test parquet
    uuid = UUID.randomUUID().toString()
    outFilePath = """${outfile_path_prefix}_${uuid}"""
    label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "parquet"
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'parquet'

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(0, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load2 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }

    // 3. test orc
    uuid = UUID.randomUUID().toString()
    outFilePath = """${outfile_path_prefix}_${uuid}"""
    label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "orc"
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'orc'

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(0, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load3 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }

    // 4. test csv_with_names
    uuid = UUID.randomUUID().toString()
    outFilePath = """${outfile_path_prefix}_${uuid}"""
    label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "csv_with_names",
                "column_separator"=","
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'csv_with_names'
            set 'column_separator', ','

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(0, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load4 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }


    // 5. test csv_with_names_and_types
    uuid = UUID.randomUUID().toString()
    outFilePath = """${outfile_path_prefix}_${uuid}"""
    label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "csv_with_names_and_types",
                "column_separator"=","
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'csv_with_names_and_types'
            set 'column_separator', ','

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(0, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load5 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }

    try_sql("DROP TABLE IF EXISTS ${table_export_name}")
}
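One behaviour shared by all of these suites is that the waiting_export closure polls SHOW EXPORT every five seconds and only exits on FINISHED or CANCELLED, so a hung export job would block the suite indefinitely. A bounded variant is sketched below; the ten-minute limit and the helper name are assumptions for illustration, not something this commit enforces.

// Sketch only: same polling logic as waiting_export, with an upper bound on the wait.
def waiting_export_with_timeout = { export_label, long timeoutMs = 600000 ->
    long start = System.currentTimeMillis()
    while (true) {
        def res = sql """ show export where label = "${export_label}" """
        def state = res[0][2]
        logger.info("export state: " + state)
        if (state == "FINISHED") {
            return
        } else if (state == "CANCELLED") {
            throw new IllegalStateException("""export failed: ${res[0][10]}""")
        } else if (System.currentTimeMillis() - start > timeoutMs) {
            throw new IllegalStateException("export ${export_label} did not finish within ${timeoutMs} ms")
        }
        sleep(5000)
    }
}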
206
regression-test/suites/export_p0/test_export_orc.groovy
Normal file
@ -0,0 +1,206 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_orc", "p0") {
    // check whether the FE config 'enable_outfile_to_local' is true
    StringBuilder strBuilder = new StringBuilder()
    strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
    strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")

    String command = strBuilder.toString()
    def process = command.toString().execute()
    def code = process.waitFor()
    def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
    def out = process.getText()
    logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
    assertEquals(code, 0)
    def response = parseJson(out.trim())
    assertEquals(response.code, 0)
    assertEquals(response.msg, "success")
    def configJson = response.data.rows
    boolean enableOutfileToLocal = false
    for (Object conf: configJson) {
        assert conf instanceof Map
        if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
            enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
        }
    }
    if (!enableOutfileToLocal) {
        logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
        return
    }

    def table_export_name = "test_export_orc"
    def table_load_name = "test_load_orc"
    def outfile_path_prefix = """/tmp/test_export"""

    // create table and insert
    sql """ DROP TABLE IF EXISTS ${table_export_name} """
    sql """
        CREATE TABLE IF NOT EXISTS ${table_export_name} (
            `user_id` INT NOT NULL COMMENT "用户id",
            `date` DATE NOT NULL COMMENT "数据灌入日期时间",
            `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
            `city` VARCHAR(20) COMMENT "用户所在城市",
            `age` SMALLINT COMMENT "用户年龄",
            `sex` TINYINT COMMENT "用户性别",
            `bool_col` boolean COMMENT "",
            `int_col` int COMMENT "",
            `bigint_col` bigint COMMENT "",
            `largeint_col` largeint COMMENT "",
            `float_col` float COMMENT "",
            `double_col` double COMMENT "",
            `char_col` CHAR(10) COMMENT "",
            `decimal_col` decimal COMMENT ""
        )
        DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
    """
    StringBuilder sb = new StringBuilder()
    int i = 1
    for (; i < 100; i ++) {
        sb.append("""
            (${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}),
        """)
    }
    sb.append("""
        (${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)
    """)
    sql """ INSERT INTO ${table_export_name} VALUES
            ${sb.toString()}
        """
    qt_select_export1 """ SELECT * FROM ${table_export_name} t ORDER BY user_id; """


    def check_path_exists = { dir_path ->
        File path = new File(dir_path)
        if (!path.exists()) {
            assert path.mkdirs()
        } else {
            throw new IllegalStateException("""${dir_path} already exists! """)
        }
    }

    def check_file_amounts = { dir_path, amount ->
        File path = new File(dir_path)
        File[] files = path.listFiles()
        assert files.length == amount
    }

    def delete_files = { dir_path ->
        File path = new File(dir_path)
        if (path.exists()) {
            for (File f: path.listFiles()) {
                f.delete();
            }
            path.delete();
        }
    }

    def waiting_export = { export_label ->
        while (true) {
            def res = sql """ show export where label = "${export_label}" """
            logger.info("export state: " + res[0][2])
            if (res[0][2] == "FINISHED") {
                break;
            } else if (res[0][2] == "CANCELLED") {
                throw new IllegalStateException("""export failed: ${res[0][10]}""")
            } else {
                sleep(5000)
            }
        }
    }

    // 1. test more type
    def uuid = UUID.randomUUID().toString()
    def outFilePath = """${outfile_path_prefix}_${uuid}"""
    def label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "orc",
                'columns' = 'user_id, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'orc'

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(100, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }

    try_sql("DROP TABLE IF EXISTS ${table_export_name}")
}
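The block at the top of every suite in this commit issues the same curl request to /rest/v1/config/fe and scans the returned rows for enable_outfile_to_local. A sketch of that check factored into a reusable helper follows; the helper name is illustrative only and the call sequence simply mirrors the inline code above.

// Sketch: returns true when the FE config enable_outfile_to_local is set to true.
def is_outfile_to_local_enabled = {
    def cmd = "curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword +
            " http://" + context.config.feHttpAddress + "/rest/v1/config/fe"
    def process = cmd.execute()
    assertEquals(0, process.waitFor())
    def response = parseJson(process.getText().trim())
    assertEquals(0, response.code)
    def row = response.data.rows.find {
        ((Map<String, String>) it).get("Name").toLowerCase() == "enable_outfile_to_local"
    }
    return row != null && ((Map<String, String>) row).get("Value").toLowerCase() == "true"
}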
209
regression-test/suites/export_p0/test_export_parquet.groovy
Normal file
@ -0,0 +1,209 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import org.codehaus.groovy.runtime.IOGroovyMethods

import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.Paths

suite("test_export_parquet", "p0") {
    // check whether the FE config 'enable_outfile_to_local' is true
    StringBuilder strBuilder = new StringBuilder()
    strBuilder.append("curl --location-trusted -u " + context.config.jdbcUser + ":" + context.config.jdbcPassword)
    strBuilder.append(" http://" + context.config.feHttpAddress + "/rest/v1/config/fe")

    String command = strBuilder.toString()
    def process = command.toString().execute()
    def code = process.waitFor()
    def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
    def out = process.getText()
    logger.info("Request FE Config: code=" + code + ", out=" + out + ", err=" + err)
    assertEquals(code, 0)
    def response = parseJson(out.trim())
    assertEquals(response.code, 0)
    assertEquals(response.msg, "success")
    def configJson = response.data.rows
    boolean enableOutfileToLocal = false
    for (Object conf: configJson) {
        assert conf instanceof Map
        if (((Map<String, String>) conf).get("Name").toLowerCase() == "enable_outfile_to_local") {
            enableOutfileToLocal = ((Map<String, String>) conf).get("Value").toLowerCase() == "true"
        }
    }
    if (!enableOutfileToLocal) {
        logger.warn("Please set enable_outfile_to_local to true to run test_outfile")
        return
    }

    def table_export_name = "test_export_parquet"
    def table_load_name = "test_load_parquet"
    def outfile_path_prefix = """/tmp/test_export"""

    // create table and insert
    sql """ DROP TABLE IF EXISTS ${table_export_name} """
    sql """
        CREATE TABLE IF NOT EXISTS ${table_export_name} (
            `user_id` INT NOT NULL COMMENT "用户id",
            `date` DATE NOT NULL COMMENT "数据灌入日期时间",
            `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
            `city` VARCHAR(20) COMMENT "用户所在城市",
            `age` SMALLINT COMMENT "用户年龄",
            `sex` TINYINT COMMENT "用户性别",
            `bool_col` boolean COMMENT "",
            `int_col` int COMMENT "",
            `bigint_col` bigint COMMENT "",
            `largeint_col` largeint COMMENT "",
            `float_col` float COMMENT "",
            `double_col` double COMMENT "",
            `char_col` CHAR(10) COMMENT "",
            `decimal_col` decimal COMMENT ""
        )
        DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
    """
    StringBuilder sb = new StringBuilder()
    int i = 1
    for (; i < 100; i ++) {
        sb.append("""
            (${i}, '2017-10-01', '2017-10-01 00:00:00', 'Beijing', ${i}, ${i % 128}, true, ${i}, ${i}, ${i}, ${i}.${i}, ${i}.${i}, 'char${i}', ${i}),
        """)
    }
    sb.append("""
        (${i}, '2017-10-01', '2017-10-01 00:00:00', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)
    """)
    sql """ INSERT INTO ${table_export_name} VALUES
            ${sb.toString()}
        """
    qt_select_export1 """ SELECT * FROM ${table_export_name} t ORDER BY user_id; """


    def check_path_exists = { dir_path ->
        File path = new File(dir_path)
        if (!path.exists()) {
            assert path.mkdirs()
        } else {
            throw new IllegalStateException("""${dir_path} already exists! """)
        }
    }

    def check_file_amounts = { dir_path, amount ->
        File path = new File(dir_path)
        File[] files = path.listFiles()
        assert files.length == amount
    }

    def delete_files = { dir_path ->
        File path = new File(dir_path)
        if (path.exists()) {
            for (File f: path.listFiles()) {
                f.delete();
            }
            path.delete();
        }
    }

    def waiting_export = { export_label ->
        while (true) {
            def res = sql """ show export where label = "${export_label}" """
            logger.info("export state: " + res[0][2])
            if (res[0][2] == "FINISHED") {
                break;
            } else if (res[0][2] == "CANCELLED") {
                throw new IllegalStateException("""export failed: ${res[0][10]}""")
            } else {
                sleep(5000)
            }
        }
    }

    // 1. test more type
    def uuid = UUID.randomUUID().toString()
    def outFilePath = """${outfile_path_prefix}_${uuid}"""
    def label = "label_${uuid}"
    try {
        // check export path
        check_path_exists.call("${outFilePath}")

        // exec export
        sql """
            EXPORT TABLE ${table_export_name} TO "file://${outFilePath}/"
            PROPERTIES(
                "label" = "${label}",
                "format" = "parquet",
                'columns' = 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            );
        """
        waiting_export.call(label)

        // check file amounts
        check_file_amounts.call("${outFilePath}", 1)

        // check data correctness
        sql """ DROP TABLE IF EXISTS ${table_load_name} """
        sql """
            CREATE TABLE IF NOT EXISTS ${table_load_name} (
                `user_id` INT NOT NULL COMMENT "用户id",
                `date` DATE NOT NULL COMMENT "数据灌入日期时间",
                `datetime` DATETIME NOT NULL COMMENT "数据灌入日期时间",
                `city` VARCHAR(20) COMMENT "用户所在城市",
                `age` SMALLINT COMMENT "用户年龄",
                `sex` TINYINT COMMENT "用户性别",
                `bool_col` boolean COMMENT "",
                `int_col` int COMMENT "",
                `bigint_col` bigint COMMENT "",
                `largeint_col` largeint COMMENT "",
                `float_col` float COMMENT "",
                `double_col` double COMMENT "",
                `char_col` CHAR(10) COMMENT "",
                `decimal_col` decimal COMMENT ""
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """

        File[] files = new File("${outFilePath}").listFiles()
        String file_path = files[0].getAbsolutePath()
        streamLoad {
            table "${table_load_name}"

            set 'columns', 'user_id, date, datetime, city, age, sex, bool_col, int_col, bigint_col, float_col, double_col, char_col, decimal_col'
            set 'strict_mode', 'true'
            set 'format', 'parquet'

            file "${file_path}"
            time 10000 // limit inflight 10s

            check { result, exception, startTime, endTime ->
                if (exception != null) {
                    throw exception
                }
                log.info("Stream load result: ${result}".toString())
                def json = parseJson(result)
                assertEquals("success", json.Status.toLowerCase())
                assertEquals(100, json.NumberTotalRows)
                assertEquals(0, json.NumberFilteredRows)
            }
        }

        qt_select_load1 """ SELECT * FROM ${table_load_name} t ORDER BY user_id; """

    } finally {
        try_sql("DROP TABLE IF EXISTS ${table_load_name}")
        delete_files.call("${outFilePath}")
    }

    try_sql("DROP TABLE IF EXISTS ${table_export_name}")
}
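A last small note on the delete_files closure shared by these suites: it deletes the exported files one by one and then removes the directory. Groovy's GDK already provides File.deleteDir(), which removes a directory tree recursively, so an equivalent cleanup could be written as the sketch below (a possible simplification, not what this commit ships).

// Sketch: recursive cleanup of the export directory via the Groovy GDK.
def delete_files = { dir_path ->
    def dir = new File(dir_path)
    if (dir.exists()) {
        dir.deleteDir()   // removes the files and the directory itself
    }
}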