revert sc case (#10945)

The schema change cases lead to failures when creating partitions.
Author: Lightman
Date: 2022-07-17 21:45:23 +08:00
Committed by: GitHub
Parent: 6b1408ce41
Commit: 50cb26d599
19 changed files with 0 additions and 3518 deletions
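For context, a minimal hypothetical sketch of the failure mode the message refers to. It is not part of the reverted files, and the table and partition names below are invented for illustration; the reported symptom is that creating a partition fails while these schema change cases are running, i.e. an operation of roughly this shape:

-- Hypothetical illustration only; not contained in this commit.
CREATE TABLE example_partitioned_tbl (
`user_id` LARGEINT NOT NULL,
`date` DATE NOT NULL
)
DUPLICATE KEY(`user_id`, `date`)
PARTITION BY RANGE(`date`) (PARTITION p202207 VALUES LESS THAN ("2022-08-01"))
DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
-- The partition-creation step that was reported to fail:
ALTER TABLE example_partitioned_tbl ADD PARTITION p202208 VALUES LESS THAN ("2022-09-01");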

View File

@@ -1,11 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !compaction --
100
-- !compaction --
100
-- !compaction --
1
2

View File

@@ -1,44 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !test_delete_schema_change --
0
-- !test_delete_schema_change_2 --
0
-- !test_delete_schema_change_3 --
1
-- !test_delete_schema_change_4 --
1
-- !test_delete_schema_change_5 --
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21
-- !test_delete_schema_change_6 --
0
-- !test_delete_schema_change_7 --
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21 1
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20 1
-- !test_delete_schema_change_8 --
1
-- !test_delete_schema_change_9 --
1
-- !test_delete_schema_change_10 --
1
-- !test_delete_schema_change_11 --
1
-- !test_delete_schema_change_12 --
0
-- !test_delete_schema_change_13 --
3 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2
2 2017-10-01 Beijing 10 1 2020-01-03T00:00 2020-01-03T00:00 2020-01-03T00:00 1 32 20 2
1 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 19 2

View File

@@ -1,33 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !test_partition_schema_change --
0
-- !test_partition_schema_change_2 --
0
-- !test_partition_schema_change_3 --
1
-- !test_partition_schema_change_4 --
1
-- !test_partition_schema_change_5 --
1
-- !test_partition_schema_change_6 --
1 2017-01-02 Beijing 10 1 2017-01-02T00:00 1 30 20
1 2017-02-02 Beijing 10 1 2017-02-02T00:00 1 30 20
1 2017-03-02 Beijing 10 1 2017-03-02T00:00 1 30 20
-- !test_partition_schema_change_7 --
0
-- !test_partition_schema_change_8 --
1
-- !test_partition_schema_change_9 --
1 2017-01-02 Beijing 10 1 2017-01-02T00:00 1 30 20 1
1 2017-02-02 Beijing 10 1 2017-02-02T00:00 1 30 20 1
2 2017-02-03 Beijing 10 1 2017-02-02T00:00 1 30 20 2
1 2017-03-02 Beijing 10 1 2017-03-02T00:00 1 30 20 1

View File

@@ -1,64 +0,0 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !test_update_schema_change --
0
-- !test_update_schema_change_2 --
0
-- !test_update_schema_change_3 --
1
-- !test_update_schema_change_4 --
1
-- !test_update_schema_change_5 --
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21
-- !test_update_schema_change_6 --
0
-- !test_update_schema_change_7 --
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21 1
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20 1
-- !test_update_schema_change_8 --
1
-- !test_update_schema_change_9 --
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20 2
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21 1
-- !test_update_schema_change_10 --
1
-- !test_update_schema_change_11 --
1
-- !test_update_schema_change_12 --
2
-- !test_update_schema_change_13 --
5 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21 20
3 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20 20
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21 1
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20 20
-- !test_update_schema_change_14 --
0
-- !test_update_schema_change_15 --
5 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21
3 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20
-- !test_update_schema_change_16 --
1
-- !test_update_schema_change_17 --
5 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 20 31 21
3 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20
2 2017-10-01 Beijing 10 1 2020-01-02T00:00 2020-01-02T00:00 2020-01-02T00:00 1 31 21
1 2017-10-01 Beijing 10 1 2020-01-01T00:00 2020-01-01T00:00 2020-01-01T00:00 1 30 20

View File

@@ -1,274 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_agg_keys_schema_change") {
def tableName = "schema_change_agg_keys_regression_test"
try {
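// map each backend id to its ip (column 2) and http port (column 5) so BE HTTP APIs can be called later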
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
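// query this backend's /api/show_config endpoint to check the disable_auto_compaction setting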
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
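// each config entry is a list; this code reads the name from index 0 and the value from index 2 to find disable_auto_compaction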
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`cost` BIGINT SUM DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT MAX DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT MIN DEFAULT "99999" COMMENT "user min dwell time",
`hll_col` HLL HLL_UNION NOT NULL COMMENT "HLL column",
`bitmap_col` Bitmap BITMAP_UNION NOT NULL COMMENT "bitmap column")
AGGREGATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 30, 20, hll_hash(1), to_bitmap(1))
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 31, 19, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 31, 21, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(3), to_bitmap(3))
"""
def result = "null";
result = sql """ select * from ${tableName} order by user_id"""
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 10)
assertTrue(result[0][5] == 2, "user id 1 cost should be 2")
assertTrue(result[1][5] == 2, "user id 2 cost should be 2")
// add key column case 1, not light schema change
sql """
ALTER table ${tableName} ADD COLUMN new_key_column INT default "2"
"""
result = "null"
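// poll SHOW ALTER TABLE COLUMN until the job reaches FINISHED, bailing out early if it is CANCELLED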
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`cost`,`max_dwell_time`,`min_dwell_time`, `hll_col`, `bitmap_col`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, 100, 32, 20, hll_hash(4), to_bitmap(4))
"""
result = "null"
result = sql """SELECT * FROM ${tableName} WHERE user_id = 3"""
assertTrue(result.size() == 1)
assertTrue(result[0][5] == 2, "new key column default value should be 2")
assertTrue(result[0].size() == 11)
// add key column case 2
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, 3, 110, 32, 20, hll_hash(4), to_bitmap(4))
"""
result = "null"
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
result = "null"
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 4, "total count is 4")
// drop key column, not light schema change
sql """
ALTER TABLE ${tableName} DROP COLUMN new_key_column
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 10)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 4)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 10)
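// tally row counts across all rowsets reported by each tablet's compaction show API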
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 8)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@@ -1,257 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_agg_mv_schema_change") {
def tableName = "schema_change_agg_mv_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`cost` BIGINT SUM DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT MAX DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT MIN DEFAULT "99999" COMMENT "user min dwell time",
`hll_col` HLL HLL_UNION NOT NULL COMMENT "HLL column",
`bitmap_col` Bitmap BITMAP_UNION NOT NULL COMMENT "bitmap column")
AGGREGATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add materialized view
def result = "null"
def mvName = "mv1"
sql "create materialized view ${mvName} as select user_id, date, city, age, sex, sum(cost) from ${tableName} group by user_id, date, city, age, sex, cost;"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE MATERIALIZED VIEW WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 30, 20, hll_hash(1), to_bitmap(1))
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 31, 19, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 31, 21, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(3), to_bitmap(3))
"""
result = "null";
result = sql """ select * from ${tableName} order by user_id"""
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 10)
assertTrue(result[0][5] == 2, "user id 1 cost should be 2")
assertTrue(result[1][5] == 2, "user id 2 cost should be 2")
assertTrue(result[0].size() == 10)
// drop value column with mv, not light schema change
sql """
ALTER TABLE ${tableName} DROP COLUMN cost
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} (`user_id`, `date`, `city`, `age`, `sex`, `max_dwell_time`,`min_dwell_time`, `hll_col`, `bitmap_col`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(4), to_bitmap(4))
"""
result = "null"
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 9)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 4)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 9)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 14)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@@ -1,255 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_agg_rollup_schema_change") {
def tableName = "schema_change_agg_rollup_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`cost` BIGINT SUM DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT MAX DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT MIN DEFAULT "99999" COMMENT "user min dwell time",
`hll_col` HLL HLL_UNION NOT NULL COMMENT "HLL column",
`bitmap_col` Bitmap BITMAP_UNION NOT NULL COMMENT "bitmap column")
AGGREGATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add rollup
def result = "null"
def rollupName = "rollup_cost"
sql "ALTER TABLE ${tableName} ADD ROLLUP ${rollupName}(`user_id`,`date`,`city`,`age`,`sex`, cost);"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE ROLLUP WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 30, 20, hll_hash(1), to_bitmap(1))
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, 1, 31, 19, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 31, 21, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, 1, 32, 20, hll_hash(3), to_bitmap(3))
"""
result = "null";
result = sql """ select * from ${tableName} order by user_id """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 10)
assertTrue(result[0][5] == 2, "user id 1 cost should be 2")
assertTrue(result[1][5] == 2, "user id 2 cost should be 2")
assertTrue(result[0].size() == 10)
// drop value column with rollup, not light schema change
sql """
ALTER TABLE ${tableName} DROP COLUMN cost
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} (`user_id`, `date`, `city`, `age`, `sex`, `max_dwell_time`,`min_dwell_time`, `hll_col`, `bitmap_col`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(4), to_bitmap(4))
"""
result = "null"
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 9)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, 32, 20, hll_hash(5), to_bitmap(5))
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 4)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 9)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 12)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@@ -1,271 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_agg_vals_schema_change") {
def tableName = "schema_change_agg_vals_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME REPLACE DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME REPLACE DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME REPLACE NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT SUM DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT MAX DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT MIN DEFAULT "99999" COMMENT "user min dwell time",
`hll_col` HLL HLL_UNION NOT NULL COMMENT "HLL column",
`bitmap_col` Bitmap BITMAP_UNION NOT NULL COMMENT "bitmap column")
AGGREGATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20, hll_hash(1), to_bitmap(1))
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21, hll_hash(2), to_bitmap(2))
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(3), to_bitmap(3))
"""
def result1 = sql """
select * from ${tableName} order by user_id
"""
assertTrue(result1.size() == 2)
assertTrue(result1[0].size() == 13)
assertTrue(result1[0][8] == 2, "user id 1 cost should be 2")
assertTrue(result1[1][8] == 2, "user id 2 cost should be 2")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT MAX default "1"
"""
def result2 = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result2[0][8] == 2, "user id 2 cost should be 2")
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(4), to_bitmap(4), 2)
"""
result2 = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result2[0][8] == 3, "user id 2 cost should be 3")
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`, `hll_col`, `bitmap_col`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(4), to_bitmap(4))
"""
result2 = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result2.size() == 1)
assertTrue(result2[0].size() == 14)
assertTrue(result2[0][13] == 1, "new add column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(4), to_bitmap(4), 2)
"""
def result3 = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result3.size() == 1)
assertTrue(result3[0].size() == 14)
assertTrue(result3[0][13] == 2, "new add column value is set to 2")
def result4 = sql """ select count(*) from ${tableName} """
logger.info("result4.size:"+result4.size() + " result4[0].size:" + result4[0].size + " " + result4[0][0])
assertTrue(result4.size() == 1)
assertTrue(result4[0].size() == 1)
assertTrue(result4[0][0] == 3, "total count is 3")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN last_visit_date
"""
def result5 = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result5.size() == 1)
assertTrue(result5[0].size() == 13)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(4), to_bitmap(4), 2)
"""
def result6 = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result6.size() == 1)
assertTrue(result6[0].size() == 13)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, hll_hash(5), to_bitmap(5), 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
def result7 = sql """ select count(*) from ${tableName} """
assertTrue(result7.size() == 1)
assertTrue(result7[0][0] == 5)
def result8 = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result8.size() == 1)
assertTrue(result8[0].size() == 13)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 8)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@@ -1,44 +0,0 @@
DROP TABLE IF EXISTS schema_change_delete_regression_test;
CREATE TABLE schema_change_delete_regression_test (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
DUPLICATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
INSERT INTO schema_change_delete_regression_test VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20);
INSERT INTO schema_change_delete_regression_test VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21);
SELECT * FROM schema_change_delete_regression_test order by user_id ASC, last_visit_date;
ALTER table schema_change_delete_regression_test ADD COLUMN new_column INT default "1";
SELECT * FROM schema_change_delete_regression_test order by user_id DESC, last_visit_date;
INSERT INTO schema_change_delete_regression_test VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19, 2);
INSERT INTO schema_change_delete_regression_test VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2);
INSERT INTO schema_change_delete_regression_test VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 1);
INSERT INTO schema_change_delete_regression_test VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2);
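-- delete every row whose new_column is 1: the rows inserted before ADD COLUMN (which picked up the default 1) and the row explicitly inserted with 1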
DELETE FROM schema_change_delete_regression_test where new_column = 1;
SELECT * FROM schema_change_delete_regression_test order by user_id DESC, last_visit_date;

View File

@@ -1,272 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_dup_keys_schema_change") {
def tableName = "schema_change_dup_keys_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
DUPLICATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result = sql """
select count(*) from ${tableName}
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 4, "total columns should be 4 rows")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "new add column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 order by new_column """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 12)
assertTrue(result[1][11] == 2, "new add column value is set to 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 6, "total count is 6")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN sex
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 13)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount < 10)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@@ -1,287 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_dup_mv_schema_change") {
def tableName = "schema_change_dup_mv_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import datetime",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
DUPLICATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add materialized view
def result = "null"
def mvName = "mv1"
sql "create materialized view ${mvName} as select user_id, date, city, age,sex from ${tableName};"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE MATERIALIZED VIEW WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """
select count(*) from ${tableName}
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 4, "total row count should be 4")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 order by new_column """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 12)
assertTrue(result[1][11] == 2, "newly added column value should be 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 6, "total count is 6")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN sex
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 13)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 14)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,286 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_dup_rollup_schema_change") {
def tableName = "schema_change_dup_rollup_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
DUPLICATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add rollup
def result = "null"
def rollupName = "rollup_cost"
sql "ALTER TABLE ${tableName} ADD ROLLUP ${rollupName}(`user_id`,`date`,`city`,`age`,`sex`, cost);"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE ROLLUP WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """
select count(*) from ${tableName}
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 4, "total row count should be 4")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 order by new_column """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 12)
assertTrue(result[1][11] == 2, "newly added column value should be 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 6, "total count is 6")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN sex
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 13)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 2)
assertTrue(result[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
StringBuilder sb = new StringBuilder();
backend_id = tablet[2]
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 14)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,258 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_dup_vals_schema_change") {
def tableName = "schema_change_dup_vals_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
DUPLICATE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result1 = sql """
select count(*) from ${tableName}
"""
assertTrue(result1.size() == 1)
assertTrue(result1[0].size() == 1)
assertTrue(result1[0][0] == 4, "total row count should be 4")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result2 = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result2.size() == 1)
assertTrue(result2[0].size() == 12)
assertTrue(result2[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
def result3 = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 order by new_column """
assertTrue(result3.size() == 2)
assertTrue(result3[0].size() == 12)
assertTrue(result3[1][11] == 2, "newly added column value should be 2")
def result4 = sql """ select count(*) from ${tableName} """
logger.info("result4.size:"+result4.size() + " result4[0].size:" + result4[0].size + " " + result4[0][0])
assertTrue(result4.size() == 1)
assertTrue(result4[0].size() == 1)
assertTrue(result4[0][0] == 6, "total count is 6")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN last_visit_date
"""
def result5 = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result5.size() == 2)
assertTrue(result5[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
def result6 = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result6.size() == 1)
assertTrue(result6[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
def result7 = sql """ select count(*) from ${tableName} """
assertTrue(result7.size() == 1)
assertTrue(result7[0][0] == 13)
def result8 = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result8.size() == 2)
assertTrue(result8[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount < 10)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,44 +0,0 @@
DROP TABLE IF EXISTS example_range_tbl;
CREATE TABLE example_range_tbl
(
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME REPLACE DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT SUM DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT MAX DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT MIN DEFAULT "99999" COMMENT "user min dwell time"
)
ENGINE=OLAP
AGGREGATE KEY(`user_id`, `date`, `city`, `age`, `sex`)
PARTITION BY RANGE(`date`)
(
PARTITION `p201701` VALUES LESS THAN ("2017-02-01"),
PARTITION `p201702` VALUES LESS THAN ("2017-03-01"),
PARTITION `p201703` VALUES LESS THAN ("2017-04-01")
)
DISTRIBUTED BY HASH(`user_id`) BUCKETS 1
PROPERTIES
(
"replication_num" = "1"
);
INSERT INTO example_range_tbl VALUES
(1, '2017-01-02', 'Beijing', 10, 1, "2017-01-02 00:00:00", 1, 30, 20);
INSERT INTO example_range_tbl VALUES
(1, '2017-02-02', 'Beijing', 10, 1, "2017-02-02 00:00:00", 1, 30, 20);
INSERT INTO example_range_tbl VALUES
(1, '2017-03-02', 'Beijing', 10, 1, "2017-03-02 00:00:00", 1, 30, 20);
select * from example_range_tbl order by `date`;
ALTER table example_range_tbl ADD COLUMN new_column INT MAX default "1";
INSERT INTO example_range_tbl VALUES
(2, '2017-02-03', 'Beijing', 10, 1, "2017-02-02 00:00:00", 1, 30, 20, 2);
select * from example_range_tbl order by `date`;
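Not part of the original script: a minimal sketch of the follow-up step this scenario points at, namely creating a new partition after the ADD COLUMN schema change. It assumes the standard Doris ADD PARTITION syntax and the example_range_tbl defined above; the partition name p201704 and its range are hypothetical.
-- create an additional range partition after the schema change (hypothetical name/range)
ALTER TABLE example_range_tbl ADD PARTITION p201704 VALUES LESS THAN ("2017-05-01");
-- load a row into the new partition (10 values, including the added new_column) and read it back
INSERT INTO example_range_tbl VALUES
(3, '2017-04-02', 'Beijing', 10, 1, "2017-04-02 00:00:00", 1, 30, 20, 3);
select * from example_range_tbl order by `date`;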

View File

@ -1,250 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_uniq_keys_schema_change") {
def tableName = "schema_change_uniq_keys_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE schema_change_uniq_keys_regression_test (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
UNIQUE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO schema_change_uniq_keys_regression_test VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO schema_change_uniq_keys_regression_test VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO schema_change_uniq_keys_regression_test VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO schema_change_uniq_keys_regression_test VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result = sql """
select count(*) from schema_change_uniq_keys_regression_test
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 2, "total row count should be 2")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 2, "newly added column value should be 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 3, "total count is 3")
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 5)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 10)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,273 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_uniq_mv_schema_change") {
def tableName = "schema_change_uniq_mv_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
UNIQUE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add materialized view
def result = "null"
def mvName = "mv1"
sql "create materialized view ${mvName} as select user_id, date, city, age, sex from ${tableName} group by user_id, date, city, age, sex;"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE MATERIALIZED VIEW WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """
select count(*) from ${tableName}
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 2, "total row count should be 2")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 2, "newly added column value should be 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 3, "total count is 3")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN cost
"""
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 5)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 14)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,285 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_uniq_rollup_schema_change") {
def tableName = "schema_change_uniq_rollup_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date",
`city` VARCHAR(20) COMMENT "user city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user max dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user min dwell time")
UNIQUE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
//add rollup
def result = "null"
def rollupName = "rollup_cost"
sql "ALTER TABLE ${tableName} ADD ROLLUP ${rollupName}(`user_id`,`date`,`city`,`age`,`sex`, cost);"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE ROLLUP WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")){
break
}
Thread.sleep(1000)
}
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """
select count(*) from ${tableName}
"""
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 2, "total row count should be 2")
// add column
sql """
ALTER table ${tableName} ADD COLUMN new_column INT default "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
result = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 12)
assertTrue(result[0][11] == 2, "newly added column value should be 2")
result = sql """ select count(*) from ${tableName} """
logger.info("result.size:" + result.size() + " result[0].size:" + result[0].size + " " + result[0][0])
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 1)
assertTrue(result[0][0] == 3, "total count is 3")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN cost
"""
result = "null"
while (!result.contains("FINISHED")){
result = sql "SHOW ALTER TABLE COLUMN WHERE TableName='${tableName}' ORDER BY CreateTime DESC LIMIT 1;"
result = result.toString()
logger.info("result: ${result}")
if(result.contains("CANCELLED")) {
log.info("rollup job is cancelled, result: ${result}".toString())
break
}
Thread.sleep(1000)
}
result = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1,'2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
result = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1,'2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
result = sql """ select count(*) from ${tableName} """
assertTrue(result.size() == 1)
assertTrue(result[0][0] == 5)
result = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result.size() == 1)
assertTrue(result[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
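// (added note) accumulate the second space-separated field of every rowset entry; the
// assertion after this loop uses the total to check that compaction collapsed the
// repeated small loads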
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 14)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,260 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
import org.codehaus.groovy.runtime.IOGroovyMethods
suite ("test_uniq_vals_schema_change") {
def tableName = "schema_change_uniq_vals_regression_test"
try {
String[][] backends = sql """ show backends; """
assertTrue(backends.size() > 0)
String backend_id;
def backendId_to_backendIP = [:]
def backendId_to_backendHttpPort = [:]
for (String[] backend in backends) {
backendId_to_backendIP.put(backend[0], backend[2])
backendId_to_backendHttpPort.put(backend[0], backend[5])
}
backend_id = backendId_to_backendIP.keySet()[0]
StringBuilder showConfigCommand = new StringBuilder();
showConfigCommand.append("curl -X GET http://")
showConfigCommand.append(backendId_to_backendIP.get(backend_id))
showConfigCommand.append(":")
showConfigCommand.append(backendId_to_backendHttpPort.get(backend_id))
showConfigCommand.append("/api/show_config")
logger.info(showConfigCommand.toString())
def process = showConfigCommand.toString().execute()
int code = process.waitFor()
String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
String out = process.getText()
logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def configList = parseJson(out.trim())
assert configList instanceof List
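// (added note) /api/show_config returns a JSON array of config entries; each entry is
// itself a list with the config name at index 0 and its current value at index 2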
boolean disableAutoCompaction = true
for (Object ele in (List) configList) {
assert ele instanceof List<String>
if (((List<String>) ele)[0] == "disable_auto_compaction") {
disableAutoCompaction = Boolean.parseBoolean(((List<String>) ele)[2])
}
}
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
CREATE TABLE ${tableName} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date and time",
`city` VARCHAR(20) COMMENT "user's city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user's last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user's last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user's last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user's total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user's maximum dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user's minimum dwell time")
UNIQUE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20)
"""
sql """ INSERT INTO ${tableName} VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 19)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21)
"""
sql """ INSERT INTO ${tableName} VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result1 = sql """
select count(*) from ${tableName}
"""
assertTrue(result1.size() == 1)
assertTrue(result1[0].size() == 1)
assertTrue(result1[0][0] == 2, "total row count should be 2")
// add column
sql """
ALTER TABLE ${tableName} ADD COLUMN new_column INT DEFAULT "1"
"""
sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
sql """ INSERT INTO ${tableName} (`user_id`,`date`,`city`,`age`,`sex`,`last_visit_date`,`last_update_date`,
`last_visit_date_not_null`,`cost`,`max_dwell_time`,`min_dwell_time`)
VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20)
"""
def result2 = sql """ SELECT * FROM ${tableName} WHERE user_id=3 """
assertTrue(result2.size() == 1)
assertTrue(result2[0].size() == 12)
assertTrue(result2[0][11] == 1, "newly added column default value should be 1")
sql """ INSERT INTO ${tableName} VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
def result3 = sql """ SELECT * FROM ${tableName} WHERE user_id = 3 """
assertTrue(result3.size() == 1)
assertTrue(result3[0].size() == 12)
assertTrue(result3[0][11] == 2, "newly added column value should be 2")
def result4 = sql """ select count(*) from ${tableName} """
logger.info("result4.size:"+result4.size() + " result4[0].size:" + result4[0].size + " " + result4[0][0])
assertTrue(result4.size() == 1)
assertTrue(result4[0].size() == 1)
assertTrue(result4[0][0] == 3, "total row count should be 3")
// drop column
sql """
ALTER TABLE ${tableName} DROP COLUMN last_visit_date
"""
def result5 = sql """ select * from ${tableName} where user_id = 3 """
assertTrue(result5.size() == 1)
assertTrue(result5[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(4, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
def result6 = sql """ select * from ${tableName} where user_id = 4 """
assertTrue(result6.size() == 1)
assertTrue(result6[0].size() == 11)
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
sql """ INSERT INTO ${tableName} VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-03', '2020-01-03', 1, 32, 20, 2)
"""
// compaction
String[][] tablets = sql """ show tablets from ${tableName}; """
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
logger.info("run compaction:" + tablet_id)
StringBuilder sb = new StringBuilder();
sb.append("curl -X POST http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run?tablet_id=")
sb.append(tablet_id)
sb.append("&compact_type=cumulative")
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
//assertEquals(code, 0)
}
// wait for all compactions done
for (String[] tablet in tablets) {
boolean running = true
do {
Thread.sleep(1000)
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/run_status?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
assertEquals("success", compactionStatus.status.toLowerCase())
running = compactionStatus.run_status
} while (running)
}
def result7 = sql """ select count(*) from ${tableName} """
assertTrue(result7.size() == 1)
assertTrue(result7[0][0] == 5)
def result8 = sql """ SELECT * FROM ${tableName} WHERE user_id=2 """
assertTrue(result8.size() == 1)
assertTrue(result8[0].size() == 11)
int rowCount = 0
for (String[] tablet in tablets) {
String tablet_id = tablet[0]
backend_id = tablet[2]
StringBuilder sb = new StringBuilder();
sb.append("curl -X GET http://")
sb.append(backendId_to_backendIP.get(backend_id))
sb.append(":")
sb.append(backendId_to_backendHttpPort.get(backend_id))
sb.append("/api/compaction/show?tablet_id=")
sb.append(tablet_id)
String command = sb.toString()
// wait for cleaning stale_rowsets
process = command.execute()
code = process.waitFor()
err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
out = process.getText()
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
assert tabletJson.rowsets instanceof List
for (String rowset in (List<String>) tabletJson.rowsets) {
rowCount += Integer.parseInt(rowset.split(" ")[1])
}
}
logger.info("size:" + rowCount)
assertTrue(rowCount <= 10)
} finally {
//try_sql("DROP TABLE IF EXISTS ${tableName}")
}
}

View File

@ -1,50 +0,0 @@
DROP TABLE IF EXISTS schema_change_update_regression_test;
CREATE TABLE schema_change_update_regression_test (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`date` DATE NOT NULL COMMENT "data import date and time",
`city` VARCHAR(20) COMMENT "user's city",
`age` SMALLINT COMMENT "user age",
`sex` TINYINT COMMENT "user gender",
`last_visit_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user's last visit time",
`last_update_date` DATETIME DEFAULT "1970-01-01 00:00:00" COMMENT "user's last update time",
`last_visit_date_not_null` DATETIME NOT NULL DEFAULT "1970-01-01 00:00:00" COMMENT "user's last visit time",
`cost` BIGINT DEFAULT "0" COMMENT "user's total cost",
`max_dwell_time` INT DEFAULT "0" COMMENT "user's maximum dwell time",
`min_dwell_time` INT DEFAULT "99999" COMMENT "user's minimum dwell time")
UNIQUE KEY(`user_id`, `date`, `city`, `age`, `sex`) DISTRIBUTED BY HASH(`user_id`)
PROPERTIES ( "replication_num" = "1" );
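-- (added note) on a UNIQUE KEY table, rows sharing the same (user_id, date, city, age, sex)
-- key are merged on load, keeping the most recently written non-key values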
INSERT INTO schema_change_update_regression_test VALUES
(1, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20);
INSERT INTO schema_change_update_regression_test VALUES
(2, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21);
SELECT * FROM schema_change_update_regression_test order by user_id ASC, last_visit_date;
ALTER TABLE schema_change_update_regression_test ADD COLUMN new_column INT DEFAULT "1";
SELECT * FROM schema_change_update_regression_test order by user_id DESC, last_visit_date;
UPDATE schema_change_update_regression_test set new_column = 2 where user_id = 1;
SELECT * FROM schema_change_update_regression_test order by user_id ASC, last_visit_date;
INSERT INTO schema_change_update_regression_test VALUES
(3, '2017-10-01', 'Beijing', 10, 1, '2020-01-01', '2020-01-01', '2020-01-01', 1, 30, 20, 2);
INSERT INTO schema_change_update_regression_test VALUES
(5, '2017-10-01', 'Beijing', 10, 1, '2020-01-02', '2020-01-02', '2020-01-02', 1, 31, 21, 20);
UPDATE schema_change_update_regression_test set new_column = 20 where new_column = 2;
SELECT * FROM schema_change_update_regression_test order by user_id DESC, last_visit_date;
ALTER TABLE schema_change_update_regression_test DROP COLUMN new_column;
SELECT * FROM schema_change_update_regression_test order by user_id DESC, last_visit_date;
UPDATE schema_change_update_regression_test set cost = 20 where user_id = 5;
SELECT * FROM schema_change_update_regression_test order by user_id DESC, last_visit_date;