[Fix](multi-catalog) Fix BE crash when querying a Hive table after a schema change (new column added). (#20537)
Fix BE crash when querying a Hive table after a schema change (new column added). Regression test: test_hive_schema_evolution.groovy
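After a new column is added to a Hive table, data files written before the ALTER do not contain that column. Judging from the diff below, the crash comes from OrcReader::_init_read_columns(): the result of the slot_name_to_schema_pos lookup was used without checking for end(), and the schema position was used to index orc_cols_lower_case without a bounds check, so an old ORC file with fewer columns than the current schema could trigger an out-of-range access and bring down the BE. The fix guards both the lookup and the index; a minimal standalone sketch of the same pattern follows the C++ hunk below.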
@@ -273,11 +273,17 @@ Status OrcReader::_init_read_columns() {
     for (auto& col_name : _column_names) {
         if (_is_hive) {
             auto iter = _scan_params.slot_name_to_schema_pos.find(col_name);
-            int pos = iter->second;
-            if (_is_acid) {
-                orc_cols_lower_case[ACID_ROW_OFFSET + 1 + pos] = iter->first;
-            } else {
-                orc_cols_lower_case[pos] = iter->first;
-            }
+            if (iter != _scan_params.slot_name_to_schema_pos.end()) {
+                int pos = iter->second;
+                if (_is_acid) {
+                    if (ACID_ROW_OFFSET + 1 + pos < orc_cols_lower_case.size()) {
+                        orc_cols_lower_case[ACID_ROW_OFFSET + 1 + pos] = iter->first;
+                    }
+                } else {
+                    if (pos < orc_cols_lower_case.size()) {
+                        orc_cols_lower_case[pos] = iter->first;
+                    }
+                }
+            }
         }
         auto iter = std::find(orc_cols_lower_case.begin(), orc_cols_lower_case.end(), col_name);
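A minimal, self-contained sketch of the failure mode and of the guarded pattern used above. The structures here are simplified stand-ins, not the reader's actual members; the names slot_name_to_schema_pos and orc_cols_lower_case are reused purely for illustration.

// Sketch only: simplified stand-ins, not the actual OrcReader code.
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

int main() {
    // Table schema after "ALTER TABLE ... ADD COLUMNS (`ts` timestamp)":
    // the catalog now knows three columns.
    std::unordered_map<std::string, int> slot_name_to_schema_pos = {
            {"id", 0}, {"name", 1}, {"ts", 2}};

    // An ORC file written before the ALTER only contains the first two columns.
    std::vector<std::string> orc_cols_lower_case = {"id", "name"};

    for (const std::string col_name : {"id", "name", "ts"}) {
        auto iter = slot_name_to_schema_pos.find(col_name);
        // Unguarded code would index orc_cols_lower_case[iter->second] directly;
        // for "ts" the position (2) is past the end of the 2-element vector,
        // which is undefined behavior and can crash the process.
        if (iter != slot_name_to_schema_pos.end() &&
            iter->second < static_cast<int>(orc_cols_lower_case.size())) {
            orc_cols_lower_case[iter->second] = iter->first;
        } else {
            std::cout << col_name << ": not in this file, read as NULL\n";
        }
    }
    return 0;
}

Columns that cannot be mapped to a position in the old file fall through and are returned as NULL, which matches the \N values for the pre-ALTER rows in the expected output further below.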
@@ -594,4 +594,31 @@ CREATE TABLE `unsupported_type_table`(
   k6 int
 );
 
+CREATE TABLE `schema_evo_test_text`(
+  id int,
+  name string
+)
+ROW FORMAT DELIMITED FIELDS TERMINATED by ',';
+insert into `schema_evo_test_text` select 1, "kaka";
+alter table `schema_evo_test_text` ADD COLUMNS (`ts` timestamp);
+insert into `schema_evo_test_text` select 2, "messi", from_unixtime(to_unix_timestamp('20230101 13:01:03','yyyyMMdd HH:mm:ss'));
+
+CREATE TABLE `schema_evo_test_parquet`(
+  id int,
+  name string
+)
+stored as parquet;
+insert into `schema_evo_test_parquet` select 1, "kaka";
+alter table `schema_evo_test_parquet` ADD COLUMNS (`ts` timestamp);
+insert into `schema_evo_test_parquet` select 2, "messi", from_unixtime(to_unix_timestamp('20230101 13:01:03','yyyyMMdd HH:mm:ss'));
+
+CREATE TABLE `schema_evo_test_orc`(
+  id int,
+  name string
+)
+stored as orc;
+insert into `schema_evo_test_orc` select 1, "kaka";
+alter table `schema_evo_test_orc` ADD COLUMNS (`ts` timestamp);
+insert into `schema_evo_test_orc` select 2, "messi", from_unixtime(to_unix_timestamp('20230101 13:01:03','yyyyMMdd HH:mm:ss'));
+
 show tables;
@@ -0,0 +1,37 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !q01 --
+1 kaka \N
+2 messi 2023-01-01T13:01:03
+
+-- !q02 --
+1 kaka \N
+2 messi 2023-01-01T13:01:03
+
+-- !q03 --
+\N
+2023-01-01T13:01:03
+
+-- !q01 --
+1 kaka \N
+2 messi 2023-01-01T21:01:03
+
+-- !q02 --
+1 kaka \N
+2 messi 2023-01-01T21:01:03
+
+-- !q03 --
+\N
+2023-01-01T21:01:03
+
+-- !q01 --
+1 kaka \N
+2 messi 2023-01-01T13:01:03
+
+-- !q02 --
+1 kaka \N
+2 messi 2023-01-01T13:01:03
+
+-- !q03 --
+\N
+2023-01-01T13:01:03
+
@@ -0,0 +1,75 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hive_schema_evolution", "p0") {
+    def q_text = {
+        qt_q01 """
+        select * from schema_evo_test_text order by id;
+        """
+        qt_q02 """
+        select id, name, ts from schema_evo_test_text order by id;
+        """
+        qt_q03 """
+        select ts from schema_evo_test_text order by id;
+        """
+    }
+
+    def q_parquet = {
+        qt_q01 """
+        select * from schema_evo_test_parquet order by id;
+        """
+        qt_q02 """
+        select id, name, ts from schema_evo_test_parquet order by id;
+        """
+        qt_q03 """
+        select ts from schema_evo_test_parquet order by id;
+        """
+    }
+
+    def q_orc = {
+        qt_q01 """
+        select * from schema_evo_test_orc order by id;
+        """
+        qt_q02 """
+        select id, name, ts from schema_evo_test_orc order by id;
+        """
+        qt_q03 """
+        select ts from schema_evo_test_orc order by id;
+        """
+    }
+
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        try {
+            String hms_port = context.config.otherConfigs.get("hms_port")
+            String catalog_name = "test_hive_schema_evolution"
+            sql """drop catalog if exists ${catalog_name}"""
+            sql """create catalog if not exists ${catalog_name} properties (
+                "type"="hms",
+                'hive.metastore.uris' = 'thrift://127.0.0.1:${hms_port}'
+            );"""
+            sql """use `${catalog_name}`.`default`"""
+
+            q_text()
+            q_parquet()
+            q_orc()
+
+            sql """drop catalog if exists ${catalog_name}"""
+        } finally {
+        }
+    }
+}