[fix](inverted index) fix check failure for block erase temp column (#20924)

This commit is contained in:
YueW
2023-06-18 19:27:48 +08:00
committed by GitHub
parent 48065fce19
commit d6b7640cf0
3 changed files with 7 additions and 1 deletion

View File

@ -213,6 +213,7 @@ Status VScanNode::get_next(RuntimeState* state, vectorized::Block* block, bool*
// we built some temporary columns into block, these columns only used in scan node level,
// remove them when query leave scan node to avoid other nodes use block->columns() to make a wrong decision
Defer drop_block_temp_column {[&]() {
std::unique_lock l(_block_lock);
auto all_column_names = block->get_names();
for (auto& name : all_column_names) {
if (name.rfind(BeConsts::BLOCK_TEMP_COLUMN_PREFIX, 0) == 0) {

View File

@ -247,6 +247,8 @@ protected:
const TupleDescriptor* _input_tuple_desc = nullptr;
const TupleDescriptor* _output_tuple_desc = nullptr;
doris::Mutex _block_lock;
// These two values are from query_options
int _max_scan_key_num;
int _max_pushdown_conditions_per_column;

View File

@ -55,6 +55,8 @@ suite("load") {
sql new File("""${context.file.parent}/ddl/${table}.sql""").text
}
sql "set exec_mem_limit=8G;"
for (String tableName in tables) {
streamLoad {
// you can skip db declaration, because a default db has already been
@ -74,9 +76,10 @@ suite("load") {
set "columns", columnsMap[tableName]
}
// relate to ${DORIS_HOME}/regression-test/data/demo/streamload_input.csv.
// also, you can stream load a http stream, e.g. http://xxx/some.csv
file """${getS3Url() + '/regression/tpcds/sf1-new/' + tableName}.dat.gz"""
file """${getS3Url()}/regression/tpcds/sf1/${tableName}.dat.gz"""
time 10000 // limit inflight 10s