[fix] hash table insert() may fail but the error is not handled (#8207)

Author: Zhengguo Yang
Date: 2022-03-03 22:33:05 +08:00
Committed by: GitHub
Parent: c56a372e06
Commit: e7c417505c
9 changed files with 56 additions and 52 deletions
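
In short: HashTable::insert_unique() can fail (most likely when the table has to grow and the allocation or memory-limit check fails), but callers dropped its return value. The fix pre-sizes the table once per batch via resize_buckets_ahead(), propagates that call's Status with RETURN_IF_ERROR, and then uses an insert variant that cannot fail inside the row loop. Below is a minimal sketch of that pattern with a stand-in Status type and a std::unordered_set-backed table instead of the real Doris HashTable, whose API and failure modes differ:

#include <new>
#include <string>
#include <unordered_set>
#include <vector>

// Stand-in Status type; the real Doris Status class is richer.
struct Status {
    bool ok = true;
    std::string msg;
    static Status OK() { return {}; }
    static Status MemLimitExceeded(std::string m) { return {false, std::move(m)}; }
};

// Simplified form of the RETURN_IF_ERROR macro seen in the diff.
#define RETURN_IF_ERROR(stmt)      \
    do {                           \
        Status _st = (stmt);       \
        if (!_st.ok) return _st;   \
    } while (false)

// Hypothetical dedup table; the real HashTable hashes TupleRows, not ints.
class DedupTable {
public:
    // Fallible step: reserve bucket capacity for n more rows up front,
    // so the per-row inserts below never need to grow the table.
    Status resize_buckets_ahead(size_t n) {
        try {
            _rows.reserve(_rows.size() + n);
        } catch (const std::bad_alloc&) {
            return Status::MemLimitExceeded("failed to resize hash table");
        }
        return Status::OK();
    }

    // "Without check" insert: capacity was reserved above, so no rehash
    // occurs here; the sketch ignores per-node allocation failure.
    void insert_unique_without_check(int row) { _rows.insert(row); }

private:
    std::unordered_set<int> _rows;
};

// Mirrors the fixed build loop in SetOperationNode::open(): resize once
// per batch with the Status checked, then insert each row unchecked.
Status build_batch(DedupTable& tbl, const std::vector<int>& rows) {
    RETURN_IF_ERROR(tbl.resize_buckets_ahead(rows.size()));
    for (int row : rows) {
        tbl.insert_unique_without_check(row);
    }
    return Status::OK();
}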


@@ -27,7 +27,10 @@
namespace doris {
SetOperationNode::SetOperationNode(ObjectPool* pool, const TPlanNode& tnode,
const DescriptorTbl& descs, int tuple_id)
- : ExecNode(pool, tnode, descs), _tuple_id(tuple_id), _tuple_desc(nullptr), _valid_element_in_hash_tbl(0) {}
+ : ExecNode(pool, tnode, descs),
+       _tuple_id(tuple_id),
+       _tuple_desc(nullptr),
+       _valid_element_in_hash_tbl(0) {}
Status SetOperationNode::init(const TPlanNode& tnode, RuntimeState* state) {
RETURN_IF_ERROR(ExecNode::init(tnode, state));
@@ -142,7 +145,7 @@ Status SetOperationNode::open(RuntimeState* state) {
}
// build the initial hash table used to remove duplicates
_hash_tbl.reset(new HashTable(_child_expr_lists[0], _child_expr_lists[1], _build_tuple_size,
- true, _find_nulls, id(), mem_tracker(), 1024));
+ true, _find_nulls, id(), mem_tracker(), state->batch_size() * 2));
RowBatch build_batch(child(0)->row_desc(), state->batch_size(), mem_tracker().get());
RETURN_IF_ERROR(child(0)->open(state));
@@ -155,10 +158,9 @@ Status SetOperationNode::open(RuntimeState* state) {
_build_pool->acquire_data(build_batch.tuple_data_pool(), false);
RETURN_IF_LIMIT_EXCEEDED(state, " SetOperation, while constructing the hash table.");
// build hash table and remove duplicate items
+ RETURN_IF_ERROR(_hash_tbl->resize_buckets_ahead(build_batch.num_rows()));
for (int i = 0; i < build_batch.num_rows(); ++i) {
VLOG_ROW << "build row: "
<< get_row_output_string(build_batch.get_row(i), child(0)->row_desc());
- _hash_tbl->insert_unique(build_batch.get_row(i));
+ _hash_tbl->insert_unique_without_check(build_batch.get_row(i));
}
VLOG_ROW << "hash table content: " << _hash_tbl->debug_string(true, &child(0)->row_desc());
build_batch.reset();
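
Note also the initial-capacity change in the first hunk (a fixed 1024 becomes state->batch_size() * 2): presumably the table now starts sized relative to the batches that feed it, and resize_buckets_ahead() grows it once per batch thereafter. Together these keep every allocation that can fail on a path whose Status is checked, which is what makes the unchecked insert safe inside the row loop.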