Fix except may produce incorrect values (#3381)

yangzhg
2020-04-23 09:35:41 +08:00
committed by GitHub
parent ad6698cd31
commit a58bc1957e
4 changed files with 45 additions and 61 deletions

@@ -133,4 +133,39 @@ bool SetOperationNode::equals(TupleRow* row, TupleRow* other) {
}
return true;
}
Status SetOperationNode::open(RuntimeState* state) {
RETURN_IF_ERROR(ExecNode::open(state));
RETURN_IF_ERROR(exec_debug_action(TExecNodePhase::OPEN));
SCOPED_TIMER(_runtime_profile->total_time_counter());
RETURN_IF_CANCELLED(state);
// open result expr lists.
for (const vector<ExprContext*>& exprs : _child_expr_lists) {
RETURN_IF_ERROR(Expr::open(exprs, state));
}
    // Initialize the build hash table used to remove duplicate rows.
_hash_tbl.reset(new HashTable(_child_expr_lists[0], _child_expr_lists[1], _build_tuple_size,
true, _find_nulls, id(), mem_tracker(), 1024));
RowBatch build_batch(child(0)->row_desc(), state->batch_size(), mem_tracker());
RETURN_IF_ERROR(child(0)->open(state));
bool eos = false;
while (!eos) {
SCOPED_TIMER(_build_timer);
RETURN_IF_CANCELLED(state);
RETURN_IF_ERROR(child(0)->get_next(state, &build_batch, &eos));
// take ownership of tuple data of build_batch
_build_pool->acquire_data(build_batch.tuple_data_pool(), false);
RETURN_IF_LIMIT_EXCEEDED(state, " SetOperation, while constructing the hash table.");
// build hash table and remove duplicate items
for (int i = 0; i < build_batch.num_rows(); ++i) {
VLOG_ROW << "build row: "
<< get_row_output_string(build_batch.get_row(i), child(0)->row_desc());
_hash_tbl->insert_unique(build_batch.get_row(i));
}
VLOG_ROW << "hash table content: " << _hash_tbl->debug_string(true, &child(0)->row_desc());
build_batch.reset();
}
return Status::OK();
}
} // namespace doris
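
For context, the new open() builds the hash table from child(0) with insert_unique, so each distinct build row is kept exactly once before the probe side is processed. The snippet below is a minimal, standalone sketch of that deduplication idea only; it uses std::unordered_set over plain integers as a stand-in for the real HashTable, which hashes the evaluated child expression values per row.

#include <iostream>
#include <unordered_set>
#include <vector>

// Simplified stand-in for the build phase in SetOperationNode::open():
// insert each build-side row exactly once so duplicates are removed
// before probing. The real code hashes evaluated expr values, not ints.
int main() {
    std::vector<int> build_rows = {1, 2, 2, 3, 3, 3};
    std::unordered_set<int> hash_tbl;  // plays the role of _hash_tbl
    for (int row : build_rows) {
        // insert_unique semantics: only the first occurrence of a row is kept
        hash_tbl.insert(row);
    }
    std::cout << "distinct build rows: " << hash_tbl.size() << std::endl;  // prints 3
    return 0;
}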