[improvement] make asan work as much as possible (#8148)

* make ASAN poisoning work as much as possible

Before this patch, a use-after-poison is reported like below:
==19305==ERROR: AddressSanitizer: unknown-crash on address
0x625000137013 at pc 0x561c44bcf6b8 bp 0x7ffb75a00910 sp 0x7ffb75a000b8

After this patch, the use-after-poison is reported like below:
==17782==ERROR: AddressSanitizer: use-after-poison on address
0x625000137033 at pc 0x55633c8f56b8 bp 0x7ff3dc437930 sp 0x7ff3dc43

Before this patch, an invalid memory usage is reported as an AddressSanitizer CHECK failure like below:
==33080==AddressSanitizer CHECK failed: ../../../../src/libsanitizer/
asan/asan_allocator.cpp:189 "((old)) == ((kAllocBegMagic))"
dataroaring authored 2022-02-22 09:29:22 +08:00, committed by GitHub
parent 6e8d52f3fc, commit d6aebc0c2c
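
For context on the reports quoted above: the mempool marks memory with ASan's manual poisoning macros, and a use-after-poison is simply a read or write of a byte that is still poisoned. A minimal standalone repro (my own illustration, not part of this commit) that produces such a report:

    // Standalone illustration, not from this commit.
    // Compile with: clang++ -fsanitize=address -g poison_demo.cc
    #include <sanitizer/asan_interface.h>
    #include <cstdio>
    #include <cstdlib>

    int main() {
        // 64-byte buffer standing in for a mempool chunk (malloc alignment >= 8).
        char* chunk = static_cast<char*>(std::malloc(64));
        // Mark the whole chunk off-limits, then hand out the first 16 bytes.
        ASAN_POISON_MEMORY_REGION(chunk, 64);
        ASAN_UNPOISON_MEMORY_REGION(chunk, 16);
        chunk[0] = 'x';                 // fine: inside the unpoisoned region
        std::printf("%d\n", chunk[32]); // read of a poisoned byte: use-after-poison
        return 0;
    }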

@@ -163,6 +163,12 @@ public:
     static constexpr int DEFAULT_ALIGNMENT = 8;
+#if defined(__SANITIZE_ADDRESS__) || defined(ADDRESS_SANITIZER)
+    static constexpr int DEFAULT_PADDING_SIZE = 0x10;
+#else
+    static constexpr int DEFAULT_PADDING_SIZE = 0x0;
+#endif
+
 private:
     friend class MemPoolTest;
     static const int INITIAL_CHUNK_SIZE = 4 * 1024;
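
A note on the new constant (my reading, not stated in the patch): under ASan each allocation handed out from a chunk is preceded by DEFAULT_PADDING_SIZE bytes that stay poisoned, so an overflow off the end of the previous allocation lands in poisoned memory and is reported; 0x10 is a multiple of ASan's 8-byte shadow granularity, so the gap can be poisoned precisely. A standalone sketch of the effect:

    // Sketch only, not code from the patch. Compile with -fsanitize=address.
    #include <sanitizer/asan_interface.h>
    #include <cstdlib>
    #include <cstring>

    int main() {
        constexpr int kPadding = 0x10;  // mirrors DEFAULT_PADDING_SIZE under ASan
        char* chunk = static_cast<char*>(std::malloc(1024));
        ASAN_POISON_MEMORY_REGION(chunk, 1024);  // whole chunk starts poisoned

        // First allocation: 24 bytes placed after a poisoned 16-byte gap,
        // like RoundUpToPowerOf2(allocated_bytes + DEFAULT_PADDING_SIZE, 8).
        char* a = chunk + kPadding;
        ASAN_UNPOISON_MEMORY_REGION(a, 24);

        // Second allocation starts another 16 bytes later, so the bytes in
        // between stay poisoned and act as a redzone between allocations.
        char* b = chunk + kPadding + 24 + kPadding;
        ASAN_UNPOISON_MEMORY_REGION(b, 24);

        std::memset(a, 0, 24);  // fine: stays inside the first allocation
        a[24] = 1;              // overflow into the poisoned gap: use-after-poison
        (void)b;
        return 0;
    }
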
@@ -203,26 +209,48 @@ private:
         return chunks_[current_chunk_idx_].allocated_bytes;
     }
 
+    uint8_t* allocate_from_current_chunk(int64_t size, int alignment) {
+        // Manual ASAN poisoning is complicated and hard to get right.
+        // There are illustrated examples in
+        // http://blog.hostilefork.com/poison-memory-without-asan/.
+        //
+        // Stacks of use-after-poison reports do not provide enough
+        // information to resolve the bug, while stacks of use-after-free
+        // reports do. https://github.com/google/sanitizers/issues/191
+        //
+        // Ideally the mempool would use malloc/free directly so that ASAN
+        // works natively. Since that cannot be done in a short time, make
+        // manual poisoning work as much as possible.
+        // See https://github.com/mcgov/asan_alignment_example.
+        ChunkInfo& info = chunks_[current_chunk_idx_];
+        int64_t aligned_allocated_bytes =
+                BitUtil::RoundUpToPowerOf2(info.allocated_bytes + DEFAULT_PADDING_SIZE, alignment);
+        if (aligned_allocated_bytes + size <= info.chunk.size) {
+            // Ensure the requested alignment is respected.
+            int64_t padding = aligned_allocated_bytes - info.allocated_bytes;
+            uint8_t* result = info.chunk.data + aligned_allocated_bytes;
+            ASAN_UNPOISON_MEMORY_REGION(result, size);
+            DCHECK_LE(info.allocated_bytes + size, info.chunk.size);
+            info.allocated_bytes += padding + size;
+            total_allocated_bytes_ += padding + size;
+            peak_allocated_bytes_ = std::max(total_allocated_bytes_, peak_allocated_bytes_);
+            DCHECK_LE(current_chunk_idx_, chunks_.size() - 1);
+            return result;
+        }
+        return nullptr;
+    }
+
     template <bool CHECK_LIMIT_FIRST>
     uint8_t* ALWAYS_INLINE allocate(int64_t size, int alignment) {
         DCHECK_GE(size, 0);
         if (UNLIKELY(size == 0)) return reinterpret_cast<uint8_t*>(&k_zero_length_region_);
         if (current_chunk_idx_ != -1) {
-            ChunkInfo& info = chunks_[current_chunk_idx_];
-            int64_t aligned_allocated_bytes =
-                    BitUtil::RoundUpToPowerOf2(info.allocated_bytes, alignment);
-            if (aligned_allocated_bytes + size <= info.chunk.size) {
-                // Ensure the requested alignment is respected.
-                int64_t padding = aligned_allocated_bytes - info.allocated_bytes;
-                uint8_t* result = info.chunk.data + aligned_allocated_bytes;
-                ASAN_UNPOISON_MEMORY_REGION(result, size);
-                DCHECK_LE(info.allocated_bytes + size, info.chunk.size);
-                info.allocated_bytes += padding + size;
-                total_allocated_bytes_ += padding + size;
-                DCHECK_LE(current_chunk_idx_, chunks_.size() - 1);
-                return result;
-            }
+            uint8_t* result = allocate_from_current_chunk(size, alignment);
+            if (result != nullptr) {
+                return result;
+            }
         }
         // If we couldn't allocate a new chunk, return nullptr. malloc() guarantees alignment
@@ -230,16 +258,11 @@ private:
         // guarantee alignment.
         //static_assert(
         //INITIAL_CHUNK_SIZE >= config::FLAGS_MEMORY_MAX_ALIGNMENT, "Min chunk size too low");
-        if (UNLIKELY(!find_chunk(size, CHECK_LIMIT_FIRST))) return nullptr;
+        if (UNLIKELY(!find_chunk(size, CHECK_LIMIT_FIRST))) {
+            return nullptr;
+        }
 
-        ChunkInfo& info = chunks_[current_chunk_idx_];
-        uint8_t* result = info.chunk.data + info.allocated_bytes;
-        ASAN_UNPOISON_MEMORY_REGION(result, size);
-        DCHECK_LE(info.allocated_bytes + size, info.chunk.size);
-        info.allocated_bytes += size;
-        total_allocated_bytes_ += size;
-        DCHECK_LE(current_chunk_idx_, chunks_.size() - 1);
-        peak_allocated_bytes_ = std::max(total_allocated_bytes_, peak_allocated_bytes_);
+        uint8_t* result = allocate_from_current_chunk(size, alignment);
         return result;
     }
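
The comment in allocate_from_current_chunk points at the preferred long-term fix: build the mempool on malloc/free so ASan tracks every allocation natively and reports carry full allocation/free stacks. A rough sketch of that direction (hypothetical SimpleAsanPool class, not part of this commit or the Doris codebase):

    // Sketch of the alternative the comment alludes to: under ASan, let every
    // pool allocation be its own malloc so ASan adds redzones and records the
    // allocation stack itself, with no manual poisoning needed.
    #include <cstdint>
    #include <cstdlib>
    #include <vector>

    class SimpleAsanPool {
    public:
        uint8_t* allocate(int64_t size) {
            // One malloc per allocation: overflows and use-after-free are
            // reported with precise stacks instead of use-after-poison.
            uint8_t* p = static_cast<uint8_t*>(std::malloc(static_cast<size_t>(size)));
            allocations_.push_back(p);
            return p;
        }

        // Mirrors a mempool's bulk release; freeing each block lets ASan
        // quarantine it and catch later use-after-free.
        void free_all() {
            for (uint8_t* p : allocations_) std::free(p);
            allocations_.clear();
        }

        ~SimpleAsanPool() { free_all(); }

    private:
        std::vector<uint8_t*> allocations_;
    };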