From 3ca1976fa72df0b367eaea629e0a7ae59bf6cb44 Mon Sep 17 00:00:00 2001 From: Colin Cross <ccross@android.com> Date: Wed, 28 Nov 2018 17:01:59 -0800 Subject: [PATCH] Validate allocations against mappings Bug 120032857 is seeing what appears to be allocations with incorrect end addresses, leading to a much later crash when it tries to map a zero page outside the valid virtual address space. Detect allocations that extend outside the highest or lowest memory mapping and crash immediately instead. Test: memunreachable_test Bug: 120032857 Change-Id: I9be670a025143e7078360a6bf7a83219279614d9 --- libmemunreachable/HeapWalker.cpp | 12 ++++++++++++ libmemunreachable/HeapWalker.h | 4 ++++ libmemunreachable/MemUnreachable.cpp | 5 +++++ libmemunreachable/tests/HeapWalker_test.cpp | 18 ++++++++++++++++++ 4 files changed, 39 insertions(+) diff --git a/libmemunreachable/HeapWalker.cpp b/libmemunreachable/HeapWalker.cpp index a046dad48..89837f7d2 100644 --- a/libmemunreachable/HeapWalker.cpp +++ b/libmemunreachable/HeapWalker.cpp @@ -35,6 +35,13 @@ bool HeapWalker::Allocation(uintptr_t begin, uintptr_t end) { end = begin + 1; } Range range{begin, end}; + if (valid_mappings_range_.end != 0 && + (begin < valid_mappings_range_.begin || end > valid_mappings_range_.end)) { + MEM_LOG_ALWAYS_FATAL("allocation %p-%p is outside mapping range %p-%p", + reinterpret_cast<void*>(begin), reinterpret_cast<void*>(end), + reinterpret_cast<void*>(valid_mappings_range_.begin), + reinterpret_cast<void*>(valid_mappings_range_.end)); + } auto inserted = allocations_.insert(std::pair<Range, AllocationInfo>(range, AllocationInfo{})); if (inserted.second) { valid_allocations_range_.begin = std::min(valid_allocations_range_.begin, begin); @@ -87,6 +94,11 @@ void HeapWalker::RecurseRoot(const Range& root) { } } +void HeapWalker::Mapping(uintptr_t begin, uintptr_t end) { + valid_mappings_range_.begin = std::min(valid_mappings_range_.begin, begin); + valid_mappings_range_.end = std::max(valid_mappings_range_.end, end); +} + void HeapWalker::Root(uintptr_t begin, uintptr_t end) { 
roots_.push_back(Range{begin, end}); } diff --git a/libmemunreachable/HeapWalker.h b/libmemunreachable/HeapWalker.h index b37cc623b..9e3db080d 100644 --- a/libmemunreachable/HeapWalker.h +++ b/libmemunreachable/HeapWalker.h @@ -59,6 +59,8 @@ class HeapWalker { segv_page_count_(0) { valid_allocations_range_.end = 0; valid_allocations_range_.begin = ~valid_allocations_range_.end; + valid_mappings_range_.end = 0; + valid_mappings_range_.begin = ~valid_allocations_range_.end; segv_handler_.install( SIGSEGV, [=](ScopedSignalHandler& handler, int signal, siginfo_t* siginfo, void* uctx) { @@ -68,6 +70,7 @@ class HeapWalker { ~HeapWalker() {} bool Allocation(uintptr_t begin, uintptr_t end); + void Mapping(uintptr_t begin, uintptr_t end); void Root(uintptr_t begin, uintptr_t end); void Root(const allocator::vector<uintptr_t>& vals); @@ -98,6 +101,7 @@ class HeapWalker { AllocationMap allocations_; size_t allocation_bytes_; Range valid_allocations_range_; + Range valid_mappings_range_; allocator::vector<Range> roots_; allocator::vector<uintptr_t> root_vals_; diff --git a/libmemunreachable/MemUnreachable.cpp b/libmemunreachable/MemUnreachable.cpp index b160de906..3d7b8a8aa 100644 --- a/libmemunreachable/MemUnreachable.cpp +++ b/libmemunreachable/MemUnreachable.cpp @@ -87,6 +87,11 @@ bool MemUnreachable::CollectAllocations(const allocator::vector<ThreadInfo>& threads, const allocator::vector<Mapping>& mappings, const allocator::vector<uintptr_t>& refs) { MEM_ALOGI("searching process %d for allocations", pid_); + + for (auto it = mappings.begin(); it != mappings.end(); it++) { + heap_walker_.Mapping(it->begin, it->end); + } + allocator::vector<Mapping> heap_mappings{mappings}; allocator::vector<Mapping> anon_mappings{mappings}; allocator::vector<Mapping> globals_mappings{mappings}; diff --git a/libmemunreachable/tests/HeapWalker_test.cpp b/libmemunreachable/tests/HeapWalker_test.cpp index 84a0ec63b..9610cd633 100644 --- a/libmemunreachable/tests/HeapWalker_test.cpp +++ b/libmemunreachable/tests/HeapWalker_test.cpp @@ -73,6 +73,24 @@ TEST_F(HeapWalkerTest, zero) { 
ASSERT_FALSE(heap_walker.Allocation(2, 3)); } +TEST_F(HeapWalkerTest, mapping) { + HeapWalker heap_walker(heap_); + heap_walker.Mapping(2, 3); + heap_walker.Mapping(4, 5); + ASSERT_TRUE(heap_walker.Allocation(2, 3)); + ASSERT_TRUE(heap_walker.Allocation(4, 5)); + // space between mappings is not checked, but could be in the future + ASSERT_TRUE(heap_walker.Allocation(3, 4)); + + // re-enable malloc, ASSERT_DEATH may allocate + disable_malloc_.Enable(); + ASSERT_DEATH({ heap_walker.Allocation(1, 2); }, "0x1-0x2.*outside.*0x2-0x5"); + ASSERT_DEATH({ heap_walker.Allocation(1, 3); }, "0x1-0x3.*outside.*0x2-0x5"); + ASSERT_DEATH({ heap_walker.Allocation(4, 6); }, "0x4-0x6.*outside.*0x2-0x5"); + ASSERT_DEATH({ heap_walker.Allocation(5, 6); }, "0x5-0x6.*outside.*0x2-0x5"); + ASSERT_DEATH({ heap_walker.Allocation(1, 6); }, "0x1-0x6.*outside.*0x2-0x5"); +} + #define buffer_begin(buffer) reinterpret_cast<uintptr_t>(buffer) #define buffer_end(buffer) (reinterpret_cast<uintptr_t>(buffer) + sizeof(buffer))