Diffstat (limited to 'runtime/gc')
-rw-r--r--  runtime/gc/allocator/rosalloc.h                    4
-rw-r--r--  runtime/gc/heap-inl.h                              4
-rw-r--r--  runtime/gc/heap.cc                                 3
-rw-r--r--  runtime/gc/space/large_object_space.cc             5
-rw-r--r--  runtime/gc/space/memory_tool_malloc_space-inl.h  141
-rw-r--r--  runtime/gc/space/rosalloc_space.cc                 6
-rw-r--r--  runtime/gc/space/rosalloc_space.h                  4
7 files changed, 98 insertions, 69 deletions
diff --git a/runtime/gc/allocator/rosalloc.h b/runtime/gc/allocator/rosalloc.h
index 150fe956ae..30213d55c5 100644
--- a/runtime/gc/allocator/rosalloc.h
+++ b/runtime/gc/allocator/rosalloc.h
@@ -625,7 +625,7 @@ class RosAlloc {
 
   // If true, check that the returned memory is actually zero.
   static constexpr bool kCheckZeroMemory = kIsDebugBuild;
-  // Valgrind protects memory, so do not check memory when running under valgrind. In a normal
+  // Do not check memory when running under a memory tool. In a normal
   // build with kCheckZeroMemory the whole test should be optimized away.
   // TODO: Unprotect before checks.
   ALWAYS_INLINE bool ShouldCheckZeroMemory();
@@ -768,7 +768,7 @@ class RosAlloc {
   // greater than or equal to this value, release pages.
   const size_t page_release_size_threshold_;
 
-  // Whether this allocator is running under Valgrind.
+  // Whether this allocator is running on a memory tool.
   bool is_running_on_memory_tool_;
 
   // The base address of the memory region that's managed by this allocator.
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 948d23303c..675686830e 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -272,7 +272,7 @@ inline mirror::Object* Heap::TryToAllocate(Thread* self,
     }
     case kAllocatorTypeRosAlloc: {
       if (kInstrumented && UNLIKELY(is_running_on_memory_tool_)) {
-        // If running on valgrind or asan, we should be using the instrumented path.
+        // If running on ASan, we should be using the instrumented path.
         size_t max_bytes_tl_bulk_allocated = rosalloc_space_->MaxBytesBulkAllocatedFor(alloc_size);
         if (UNLIKELY(IsOutOfMemoryOnAllocation(allocator_type,
                                                max_bytes_tl_bulk_allocated,
@@ -303,7 +303,7 @@ inline mirror::Object* Heap::TryToAllocate(Thread* self,
     }
     case kAllocatorTypeDlMalloc: {
       if (kInstrumented && UNLIKELY(is_running_on_memory_tool_)) {
-        // If running on valgrind, we should be using the instrumented path.
+        // If running on ASan, we should be using the instrumented path.
         ret = dlmalloc_space_->Alloc(self,
                                      alloc_size,
                                      bytes_allocated,
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index b004566ed1..12021b7f99 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2248,7 +2248,8 @@ class ZygoteCompactingCollector FINAL : public collector::SemiSpace {
       // Add a new bin with the remaining space.
       AddBin(size - alloc_size, pos + alloc_size);
     }
-    // Copy the object over to its new location. Don't use alloc_size to avoid valgrind error.
+    // Copy the object over to its new location.
+    // Historical note: We did not use `alloc_size` to avoid a Valgrind error.
     memcpy(reinterpret_cast<void*>(forward_address), obj, obj_size);
     if (kUseBakerReadBarrier) {
       obj->AssertReadBarrierState();
diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc
index 512cde484d..a24ca32314 100644
--- a/runtime/gc/space/large_object_space.cc
+++ b/runtime/gc/space/large_object_space.cc
@@ -45,8 +45,9 @@ class MemoryToolLargeObjectMapSpace FINAL : public LargeObjectMapSpace {
   }
 
   ~MemoryToolLargeObjectMapSpace() OVERRIDE {
-    // Keep valgrind happy if there is any large objects such as dex cache arrays which aren't
-    // freed since they are held live by the class linker.
+    // Historical note: We were deleting large objects to keep Valgrind happy if there were
+    // any large objects such as Dex cache arrays which aren't freed since they are held live
+    // by the class linker.
     MutexLock mu(Thread::Current(), lock_);
     for (auto& m : large_objects_) {
       delete m.second.mem_map;
diff --git a/runtime/gc/space/memory_tool_malloc_space-inl.h b/runtime/gc/space/memory_tool_malloc_space-inl.h
index 8282f3dda7..c022171082 100644
--- a/runtime/gc/space/memory_tool_malloc_space-inl.h
+++ b/runtime/gc/space/memory_tool_malloc_space-inl.h
@@ -30,11 +30,14 @@ namespace space {
 namespace memory_tool_details {
 
 template <size_t kMemoryToolRedZoneBytes, bool kUseObjSizeForUsable>
-inline mirror::Object* AdjustForValgrind(void* obj_with_rdz, size_t num_bytes,
-                                         size_t bytes_allocated, size_t usable_size,
-                                         size_t bytes_tl_bulk_allocated,
-                                         size_t* bytes_allocated_out, size_t* usable_size_out,
-                                         size_t* bytes_tl_bulk_allocated_out) {
+inline mirror::Object* AdjustForMemoryTool(void* obj_with_rdz,
+                                           size_t num_bytes,
+                                           size_t bytes_allocated,
+                                           size_t usable_size,
+                                           size_t bytes_tl_bulk_allocated,
+                                           size_t* bytes_allocated_out,
+                                           size_t* usable_size_out,
+                                           size_t* bytes_tl_bulk_allocated_out) {
   if (bytes_allocated_out != nullptr) {
     *bytes_allocated_out = bytes_allocated;
   }
@@ -84,24 +87,31 @@ template <typename S,
           bool kUseObjSizeForUsable>
 mirror::Object* MemoryToolMallocSpace<S,
-                                    kMemoryToolRedZoneBytes,
-                                    kAdjustForRedzoneInAllocSize,
-                                    kUseObjSizeForUsable>::AllocWithGrowth(
-    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
+                                      kMemoryToolRedZoneBytes,
+                                      kAdjustForRedzoneInAllocSize,
+                                      kUseObjSizeForUsable>::AllocWithGrowth(
+    Thread* self,
+    size_t num_bytes,
+    size_t* bytes_allocated_out,
+    size_t* usable_size_out,
     size_t* bytes_tl_bulk_allocated_out) {
   size_t bytes_allocated;
   size_t usable_size;
   size_t bytes_tl_bulk_allocated;
-  void* obj_with_rdz = S::AllocWithGrowth(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
-                                          &bytes_allocated, &usable_size,
+  void* obj_with_rdz = S::AllocWithGrowth(self,
+                                          num_bytes + 2 * kMemoryToolRedZoneBytes,
+                                          &bytes_allocated,
+                                          &usable_size,
                                           &bytes_tl_bulk_allocated);
   if (obj_with_rdz == nullptr) {
     return nullptr;
   }
-  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
-      obj_with_rdz, num_bytes,
-      bytes_allocated, usable_size,
+  return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
+      obj_with_rdz,
+      num_bytes,
+      bytes_allocated,
+      usable_size,
       bytes_tl_bulk_allocated,
       bytes_allocated_out,
       usable_size_out,
@@ -113,27 +123,35 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 mirror::Object* MemoryToolMallocSpace<S,
-                                    kMemoryToolRedZoneBytes,
-                                    kAdjustForRedzoneInAllocSize,
-                                    kUseObjSizeForUsable>::Alloc(
-    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
+                                      kMemoryToolRedZoneBytes,
+                                      kAdjustForRedzoneInAllocSize,
+                                      kUseObjSizeForUsable>::Alloc(
+    Thread* self,
+    size_t num_bytes,
+    size_t* bytes_allocated_out,
+    size_t* usable_size_out,
     size_t* bytes_tl_bulk_allocated_out) {
   size_t bytes_allocated;
   size_t usable_size;
   size_t bytes_tl_bulk_allocated;
-  void* obj_with_rdz = S::Alloc(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
-                                &bytes_allocated, &usable_size, &bytes_tl_bulk_allocated);
+  void* obj_with_rdz = S::Alloc(self,
+                                num_bytes + 2 * kMemoryToolRedZoneBytes,
+                                &bytes_allocated,
+                                &usable_size,
+                                &bytes_tl_bulk_allocated);
   if (obj_with_rdz == nullptr) {
     return nullptr;
   }
-  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes,
-                                                kUseObjSizeForUsable>(obj_with_rdz, num_bytes,
-                                                                      bytes_allocated, usable_size,
-                                                                      bytes_tl_bulk_allocated,
-                                                                      bytes_allocated_out,
-                                                                      usable_size_out,
-                                                                      bytes_tl_bulk_allocated_out);
+  return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
+      obj_with_rdz,
+      num_bytes,
+      bytes_allocated,
+      usable_size,
+      bytes_tl_bulk_allocated,
+      bytes_allocated_out,
+      usable_size_out,
+      bytes_tl_bulk_allocated_out);
 }
 
 template <typename S,
@@ -141,24 +159,31 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 mirror::Object* MemoryToolMallocSpace<S,
-                                    kMemoryToolRedZoneBytes,
-                                    kAdjustForRedzoneInAllocSize,
-                                    kUseObjSizeForUsable>::AllocThreadUnsafe(
-    Thread* self, size_t num_bytes, size_t* bytes_allocated_out, size_t* usable_size_out,
+                                      kMemoryToolRedZoneBytes,
+                                      kAdjustForRedzoneInAllocSize,
+                                      kUseObjSizeForUsable>::AllocThreadUnsafe(
+    Thread* self,
+    size_t num_bytes,
+    size_t* bytes_allocated_out,
+    size_t* usable_size_out,
     size_t* bytes_tl_bulk_allocated_out) {
   size_t bytes_allocated;
   size_t usable_size;
   size_t bytes_tl_bulk_allocated;
-  void* obj_with_rdz = S::AllocThreadUnsafe(self, num_bytes + 2 * kMemoryToolRedZoneBytes,
-                                            &bytes_allocated, &usable_size,
+  void* obj_with_rdz = S::AllocThreadUnsafe(self,
+                                            num_bytes + 2 * kMemoryToolRedZoneBytes,
+                                            &bytes_allocated,
+                                            &usable_size,
                                             &bytes_tl_bulk_allocated);
   if (obj_with_rdz == nullptr) {
     return nullptr;
   }
-  return memory_tool_details::AdjustForValgrind<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
-      obj_with_rdz, num_bytes,
-      bytes_allocated, usable_size,
+  return memory_tool_details::AdjustForMemoryTool<kMemoryToolRedZoneBytes, kUseObjSizeForUsable>(
+      obj_with_rdz,
+      num_bytes,
+      bytes_allocated,
+      usable_size,
       bytes_tl_bulk_allocated,
       bytes_allocated_out,
       usable_size_out,
@@ -170,12 +195,14 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 size_t MemoryToolMallocSpace<S,
-                           kMemoryToolRedZoneBytes,
-                           kAdjustForRedzoneInAllocSize,
-                           kUseObjSizeForUsable>::AllocationSize(
+                             kMemoryToolRedZoneBytes,
+                             kAdjustForRedzoneInAllocSize,
+                             kUseObjSizeForUsable>::AllocationSize(
     mirror::Object* obj, size_t* usable_size) {
-  size_t result = S::AllocationSize(reinterpret_cast<mirror::Object*>(
-      reinterpret_cast<uint8_t*>(obj) - (kAdjustForRedzoneInAllocSize ? kMemoryToolRedZoneBytes : 0)),
+  size_t result = S::AllocationSize(
+      reinterpret_cast<mirror::Object*>(
+          reinterpret_cast<uint8_t*>(obj)
+          - (kAdjustForRedzoneInAllocSize ? kMemoryToolRedZoneBytes : 0)),
       usable_size);
   if (usable_size != nullptr) {
     if (kUseObjSizeForUsable) {
@@ -192,10 +219,9 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 size_t MemoryToolMallocSpace<S,
-                           kMemoryToolRedZoneBytes,
-                           kAdjustForRedzoneInAllocSize,
-                           kUseObjSizeForUsable>::Free(
-    Thread* self, mirror::Object* ptr) {
+                             kMemoryToolRedZoneBytes,
+                             kAdjustForRedzoneInAllocSize,
+                             kUseObjSizeForUsable>::Free(Thread* self, mirror::Object* ptr) {
   void* obj_after_rdz = reinterpret_cast<void*>(ptr);
   uint8_t* obj_with_rdz = reinterpret_cast<uint8_t*>(obj_after_rdz) - kMemoryToolRedZoneBytes;
@@ -220,10 +246,10 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 size_t MemoryToolMallocSpace<S,
-                           kMemoryToolRedZoneBytes,
-                           kAdjustForRedzoneInAllocSize,
-                           kUseObjSizeForUsable>::FreeList(
-  Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
+                             kMemoryToolRedZoneBytes,
+                             kAdjustForRedzoneInAllocSize,
+                             kUseObjSizeForUsable>::FreeList(
+    Thread* self, size_t num_ptrs, mirror::Object** ptrs) {
   size_t freed = 0;
   for (size_t i = 0; i < num_ptrs; i++) {
     freed += Free(self, ptrs[i]);
@@ -238,11 +264,12 @@ template <typename S,
           bool kUseObjSizeForUsable>
 template <typename... Params>
 MemoryToolMallocSpace<S,
-                    kMemoryToolRedZoneBytes,
-                    kAdjustForRedzoneInAllocSize,
-                    kUseObjSizeForUsable>::MemoryToolMallocSpace(
-    MemMap* mem_map, size_t initial_size, Params... params) : S(mem_map, initial_size, params...) {
-  // Don't want to change the valgrind states of the mem map here as the allocator is already
+                      kMemoryToolRedZoneBytes,
+                      kAdjustForRedzoneInAllocSize,
+                      kUseObjSizeForUsable>::MemoryToolMallocSpace(
+    MemMap* mem_map, size_t initial_size, Params... params)
+    : S(mem_map, initial_size, params...) {
+  // Don't want to change the memory tool states of the mem map here as the allocator is already
   // initialized at this point and that may interfere with what the allocator does internally. Note
   // that the tail beyond the initial size is mprotected.
 }
@@ -252,9 +279,9 @@ template <typename S,
           bool kAdjustForRedzoneInAllocSize,
           bool kUseObjSizeForUsable>
 size_t MemoryToolMallocSpace<S,
-                           kMemoryToolRedZoneBytes,
-                           kAdjustForRedzoneInAllocSize,
-                           kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
+                             kMemoryToolRedZoneBytes,
+                             kAdjustForRedzoneInAllocSize,
+                             kUseObjSizeForUsable>::MaxBytesBulkAllocatedFor(size_t num_bytes) {
   return S::MaxBytesBulkAllocatedFor(num_bytes + 2 * kMemoryToolRedZoneBytes);
 }
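The pattern behind all of the memory_tool_malloc_space-inl.h hunks above is easier to see outside the template machinery: every allocation is padded with a red zone on each side, the caller receives a pointer just past the leading red zone, and Free()/AllocationSize() undo that offset. Below is a minimal, self-contained sketch of that bookkeeping, using plain malloc/free in place of the underlying space S; the names kRedZoneBytes, AllocWithRedZones, and FreeWithRedZones are invented for illustration and are not ART APIs.

    // Illustrative sketch only (not ART code): the red-zone arithmetic performed
    // by the MemoryToolMallocSpace wrappers, with plain malloc/free standing in
    // for the underlying space S.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    constexpr size_t kRedZoneBytes = 8;  // Stand-in for kMemoryToolRedZoneBytes.

    // Pad the request with a leading and a trailing red zone and return a pointer
    // just past the leading one, as AdjustForMemoryTool does.
    void* AllocWithRedZones(size_t num_bytes) {
      uint8_t* obj_with_rdz =
          static_cast<uint8_t*>(std::malloc(num_bytes + 2 * kRedZoneBytes));
      if (obj_with_rdz == nullptr) {
        return nullptr;
      }
      // A real memory tool would poison both red zones here so that reads or
      // writes just outside [result, result + num_bytes) are reported.
      return obj_with_rdz + kRedZoneBytes;
    }

    // Step back over the leading red zone before freeing, mirroring
    // MemoryToolMallocSpace::Free.
    void FreeWithRedZones(void* obj_after_rdz) {
      if (obj_after_rdz != nullptr) {
        std::free(static_cast<uint8_t*>(obj_after_rdz) - kRedZoneBytes);
      }
    }

    int main() {
      void* p = AllocWithRedZones(64);
      assert(p != nullptr);
      FreeWithRedZones(p);
      return 0;
    }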
diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc
index e7865363a1..d698cf20ae 100644
--- a/runtime/gc/space/rosalloc_space.cc
+++ b/runtime/gc/space/rosalloc_space.cc
@@ -77,7 +77,7 @@ RosAllocSpace* RosAllocSpace::CreateFromMemMap(MemMap* mem_map, const std::strin
   // Everything is set so record in immutable structure and leave
   uint8_t* begin = mem_map->Begin();
-  // TODO: Fix RosAllocSpace to support Valgrind/ASan. There is currently some issues with
+  // TODO: Fix RosAllocSpace to support ASan. There are currently some issues with
   // AllocationSize caused by redzones. b/12944686
   if (running_on_memory_tool) {
     return new MemoryToolMallocSpace<RosAllocSpace, kDefaultMemoryToolRedZoneBytes, false, true>(
@@ -382,12 +382,12 @@ size_t RosAllocSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usab
   size_t size = obj->SizeOf<kVerifyNone>();
   bool add_redzones = false;
   if (kMaybeIsRunningOnMemoryTool) {
-    add_redzones = RUNNING_ON_MEMORY_TOOL ? kMemoryToolAddsRedzones : 0;
+    add_redzones = kRunningOnMemoryTool ? kMemoryToolAddsRedzones : 0;
     if (add_redzones) {
       size += 2 * kDefaultMemoryToolRedZoneBytes;
     }
   } else {
-    DCHECK_EQ(RUNNING_ON_MEMORY_TOOL, 0U);
+    DCHECK(!kRunningOnMemoryTool);
   }
   size_t size_by_size = rosalloc_->UsableSize(size);
   if (kIsDebugBuild) {
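The AllocationSizeNonvirtual hunk above combines a compile-time template flag with a runtime constant, so a non-instrumented build can fold the whole red-zone adjustment away. A hedged sketch of that gating follows; kOnTool and kRedZoneBytes are invented stand-ins for ART's kRunningOnMemoryTool and kDefaultMemoryToolRedZoneBytes.

    // Illustrative sketch only (not ART code).
    #include <cstddef>

    constexpr bool kOnTool = false;      // Compile-time constant in this sketch.
    constexpr size_t kRedZoneBytes = 8;

    template <bool kMaybeOnTool>
    size_t AdjustedUsableSize(size_t object_size) {
      if (kMaybeOnTool) {
        // Only instrumented configurations reach this branch; when kOnTool is
        // false the compiler drops it entirely.
        if (kOnTool) {
          return object_size + 2 * kRedZoneBytes;  // Account for both red zones.
        }
      }
      return object_size;
    }

    // Usage: AdjustedUsableSize<true>(n) in builds that may run under a tool,
    // AdjustedUsableSize<false>(n) otherwise.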
diff --git a/runtime/gc/space/rosalloc_space.h b/runtime/gc/space/rosalloc_space.h
index 9d16b87b7d..4c17233360 100644
--- a/runtime/gc/space/rosalloc_space.h
+++ b/runtime/gc/space/rosalloc_space.h
@@ -159,8 +159,8 @@ class RosAllocSpace : public MallocSpace {
   void* CreateAllocator(void* base, size_t morecore_start, size_t initial_size,
                         size_t maximum_size, bool low_memory_mode) OVERRIDE {
-    return CreateRosAlloc(base, morecore_start, initial_size, maximum_size, low_memory_mode,
-                          RUNNING_ON_MEMORY_TOOL != 0);
+    return CreateRosAlloc(
+        base, morecore_start, initial_size, maximum_size, low_memory_mode, kRunningOnMemoryTool);
   }
   static allocator::RosAlloc* CreateRosAlloc(void* base, size_t morecore_start,
                                              size_t initial_size, size_t maximum_size,
                                              bool low_memory_mode,