/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include <android-base/logging.h>

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

// Holds a list of Arenas for use by the ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    DebugStackRefCounter::CheckNoRefs();
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }
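
  // For example, a double-free check is possible in debug builds along these lines
  // (a sketch; `ptr` stands for a pointer previously handed out by the allocator):
  //
  //   DCHECK(ArenaStack::ArenaTagForAllocation(ptr) == ArenaFreeTag::kUsed);
  //   ArenaStack::ArenaTagForAllocation(ptr) = ArenaFreeTag::kFree;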

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  const ArenaAllocatorStats* PeakStats() const {
    return static_cast<const TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }
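
  // Note on layout in debug builds: kAlignment bytes of padding precede the pointer
  // returned above, and the last padding byte holds the ArenaFreeTag, which is why
  // ArenaTagForAllocation() reads it at `ptr - 1`. Roughly:
  //
  //   | ... padding ... |tag| user bytes ...      |
  //                         ^-- pointer returned by Alloc()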

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator can be used, but
// once it's destroyed, its memory can be reused by the next ScopedArenaAllocator on the
// stack. This is facilitated by returning the memory to the ArenaStack.
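//
// Illustrative usage (a minimal sketch; `pool` is assumed to be an ArenaPool that
// outlives the stack):
//
//   ArenaStack arena_stack(&pool);
//   ScopedArenaAllocator allocator(&arena_stack);
//   int32_t* values = allocator.AllocArray<int32_t>(16);  // Not zero-initialized.
//   // `values` stays valid until `allocator` is destroyed or Reset().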
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  ScopedArenaAllocator(ScopedArenaAllocator&& other) noexcept;
  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  ArenaStack* GetArenaStack() const {
    return arena_stack_;
  }

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
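
  // For example, with the containers from scoped_arena_containers.h (a sketch;
  // `allocator` is a ScopedArenaAllocator):
  //
  //   ScopedArenaVector<uint32_t> ids(allocator.Adapter(kArenaAllocSTL));
  //   ids.push_back(42u);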

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_