blob: 52d036105a1475bebfb4a0f95120b959ef1bafc1 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
David Sehr1ce2b3b2018-04-05 11:02:03 -070017#ifndef ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_
18#define ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019
Andreas Gampe57943812017-12-06 21:39:13 -080020#include <android-base/logging.h>
21
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "arena_allocator.h"
David Sehr1979c642018-04-26 14:41:18 -070023#include "debug_stack.h"
24#include "globals.h"
25#include "macros.h"
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000026
27namespace art {
28
29class ArenaStack;
30class ScopedArenaAllocator;
31
32template <typename T>
33class ScopedArenaAllocatorAdapter;
34
// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is in use and has not yet been destroyed.
  kUsed = 0u,
  // Allocation has been destroyed.
  kFree = 1u,
};
42
// Holds a list of Arenas for use by ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  // Re-export the memory-tool annotation helpers inherited from
  // ArenaAllocatorMemoryTool so users of the stack can query/annotate memory.
  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Reset the stack for reuse. (Definition lives in the .cc file;
  // presumably returns arenas to the pool — confirm against the .cc.)
  void Reset();

  // Peak number of bytes allocated over this stack's lifetime. Only valid
  // when no ScopedArenaAllocator currently references the stack — enforced
  // in debug builds by the DebugStackRefCounter check below.
  size_t PeakBytesAllocated() {
    DebugStackRefCounter::CheckNoRefs();
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer. The tag is stored in the
  // kAlignment-sized slot immediately preceding the allocation (see Alloc(),
  // debug builds only) and is read from the byte just before `ptr`.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  // Empty tag types distinguishing the two ArenaAllocatorStats bases of
  // StatsAndPool: one instance tracks peak usage, the other current usage.
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  // Select the Peak-tagged stats base via a derived-to-base cast.
  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  const ArenaAllocatorStats* PeakStats() const {
    return static_cast<const TaggedStats<Peak>*>(&stats_and_pool_);
  }

  // Select the Current-tagged stats base.
  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  // Bump-pointer allocation within the top arena; spills over to
  // AllocateFromNextArena() when the remaining space is insufficient.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      // Slow path when running under a memory tool (out-of-line in the .cc).
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      // Skip past the tag slot and mark the allocation as in use; the tag is
      // later read back through ArenaTagForAllocation().
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  // Peak/current statistics plus the pool owning the backing memory.
  StatsAndPool stats_and_pool_;
  // Chain of arenas from bottom (first acquired) to top (currently used).
  // NOTE(review): the linkage between arenas lives in Arena — not visible here.
  Arena* bottom_arena_;
  Arena* top_arena_;
  // Bump pointer and limit within the top arena.
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};
131
Vladimir Markofda04322015-11-11 18:45:50 +0000132// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
133//
134// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
135// objects and allows nesting multiple allocators. Only the top allocator can be used but
136// once it's destroyed, its memory can be reused by the next ScopedArenaAllocator on the
137// stack. This is facilitated by returning the memory to the ArenaStack.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000138class ScopedArenaAllocator
139 : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
140 public:
Andreas Gampe44b31742018-10-01 19:30:57 -0700141 ScopedArenaAllocator(ScopedArenaAllocator&& other) noexcept;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000142 explicit ScopedArenaAllocator(ArenaStack* arena_stack);
143 ~ScopedArenaAllocator();
144
Vladimir Markoe764d2e2017-10-05 14:35:55 +0100145 ArenaStack* GetArenaStack() const {
146 return arena_stack_;
147 }
148
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000149 void Reset();
150
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000151 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000152 DebugStackReference::CheckTop();
153 return arena_stack_->Alloc(bytes, kind);
154 }
155
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000156 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000157 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
158 return AllocArray<T>(1, kind);
159 }
160
161 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000162 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
163 return static_cast<T*>(Alloc(length * sizeof(T), kind));
164 }
165
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100166 // Get adapter for use in STL containers. See scoped_arena_containers.h .
167 ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000168
169 // Allow a delete-expression to destroy but not deallocate allocators created by Create().
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100170 static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000171
172 private:
Vladimir Marko174b2e22017-10-12 13:34:49 +0100173 ArenaStack* arena_stack_;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000174 Arena* mark_arena_;
175 uint8_t* mark_ptr_;
176 uint8_t* mark_end_;
177
Vladimir Marko3d2ec352014-10-10 15:39:11 +0100178 void DoReset();
179
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000180 template <typename T>
181 friend class ScopedArenaAllocatorAdapter;
182
183 DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
184};
185
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000186} // namespace art
187
David Sehr1ce2b3b2018-04-05 11:02:03 -0700188#endif // ART_LIBARTBASE_BASE_SCOPED_ARENA_ALLOCATOR_H_