/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_internals.h"
#include "dex_file-inl.h"
#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
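// Size of the red zone appended to each allocation under Valgrind; AllocValgrind() marks
// this range inaccessible so reads or writes past the requested size are reported.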
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
};

Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", NULL, size, PROT_READ | PROT_WRITE, &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
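    // calloc() zero-fills the block; the allocator depends on unused arena bytes staying
    // zeroed (AllocValgrind() CHECKs this, and Reset() re-zeroes used bytes).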
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

void Arena::Reset() {
  if (bytes_allocated_) {
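    // Either zero the used bytes in place, or let the kernel reclaim the pages; anonymous
    // pages dropped with MADV_DONTNEED read back as zeros on the next touch.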
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
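  // Tear down the intrusive singly-linked free list, which is chained through Arena::next_.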
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

Arena* ArenaPool::AllocArena(size_t size) {
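  // Only the head of the free list is examined; if it is too small, a fresh arena is
  // allocated below rather than searching the rest of the list.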
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::FreeArena(Arena* arena) {
  Thread* self = Thread::Current();
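  // Mark the recycled range undefined so Valgrind flags any reads of stale data left by
  // the arena's previous user.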
  if (UNLIKELY(RUNNING_ON_VALGRIND)) {
    VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
  }
  {
    MutexLock lock(self, lock_);
    arena->next_ = free_arenas_;
    free_arenas_ = arena;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  size_t total = 0;
  for (int i = 0; i < kNumAllocKinds; i++) {
    total += alloc_stats_[i];
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      num_allocations_(0),
      running_on_valgrind_(RUNNING_ON_VALGRIND) {
  memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
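  // Reserve kValgrindRedZoneBytes after the caller's bytes, then round the total up to a
  // 4-byte boundary with the "+ 3 ... & ~3" idiom.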
  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  if (kCountAllocations) {
    alloc_stats_[kind] += rounded_bytes;
    ++num_allocations_;
  }
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
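  // Make the red zone (plus any rounding slack) inaccessible so overruns past the
  // requested size are caught.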
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  while (arena_head_ != nullptr) {
    Arena* arena = arena_head_;
    arena_head_ = arena_head_->next_;
    pool_->FreeArena(arena);
  }
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

// Dump memory usage stats.
void ArenaAllocator::DumpMemStats(std::ostream& os) const {
  size_t malloc_bytes = 0;
  // Start out with how many lost bytes we have in the arena we are currently allocating into.
  size_t lost_bytes(end_ - ptr_);
  size_t num_arenas = 0;
  for (Arena* arena = arena_head_; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    if (arena != arena_head_) {
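      // Non-head arenas have been retired; whatever space remains in them is wasted.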
      lost_bytes += arena->RemainingSpace();
    }
    ++num_arenas;
  }
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  if (num_allocations_ != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations_ << ", avg size: " << bytes_allocated / num_allocations_ << "\n";
  }
  os << "===== Allocation by kind\n";
  for (int i = 0; i < kNumAllocKinds; i++) {
    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

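// Typical usage, as a sketch (Alloc() and the kAlloc* kinds are declared in
// arena_allocator.h, not in this file):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* p = allocator.Alloc(64, kAllocMisc);  // Zeroed memory, lives as long as the allocator.
//   }  // ~ArenaAllocator() returns every arena to the pool for reuse.
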
}  // namespace art