/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
#define ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_

#include "rosalloc.h"

namespace art {
namespace gc {
namespace allocator {

Andreas Gamped7576322014-10-24 22:13:45 -070026inline ALWAYS_INLINE bool RosAlloc::ShouldCheckZeroMemory() {
27 return kCheckZeroMemory && !running_on_valgrind_;
28}
29
Mathieu Chartier0651d412014-04-29 14:37:57 -070030template<bool kThreadSafe>
Hiroshi Yamauchi4460a842015-03-09 11:57:48 -070031inline ALWAYS_INLINE void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated,
32 size_t* usable_size,
33 size_t* bytes_tl_bulk_allocated) {
Hiroshi Yamauchi3c2856e2013-11-22 13:42:53 -080034 if (UNLIKELY(size > kLargeSizeThreshold)) {
Hiroshi Yamauchi4460a842015-03-09 11:57:48 -070035 return AllocLargeObject(self, size, bytes_allocated, usable_size,
36 bytes_tl_bulk_allocated);
Hiroshi Yamauchi3c2856e2013-11-22 13:42:53 -080037 }
Mathieu Chartier0651d412014-04-29 14:37:57 -070038 void* m;
39 if (kThreadSafe) {
Hiroshi Yamauchi4460a842015-03-09 11:57:48 -070040 m = AllocFromRun(self, size, bytes_allocated, usable_size, bytes_tl_bulk_allocated);
Mathieu Chartier0651d412014-04-29 14:37:57 -070041 } else {
Hiroshi Yamauchi4460a842015-03-09 11:57:48 -070042 m = AllocFromRunThreadUnsafe(self, size, bytes_allocated, usable_size,
43 bytes_tl_bulk_allocated);
Mathieu Chartier0651d412014-04-29 14:37:57 -070044 }
Hiroshi Yamauchi3c2856e2013-11-22 13:42:53 -080045 // Check if the returned memory is really all zero.
Andreas Gamped7576322014-10-24 22:13:45 -070046 if (ShouldCheckZeroMemory() && m != nullptr) {
Ian Rogers13735952014-10-08 12:43:28 -070047 uint8_t* bytes = reinterpret_cast<uint8_t*>(m);
Hiroshi Yamauchi3c2856e2013-11-22 13:42:53 -080048 for (size_t i = 0; i < size; ++i) {
49 DCHECK_EQ(bytes[i], 0);
50 }
51 }
52 return m;
53}
54
Hiroshi Yamauchi4460a842015-03-09 11:57:48 -070055inline bool RosAlloc::Run::IsFull() {
56 const size_t num_vec = NumberOfBitmapVectors();
57 for (size_t v = 0; v < num_vec; ++v) {
58 if (~alloc_bit_map_[v] != 0) {
59 return false;
60 }
61 }
62 return true;
63}
64
65inline bool RosAlloc::CanAllocFromThreadLocalRun(Thread* self, size_t size) {
66 if (UNLIKELY(!IsSizeForThreadLocal(size))) {
67 return false;
68 }
69 size_t bracket_size;
70 size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
71 DCHECK_EQ(idx, SizeToIndex(size));
72 DCHECK_EQ(bracket_size, IndexToBracketSize(idx));
73 DCHECK_EQ(bracket_size, bracketSizes[idx]);
74 DCHECK_LE(size, bracket_size);
75 DCHECK(size > 512 || bracket_size - size < 16);
76 DCHECK_LT(idx, kNumThreadLocalSizeBrackets);
77 Run* thread_local_run = reinterpret_cast<Run*>(self->GetRosAllocRun(idx));
78 if (kIsDebugBuild) {
79 // Need the lock to prevent race conditions.
80 MutexLock mu(self, *size_bracket_locks_[idx]);
81 CHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end());
82 CHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end());
83 }
84 DCHECK(thread_local_run != nullptr);
85 DCHECK(thread_local_run->IsThreadLocal() || thread_local_run == dedicated_full_run_);
86 return !thread_local_run->IsFull();
87}
88
89inline void* RosAlloc::AllocFromThreadLocalRun(Thread* self, size_t size,
90 size_t* bytes_allocated) {
91 DCHECK(bytes_allocated != nullptr);
92 if (UNLIKELY(!IsSizeForThreadLocal(size))) {
93 return nullptr;
94 }
95 size_t bracket_size;
96 size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
97 Run* thread_local_run = reinterpret_cast<Run*>(self->GetRosAllocRun(idx));
98 if (kIsDebugBuild) {
99 // Need the lock to prevent race conditions.
100 MutexLock mu(self, *size_bracket_locks_[idx]);
101 CHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end());
102 CHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end());
103 }
104 DCHECK(thread_local_run != nullptr);
105 DCHECK(thread_local_run->IsThreadLocal() || thread_local_run == dedicated_full_run_);
106 void* slot_addr = thread_local_run->AllocSlot();
107 if (LIKELY(slot_addr != nullptr)) {
108 *bytes_allocated = bracket_size;
109 }
110 return slot_addr;
111}
112
113inline size_t RosAlloc::MaxBytesBulkAllocatedFor(size_t size) {
114 if (UNLIKELY(!IsSizeForThreadLocal(size))) {
115 return size;
116 }
117 size_t bracket_size;
118 size_t idx = SizeToIndexAndBracketSize(size, &bracket_size);
119 return numOfSlots[idx] * bracket_size;
120}
121
122inline void* RosAlloc::Run::AllocSlot() {
123 const size_t idx = size_bracket_idx_;
124 while (true) {
125 if (kIsDebugBuild) {
126 // Make sure that no slots leaked, the bitmap should be full for all previous vectors.
127 for (size_t i = 0; i < first_search_vec_idx_; ++i) {
128 CHECK_EQ(~alloc_bit_map_[i], 0U);
129 }
130 }
131 uint32_t* const alloc_bitmap_ptr = &alloc_bit_map_[first_search_vec_idx_];
132 uint32_t ffz1 = __builtin_ffs(~*alloc_bitmap_ptr);
133 if (LIKELY(ffz1 != 0)) {
134 const uint32_t ffz = ffz1 - 1;
135 const uint32_t slot_idx = ffz +
136 first_search_vec_idx_ * sizeof(*alloc_bitmap_ptr) * kBitsPerByte;
137 const uint32_t mask = 1U << ffz;
138 DCHECK_LT(slot_idx, numOfSlots[idx]) << "out of range";
139 // Found an empty slot. Set the bit.
140 DCHECK_EQ(*alloc_bitmap_ptr & mask, 0U);
141 *alloc_bitmap_ptr |= mask;
142 DCHECK_NE(*alloc_bitmap_ptr & mask, 0U);
143 uint8_t* slot_addr = reinterpret_cast<uint8_t*>(this) +
144 headerSizes[idx] + slot_idx * bracketSizes[idx];
145 if (kTraceRosAlloc) {
146 LOG(INFO) << "RosAlloc::Run::AllocSlot() : 0x" << std::hex
147 << reinterpret_cast<intptr_t>(slot_addr)
148 << ", bracket_size=" << std::dec << bracketSizes[idx]
149 << ", slot_idx=" << slot_idx;
150 }
151 return slot_addr;
152 }
153 const size_t num_words = RoundUp(numOfSlots[idx], 32) / 32;
154 if (first_search_vec_idx_ + 1 >= num_words) {
155 DCHECK(IsFull());
156 // Already at the last word, return null.
157 return nullptr;
158 }
159 // Increase the index to the next word and try again.
160 ++first_search_vec_idx_;
161 }
162}

}  // namespace allocator
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_