/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "heap_bitmap.h"

#include <sys/mman.h>

#include "UniquePtr.h"
#include "logging.h"
#include "utils.h"

namespace art {

HeapBitmap* HeapBitmap::Create(const char* name, byte* heap_begin, size_t heap_capacity) {
  CHECK(heap_begin != NULL);
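  // Each bit in the bitmap covers kAlignment bytes of heap, so HB_OFFSET_TO_INDEX converts a heap
  // offset into a bitmap word index; multiplying by kWordSize gives the bitmap size in bytes.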
  size_t bitmap_size = HB_OFFSET_TO_INDEX(heap_capacity) * kWordSize;
  UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name, NULL, bitmap_size, PROT_READ | PROT_WRITE));
  if (mem_map.get() == NULL) {
    LOG(ERROR) << "Failed to allocate bitmap " << name;
    return NULL;
  }
  word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
  return new HeapBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
}

// Clean up any resources associated with the bitmap.
HeapBitmap::~HeapBitmap() {}

// Fills the bitmap with zeroes. Returns the bitmap's memory to the system as a side effect.
void HeapBitmap::Clear() {
  if (bitmap_begin_ != NULL) {
    // This returns the memory to the system. Successive page faults
    // will return zeroed memory.
    int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
    if (result == -1) {
      PLOG(WARNING) << "madvise failed";
    }
    heap_end_ = heap_begin_ - 1;
  }
}

// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
// even if a bit has not been set for it.
bool HeapBitmap::HasAddress(const void* obj) const {
  if (obj != NULL) {
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = HB_OFFSET_TO_INDEX(offset);
    return index < bitmap_size_ / kWordSize;
  }
  return false;
}

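// Visit the objects with set bits in the address range [visit_begin, visit_end), in address order.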
void HeapBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
  size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
  for (size_t i = start; i <= end; i++) {
    word w = bitmap_begin_[i];
    if (w != 0) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
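      // Peel off the set bits from most to least significant: CLZ(w) is the offset of the highest
      // remaining set bit, which corresponds to the object at ptr_base + shift * kAlignment.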
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*visitor)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void HeapBitmap::Walk(HeapBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  if (heap_end_ < heap_begin_) {
    return;  // Bitmap is empty.
  }
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
// traversal. Used exclusively by the root marking scan.
//
// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
// address will be visited by the traversal. If the callback sets a bit for an address below the
// finger, this address will not be visited (typically such an address would be placed on the
// marking stack).
void HeapBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  CHECK_LE(scan_begin, scan_end);
  CHECK_GE(scan_begin, heap_begin_);
  size_t start = HB_OFFSET_TO_INDEX(scan_begin - heap_begin_);
  if (scan_end < heap_end_) {
    // The end of the range we're scanning is below the current bitmap maximum, so scan up to that
    // point and don't recompute 'end' on each iteration.
    size_t end = HB_OFFSET_TO_INDEX(scan_end - heap_begin_ - 1);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
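        // The finger is the first heap address covered by the next bitmap word, i.e. the lowest
        // address this traversal has not yet examined.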
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
    }
  } else {
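    // scan_end is at or beyond the current bitmap maximum, so the callback may set bits past the
    // current 'end'; recompute it after every word (see below).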
    size_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
      // Update 'end' in case the callback modified the bitmap.
      end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    }
  }
}

// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
void HeapBitmap::SweepWalk(const HeapBitmap& live_bitmap,
                           const HeapBitmap& mark_bitmap,
                           uintptr_t sweep_begin, uintptr_t sweep_end,
                           HeapBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
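  // sweep_end is treated as exclusive: convert it to an inclusive bound and clamp it to the live
  // bitmap's current maximum so the loop below never reads past the live data.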
  sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
  if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
    // Easy case; both are obviously empty.
    // TODO: this should never happen
    return;
  }
  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than an Object**?
  std::vector<Object*> pointer_buf(4 * kBitsPerWord);
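  // pb points at the next free slot in pointer_buf; garbage pointers accumulate there until the
  // buffer is flushed to the callback.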
  Object** pb = &pointer_buf[0];
  size_t start = HB_OFFSET_TO_INDEX(sweep_begin - live_bitmap.heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(sweep_end - live_bitmap.heap_begin_);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + live_bitmap.heap_begin_;
      while (garbage != 0) {
        int shift = CLZ(garbage);
        garbage &= ~(high_bit >> shift);
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      }
      // Make sure that there are always enough slots available for an
      // entire word of set bits.
      if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

}  // namespace art

// Support needed for in-order traversal.
#include "object.h"
#include "object_utils.h"

namespace art {

static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                              void* arg);

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
static void WalkInstanceFields(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                               Class* klass, void* arg) {
  // Visit fields of parent classes first.
  Class* super = klass->GetSuperClass();
  if (super != NULL) {
    WalkInstanceFields(visited, callback, obj, super, arg);
  }
  // Walk instance fields
  ObjectArray<Field>* fields = klass->GetIFields();
  if (fields != NULL) {
    for (int32_t i = 0; i < fields->GetLength(); i++) {
      Field* field = fields->Get(i);
      FieldHelper fh(field);
      if (!fh.GetType()->IsPrimitive()) {
        Object* value = field->GetObj(obj);
        if (value != NULL) {
          WalkFieldsInOrder(visited, callback, value, arg);
        }
      }
    }
  }
}

// For an unvisited object, visit it then all its children found via fields.
static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                              void* arg) {
  if (visited->Test(obj)) {
    return;
  }
  // Visit the object itself.
  (*callback)(obj, arg);
  visited->Set(obj);
  // Walk instance fields of all objects
  Class* klass = obj->GetClass();
  WalkInstanceFields(visited, callback, obj, klass, arg);
  // Walk static fields of a Class
  if (obj->IsClass()) {
    ObjectArray<Field>* fields = klass->GetSFields();
    if (fields != NULL) {
      for (int32_t i = 0; i < fields->GetLength(); i++) {
        Field* field = fields->Get(i);
        FieldHelper fh(field);
        if (!fh.GetType()->IsPrimitive()) {
          Object* value = field->GetObj(NULL);
          if (value != NULL) {
            WalkFieldsInOrder(visited, callback, value, arg);
          }
        }
      }
    }
  } else if (obj->IsObjectArray()) {
    // Walk elements of an object array
    ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
    int32_t length = obj_array->GetLength();
    for (int32_t i = 0; i < length; i++) {
      Object* value = obj_array->Get(i);
      if (value != NULL) {
        WalkFieldsInOrder(visited, callback, value, arg);
      }
    }
  }
}

// Visits set bits with an in-order traversal. The callback is not permitted to change the bitmap
// bits or max during the traversal.
void HeapBitmap::InOrderWalk(HeapBitmap::Callback* callback, void* arg) {
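  // A scratch bitmap tracks which objects have already been visited, so WalkFieldsInOrder walks
  // each reachable object at most once.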
  UniquePtr<HeapBitmap> visited(Create("bitmap for in-order walk",
                                       reinterpret_cast<byte*>(heap_begin_),
                                       HB_INDEX_TO_OFFSET(bitmap_size_ / kWordSize)));
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        WalkFieldsInOrder(visited.get(), callback, obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

}  // namespace art