Each space has its own bitmap(s)
Each alloc space now has one mark bitmap and one live bitmap. Each image space has only a live bitmap (live == marked for image objects, so no separate mark bitmap is needed).
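The resulting ownership model, roughly (an illustrative sketch, not the verbatim
declarations; see space.h and the rewritten heap_bitmap.h below):

    class Space {                        // each space owns its bitmaps
      SpaceBitmap* GetLiveBitmap();      // always non-NULL
      SpaceBitmap* GetMarkBitmap();      // for image spaces, live == marked
    };

    class HeapBitmap {                   // heap-wide view over per-space bitmaps
      bool Test(const Object* obj);      // dispatches to the covering SpaceBitmap
      std::vector<SpaceBitmap*> bitmaps_;
    };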
Change-Id: I2e919d1bd7d9f4d35d0e95ed83a58df6f754df6e
diff --git a/src/card_table.cc b/src/card_table.cc
index 0f295b2..758a889 100644
--- a/src/card_table.cc
+++ b/src/card_table.cc
@@ -117,9 +117,11 @@
}
}
-void CardTable::Scan(HeapBitmap* bitmap, byte* heap_begin, byte* heap_end, Callback* visitor, void* arg) const {
- byte* card_cur = CardFromAddr(heap_begin);
- byte* card_end = CardFromAddr(heap_end);
+void CardTable::Scan(SpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, Callback* visitor, void* arg) const {
+ DCHECK(bitmap->HasAddress(scan_begin));
+ DCHECK(bitmap->HasAddress(scan_end - 1)); // scan_end is the byte after the last byte we scan.
+ byte* card_cur = CardFromAddr(scan_begin);
+ byte* card_end = CardFromAddr(scan_end);
while (card_cur < card_end) {
while (card_cur < card_end && *card_cur == GC_CARD_CLEAN) {
card_cur++;
diff --git a/src/card_table.h b/src/card_table.h
index 8fde9fe..ea46cfe 100644
--- a/src/card_table.h
+++ b/src/card_table.h
@@ -25,7 +25,7 @@
namespace art {
class Heap;
-class HeapBitmap;
+class SpaceBitmap;
class Object;
#define GC_CARD_SHIFT 7
@@ -72,7 +72,7 @@
// For every dirty card between begin and end invoke the visitor with the specified argument
typedef void Callback(Object* obj, void* arg);
- void Scan(HeapBitmap* bitmap, byte* begin, byte* end, Callback* visitor, void* arg) const;
+ void Scan(SpaceBitmap* bitmap, byte* begin, byte* end, Callback* visitor, void* arg) const;
// Assertion used to check the given address is covered by the card table
void CheckAddrIsInCardTable(const byte* addr) const;
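With Scan now parameterized on a SpaceBitmap, callers are expected to scan one
space at a time against that space's own bitmap; the DCHECKs above enforce that
[scan_begin, scan_end) lies within the bitmap. A sketch of the intended call
pattern (illustrative; it mirrors the mark_sweep.cc hunks below, with visitor
and arg standing in for a real callback):

    CardTable* card_table = heap->GetCardTable();
    const std::vector<Space*>& spaces = heap->GetSpaces();
    for (size_t i = 0; i < spaces.size(); ++i) {
      // Each space is scanned against its own bitmap, never across a space boundary.
      card_table->Scan(spaces[i]->GetMarkBitmap(), spaces[i]->Begin(),
                       spaces[i]->End(), visitor, arg);
    }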
diff --git a/src/class_linker.cc b/src/class_linker.cc
index 2b20aff..dfed6f7 100644
--- a/src/class_linker.cc
+++ b/src/class_linker.cc
@@ -48,6 +48,7 @@
#include "scoped_jni_thread_state.h"
#include "ScopedLocalRef.h"
#include "space.h"
+#include "space_bitmap.h"
#include "stack_indirect_reference_table.h"
#include "stl_util.h"
#include "thread.h"
@@ -916,11 +917,11 @@
AppendToBootClassPath(*dex_file, dex_cache);
}
- HeapBitmap* heap_bitmap = heap->GetLiveBits();
- DCHECK(heap_bitmap != NULL);
-
// reinit classes_ table
- heap_bitmap->Walk(InitFromImageCallback, this);
+ const Spaces& vec = heap->GetSpaces();
+ for (Spaces::const_iterator cur = vec.begin(); cur != vec.end(); ++cur) {
+ (*cur)->GetLiveBitmap()->Walk(InitFromImageCallback, this);
+ }
// reinit class_roots_
Object* class_roots_object =
diff --git a/src/compiler.cc b/src/compiler.cc
index aad6ad3..ceb9d11 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -783,7 +783,7 @@
}
}
} else {
- if (Runtime::Current()->GetHeap()->GetImageSpace()->Contains(method)) {
+ if (Runtime::Current()->GetHeap()->FindSpaceFromObject(method)->IsImageSpace()) {
direct_method = reinterpret_cast<uintptr_t>(method);
}
direct_code = reinterpret_cast<uintptr_t>(method->GetCode());
diff --git a/src/debugger.cc b/src/debugger.cc
index e5d3780..cd52f82 100644
--- a/src/debugger.cc
+++ b/src/debugger.cc
@@ -2909,7 +2909,13 @@
UNIMPLEMENTED(WARNING) << "Native heap send heap segments";
} else {
Heap* heap = Runtime::Current()->GetHeap();
- heap->GetAllocSpace()->Walk(HeapChunkContext::HeapChunkCallback, &context);
+ typedef std::vector<Space*> SpaceVec;
+ const SpaceVec& spaces = heap->GetSpaces();
+ for (SpaceVec::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ (*cur)->AsAllocSpace()->Walk(HeapChunkContext::HeapChunkCallback, &context);
+ }
+ }
}
// Finally, send a heap end chunk.
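This iterate-and-filter idiom recurs throughout the change wherever an operation
applies only to alloc spaces. A hypothetical helper that captures it (not part of
this change; Spaces is the std::vector<Space*> typedef added to heap.h below):

    // Hypothetical convenience wrapper, shown only to name the idiom.
    template <typename Fn>
    static void ForEachAllocSpace(Heap* heap, Fn fn) {
      const Spaces& spaces = heap->GetSpaces();
      // TODO: C++0x auto
      for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
        if ((*cur)->IsAllocSpace()) {
          fn((*cur)->AsAllocSpace());
        }
      }
    }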
diff --git a/src/heap.cc b/src/heap.cc
index c6dfdf7..acb80e7 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -137,10 +137,7 @@
Heap::Heap(size_t initial_size, size_t growth_limit, size_t capacity,
const std::string& original_image_file_name)
: lock_(NULL),
- image_space_(NULL),
alloc_space_(NULL),
- mark_bitmap_(NULL),
- live_bitmap_(NULL),
card_table_(NULL),
card_marking_disabled_(false),
is_gc_running_(false),
@@ -167,74 +164,69 @@
Space* first_space = NULL;
Space* last_space = NULL;
+ live_bitmap_.reset(new HeapBitmap);
+ mark_bitmap_.reset(new HeapBitmap);
+
// Requested begin for the alloc space, to follow the mapped image and oat files
byte* requested_begin = NULL;
std::string image_file_name(original_image_file_name);
if (!image_file_name.empty()) {
+ Space* image_space = NULL;
+
if (OS::FileExists(image_file_name.c_str())) {
// If the /system file exists, it should be up-to-date, don't try to generate
- image_space_ = Space::CreateImageSpace(image_file_name);
+ image_space = Space::CreateImageSpace(image_file_name);
} else {
// If the /system file didn't exist, we need to use one from the art-cache.
// If the cache file exists, try to open, but if it fails, regenerate.
// If it does not exist, generate.
image_file_name = GetArtCacheFilenameOrDie(image_file_name);
if (OS::FileExists(image_file_name.c_str())) {
- image_space_ = Space::CreateImageSpace(image_file_name);
+ image_space = Space::CreateImageSpace(image_file_name);
}
- if (image_space_ == NULL) {
+ if (image_space == NULL) {
if (!GenerateImage(image_file_name)) {
LOG(FATAL) << "Failed to generate image: " << image_file_name;
}
- image_space_ = Space::CreateImageSpace(image_file_name);
+ image_space = Space::CreateImageSpace(image_file_name);
}
}
- if (image_space_ == NULL) {
+ if (image_space == NULL) {
LOG(FATAL) << "Failed to create space from " << image_file_name;
}
- AddSpace(image_space_);
- UpdateFirstAndLastSpace(&first_space, &last_space, image_space_);
+ AddSpace(image_space);
+ UpdateFirstAndLastSpace(&first_space, &last_space, image_space);
// Oat files referenced by image files immediately follow them in memory; ensure the alloc
// space isn't going to get in the middle
- byte* oat_end_addr = image_space_->GetImageHeader().GetOatEnd();
- CHECK(oat_end_addr > image_space_->End());
+ byte* oat_end_addr = GetImageSpace()->GetImageHeader().GetOatEnd();
+ CHECK(oat_end_addr > GetImageSpace()->End());
if (oat_end_addr > requested_begin) {
requested_begin = reinterpret_cast<byte*>(RoundUp(reinterpret_cast<uintptr_t>(oat_end_addr),
kPageSize));
}
}
- alloc_space_ = Space::CreateAllocSpace("alloc space", initial_size, growth_limit, capacity,
- requested_begin);
+ UniquePtr<AllocSpace> alloc_space(Space::CreateAllocSpace(
+ "alloc space", initial_size, growth_limit, capacity, requested_begin));
+ alloc_space_ = alloc_space.release();
+ AddSpace(alloc_space_);
if (alloc_space_ == NULL) {
LOG(FATAL) << "Failed to create alloc space";
}
- AddSpace(alloc_space_);
+
UpdateFirstAndLastSpace(&first_space, &last_space, alloc_space_);
byte* heap_begin = first_space->Begin();
size_t heap_capacity = (last_space->Begin() - first_space->Begin()) + last_space->NonGrowthLimitCapacity();
- // Allocate the initial live bitmap.
- UniquePtr<HeapBitmap> live_bitmap(HeapBitmap::Create("dalvik-bitmap-1", heap_begin, heap_capacity));
- if (live_bitmap.get() == NULL) {
- LOG(FATAL) << "Failed to create live bitmap";
- }
-
// Mark image objects in the live bitmap
for (size_t i = 0; i < spaces_.size(); ++i) {
Space* space = spaces_[i];
if (space->IsImageSpace()) {
- space->AsImageSpace()->RecordImageAllocations(live_bitmap.get());
+ space->AsImageSpace()->RecordImageAllocations(space->GetLiveBitmap());
}
}
- // Allocate the initial mark bitmap.
- UniquePtr<HeapBitmap> mark_bitmap(HeapBitmap::Create("dalvik-bitmap-2", heap_begin, heap_capacity));
- if (mark_bitmap.get() == NULL) {
- LOG(FATAL) << "Failed to create mark bitmap";
- }
-
// Allocate the card table.
UniquePtr<CardTable> card_table(CardTable::Create(heap_begin, heap_capacity));
if (card_table.get() == NULL) {
@@ -246,8 +238,6 @@
mod_union_table->Init();
mod_union_table_ = mod_union_table;
- live_bitmap_ = live_bitmap.release();
- mark_bitmap_ = mark_bitmap.release();
card_table_ = card_table.release();
num_bytes_allocated_ = 0;
@@ -269,6 +259,12 @@
}
void Heap::AddSpace(Space* space) {
+ DCHECK(space->GetLiveBitmap() != NULL);
+ live_bitmap_->AddSpaceBitmap(space->GetLiveBitmap());
+
+ DCHECK(space->GetMarkBitmap() != NULL);
+ mark_bitmap_->AddSpaceBitmap(space->GetMarkBitmap());
+
spaces_.push_back(space);
}
@@ -279,8 +275,6 @@
// all daemon threads are suspended, and we also know that the threads list have been deleted, so
// those threads can't resume. We're the only running thread, and we can do whatever we like...
STLDeleteElements(&spaces_);
- delete mark_bitmap_;
- delete live_bitmap_;
delete card_table_;
delete mod_union_table_;
delete mark_stack_;
@@ -288,6 +282,31 @@
delete lock_;
}
+Space* Heap::FindSpaceFromObject(const Object* obj) const {
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->Contains(obj)) {
+ return *cur;
+ }
+ }
+ LOG(FATAL) << "object " << reinterpret_cast<const void*>(obj) << " not inside any spaces!";
+ return NULL;
+}
+
+ImageSpace* Heap::GetImageSpace() {
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->IsImageSpace()) {
+ return (*cur)->AsImageSpace();
+ }
+ }
+ return NULL;
+}
+
+AllocSpace* Heap::GetAllocSpace() {
+ return alloc_space_;
+}
+
static void MSpaceChunkCallback(void* start, void* end, size_t used_bytes, void* arg) {
size_t& max_contiguous_allocation = *reinterpret_cast<size_t*>(arg);
@@ -321,8 +340,7 @@
}
if (!is_gc_running_ && num_bytes_allocated_ >= concurrent_start_bytes_) {
- // The SirtRef is necessary since the calls in RequestConcurrentGC
- // are a safepoint.
+ // The SirtRef is necessary since the calls in RequestConcurrentGC are a safepoint.
SirtRef<Object> ref(obj);
RequestConcurrentGC();
}
@@ -331,7 +349,12 @@
}
total_bytes_free = GetFreeMemory();
max_contiguous_allocation = 0;
- GetAllocSpace()->Walk(MSpaceChunkCallback, &max_contiguous_allocation);
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ (*cur)->AsAllocSpace()->Walk(MSpaceChunkCallback, &max_contiguous_allocation);
+ }
+ }
}
std::string msg(StringPrintf("Failed to allocate a %zd-byte %s (%lld total bytes free; largest possible contiguous allocation %zd bytes)",
@@ -361,7 +384,7 @@
bool Heap::IsLiveObjectLocked(const Object* obj) {
lock_->AssertHeld();
- return IsHeapAddress(obj) && live_bitmap_->Test(obj);
+ return IsHeapAddress(obj) && GetLiveBitmap()->Test(obj);
}
#if VERIFY_OBJECT_ENABLED
@@ -381,7 +404,7 @@
if (obj != NULL) {
if (!IsAligned<kObjectAlignment>(obj)) {
LOG(FATAL) << "Object isn't aligned: " << obj;
- } else if (!live_bitmap_->Test(obj)) {
+ } else if (!GetLiveBitmap()->Test(obj)) {
LOG(FATAL) << "Object is dead: " << obj;
}
// Ignore early dawn of the universe verifications
@@ -393,7 +416,7 @@
LOG(FATAL) << "Null class in object: " << obj;
} else if (!IsAligned<kObjectAlignment>(c)) {
LOG(FATAL) << "Class isn't aligned: " << c << " in object: " << obj;
- } else if (!live_bitmap_->Test(c)) {
+ } else if (!GetLiveBitmap()->Test(c)) {
LOG(FATAL) << "Class of object is dead: " << c << " in object: " << obj;
}
// Check obj.getClass().getClass() == obj.getClass().getClass().getClass()
@@ -415,7 +438,7 @@
void Heap::VerifyHeap() {
ScopedHeapLock heap_lock;
- live_bitmap_->Walk(Heap::VerificationCallback, this);
+ GetLiveBitmap()->Walk(Heap::VerificationCallback, this);
}
void Heap::RecordAllocationLocked(AllocSpace* space, const Object* obj) {
@@ -467,13 +490,28 @@
Object* Heap::AllocateLocked(size_t size) {
lock_->AssertHeld();
- DCHECK(alloc_space_ != NULL);
- AllocSpace* space = alloc_space_;
- Object* obj = AllocateLocked(space, size);
+
+ // Try the default alloc space first.
+ Object* obj = AllocateLocked(alloc_space_, size);
if (obj != NULL) {
- RecordAllocationLocked(space, obj);
+ RecordAllocationLocked(alloc_space_, obj);
+ return obj;
}
- return obj;
+
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->IsAllocSpace() && *cur != alloc_space_) {
+ AllocSpace* space = (*cur)->AsAllocSpace();
+ Object* obj = AllocateLocked(space, size);
+ if (obj != NULL) {
+ RecordAllocationLocked(space, obj);
+ // Switch to this alloc space since the old one did not have enough storage.
+ alloc_space_ = space;
+ return obj;
+ }
+ }
+ }
+ return NULL;
}
Object* Heap::AllocateLocked(AllocSpace* space, size_t alloc_size) {
@@ -553,15 +591,22 @@
}
int64_t Heap::GetMaxMemory() {
- return alloc_space_->Capacity();
+ size_t total = 0;
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ total += (*cur)->AsAllocSpace()->Capacity();
+ }
+ }
+ return total;
}
int64_t Heap::GetTotalMemory() {
- return alloc_space_->Capacity();
+ return GetMaxMemory();
}
int64_t Heap::GetFreeMemory() {
- return alloc_space_->Capacity() - num_bytes_allocated_;
+ return GetMaxMemory() - num_bytes_allocated_;
}
class InstanceCounter {
@@ -600,7 +645,7 @@
int64_t Heap::CountInstances(Class* c, bool count_assignable) {
ScopedHeapLock heap_lock;
InstanceCounter counter(c, count_assignable);
- live_bitmap_->Walk(InstanceCounter::Callback, &counter);
+ GetLiveBitmap()->Walk(InstanceCounter::Callback, &counter);
return counter.GetCount();
}
@@ -794,13 +839,20 @@
}
void Heap::SetIdealFootprint(size_t max_allowed_footprint) {
- size_t alloc_space_capacity = alloc_space_->Capacity();
- if (max_allowed_footprint > alloc_space_capacity) {
- VLOG(gc) << "Clamp target GC heap from " << PrettySize(max_allowed_footprint)
- << " to " << PrettySize(alloc_space_capacity);
- max_allowed_footprint = alloc_space_capacity;
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces_.begin(); cur != spaces_.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ AllocSpace* alloc_space = (*cur)->AsAllocSpace();
+ // TODO: Behavior for multiple alloc spaces?
+ size_t alloc_space_capacity = alloc_space->Capacity();
+ if (max_allowed_footprint > alloc_space_capacity) {
+ VLOG(gc) << "Clamp target GC heap from " << PrettySize(max_allowed_footprint)
+ << " to " << PrettySize(alloc_space_capacity);
+ max_allowed_footprint = alloc_space_capacity;
+ }
+ alloc_space->SetFootprintLimit(max_allowed_footprint);
+ }
}
- alloc_space_->SetFootprintLimit(max_allowed_footprint);
}
// kHeapIdealFree is the ideal maximum free size, when we grow the heap for utilization.
@@ -985,10 +1037,10 @@
CollectGarbageInternal(true, false);
}
-void Heap::Trim() {
+void Heap::Trim(AllocSpace* alloc_space) {
lock_->AssertHeld();
WaitForConcurrentGcToComplete();
- GetAllocSpace()->Trim();
+ alloc_space->Trim();
}
void Heap::RequestHeapTrim() {
diff --git a/src/heap.h b/src/heap.h
index 75362ce..132d5e5 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -27,6 +27,7 @@
#include "heap_bitmap.h"
#include "mutex.h"
#include "offsets.h"
+#include "safe_map.h"
#define VERIFY_OBJECT_ENABLED 0
@@ -44,6 +45,8 @@
class SpaceTest;
class Thread;
+typedef std::vector<Space*> Spaces;
+
class LOCKABLE Heap {
public:
static const size_t kInitialSize = 2 * MB;
@@ -137,14 +140,6 @@
return spaces_;
}
- HeapBitmap* GetLiveBits() {
- return live_bitmap_;
- }
-
- HeapBitmap* GetMarkBits() {
- return mark_bitmap_;
- }
-
void SetReferenceOffsets(MemberOffset reference_referent_offset,
MemberOffset reference_queue_offset,
MemberOffset reference_queueNext_offset,
@@ -213,16 +208,6 @@
size_t GetBytesAllocated() { return num_bytes_allocated_; }
size_t GetObjectsAllocated() { return num_objects_allocated_; }
- ImageSpace* GetImageSpace() {
- CHECK(image_space_ != NULL);
- return image_space_;
- }
-
- AllocSpace* GetAllocSpace() {
- CHECK(alloc_space_ != NULL);
- return alloc_space_;
- }
-
size_t GetConcurrentStartSize() const { return concurrent_start_size_; }
void SetConcurrentStartSize(size_t size) {
@@ -235,9 +220,26 @@
concurrent_min_free_ = size;
}
+ // Finds the space that contains an object's address.
+ // This is probably slow; TODO: use a better data structure, such as a binary tree.
+ Space* FindSpaceFromObject(const Object*) const;
+
void DumpForSigQuit(std::ostream& os);
- void Trim();
+ void Trim(AllocSpace* alloc_space);
+
+ HeapBitmap* GetLiveBitmap() {
+ return live_bitmap_.get();
+ }
+
+ HeapBitmap* GetMarkBitmap() {
+ return mark_bitmap_.get();
+ }
+
+ // Assumes there is only one image space.
+ // DEPRECATED: should be removed in the "near" future once support for multiple image spaces is added.
+ ImageSpace* GetImageSpace();
+ AllocSpace* GetAllocSpace();
private:
// Allocates uninitialized storage.
@@ -276,17 +278,11 @@
Mutex* lock_;
ConditionVariable* condition_;
- std::vector<Space*> spaces_;
+ Spaces spaces_;
- ImageSpace* image_space_;
-
- // default Space for allocations
+ // The alloc space which we are currently allocating into.
AllocSpace* alloc_space_;
- HeapBitmap* mark_bitmap_;
-
- HeapBitmap* live_bitmap_;
-
// TODO: Reduce memory usage, this bitmap currently takes 1 bit per 8 bytes
// of image space.
ModUnionTable* mod_union_table_;
@@ -300,11 +296,14 @@
// True while the garbage collector is running.
volatile bool is_gc_running_;
- // Bytes until concurrent GC
+ // Bytes until concurrent GC starts.
size_t concurrent_start_bytes_;
size_t concurrent_start_size_;
size_t concurrent_min_free_;
+ UniquePtr<HeapBitmap> live_bitmap_;
+ UniquePtr<HeapBitmap> mark_bitmap_;
+
// True while the garbage collector is trying to signal the GC daemon thread.
// This flag is needed to prevent recursion from occurring when the JNI calls
// allocate memory and request another GC.
diff --git a/src/heap_bitmap.cc b/src/heap_bitmap.cc
index fc722f5..c7b2f6c 100644
--- a/src/heap_bitmap.cc
+++ b/src/heap_bitmap.cc
@@ -1,313 +1,2 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
#include "heap_bitmap.h"
-#include "logging.h"
-#include "UniquePtr.h"
-#include "utils.h"
-
-namespace art {
-
-HeapBitmap* HeapBitmap::Create(const char* name, byte* heap_begin, size_t heap_capacity) {
- CHECK(heap_begin != NULL);
- // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
- size_t bitmap_size = HB_OFFSET_TO_INDEX(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
- UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name, NULL, bitmap_size, PROT_READ | PROT_WRITE));
- if (mem_map.get() == NULL) {
- LOG(ERROR) << "Failed to allocate bitmap " << name;
- return NULL;
- }
- word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
- return new HeapBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
-}
-
-// Clean up any resources associated with the bitmap.
-HeapBitmap::~HeapBitmap() {}
-
-// Fill the bitmap with zeroes. Returns the bitmap's memory to the
-// system as a side-effect.
-void HeapBitmap::Clear() {
- if (bitmap_begin_ != NULL) {
- // This returns the memory to the system. Successive page faults
- // will return zeroed memory.
- int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
- if (result == -1) {
- PLOG(WARNING) << "madvise failed";
- }
- heap_end_ = heap_begin_ - 1;
- }
-}
-
-// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
-// even if a bit has not been set for it.
-bool HeapBitmap::HasAddress(const void* obj) const {
- if (obj != NULL) {
- const uintptr_t offset = (uintptr_t)obj - heap_begin_;
- const size_t index = HB_OFFSET_TO_INDEX(offset);
- return index < bitmap_size_ / kWordSize;
- }
- return false;
-}
-
-void HeapBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
- size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
- size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
- for (size_t i = start; i <= end; i++) {
- word w = bitmap_begin_[i];
- if (w != 0) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- while (w != 0) {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- (*visitor)(obj, arg);
- w &= ~(high_bit >> shift);
- }
- }
- }
-}
-
-// Visits set bits in address order. The callback is not permitted to
-// change the bitmap bits or max during the traversal.
-void HeapBitmap::Walk(HeapBitmap::Callback* callback, void* arg) {
- CHECK(bitmap_begin_ != NULL);
- CHECK(callback != NULL);
- if (heap_end_ < heap_begin_) {
- return; // Bitmap is empty.
- }
- uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
- for (uintptr_t i = 0; i <= end; ++i) {
- word w = bitmap_begin_[i];
- if (UNLIKELY(w != 0)) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- while (w != 0) {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- (*callback)(obj, arg);
- w &= ~(high_bit >> shift);
- }
- }
- }
-}
-
-// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
- // traversal. Used by the root marking scan exclusively.
-//
-// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
-// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
-// address will be visited by the traversal. If the callback sets a bit for an address below the
- // finger, this address will not be visited (typically such an address would be placed on the
-// marking stack).
-void HeapBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
- CHECK(bitmap_begin_ != NULL);
- CHECK(callback != NULL);
- CHECK_LE(scan_begin, scan_end);
- CHECK_GE(scan_begin, heap_begin_);
- size_t start = HB_OFFSET_TO_INDEX(scan_begin - heap_begin_);
- if (scan_end < heap_end_) {
- // The end of the space we're looking at is before the current maximum bitmap PC, scan to that
- // and don't recompute end on each iteration
- size_t end = HB_OFFSET_TO_INDEX(scan_end - heap_begin_ - 1);
- for (size_t i = start; i <= end; i++) {
- word w = bitmap_begin_[i];
- if (UNLIKELY(w != 0)) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
- while (w != 0) {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- (*callback)(obj, finger, arg);
- w &= ~(high_bit >> shift);
- }
- }
- }
- } else {
- size_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
- for (size_t i = start; i <= end; i++) {
- word w = bitmap_begin_[i];
- if (UNLIKELY(w != 0)) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
- while (w != 0) {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- (*callback)(obj, finger, arg);
- w &= ~(high_bit >> shift);
- }
- }
- // update 'end' in case callback modified bitmap
- end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
- }
- }
-}
-
-// Walk through the bitmaps in increasing address order, and find the
-// object pointers that correspond to garbage objects. Call
-// <callback> zero or more times with lists of these object pointers.
-//
-// The callback is not permitted to increase the max of either bitmap.
-void HeapBitmap::SweepWalk(const HeapBitmap& live_bitmap,
- const HeapBitmap& mark_bitmap,
- uintptr_t sweep_begin, uintptr_t sweep_end,
- HeapBitmap::SweepCallback* callback, void* arg) {
- CHECK(live_bitmap.bitmap_begin_ != NULL);
- CHECK(mark_bitmap.bitmap_begin_ != NULL);
- CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
- CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
- CHECK(callback != NULL);
- CHECK_LE(sweep_begin, sweep_end);
- CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
- sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
- if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
- // Easy case; both are obviously empty.
- // TODO: this should never happen
- return;
- }
- // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than a Object**?
- std::vector<Object*> pointer_buf(4 * kBitsPerWord);
- Object** pb = &pointer_buf[0];
- size_t start = HB_OFFSET_TO_INDEX(sweep_begin - live_bitmap.heap_begin_);
- size_t end = HB_OFFSET_TO_INDEX(sweep_end - live_bitmap.heap_begin_);
- word* live = live_bitmap.bitmap_begin_;
- word* mark = mark_bitmap.bitmap_begin_;
- for (size_t i = start; i <= end; i++) {
- word garbage = live[i] & ~mark[i];
- if (UNLIKELY(garbage != 0)) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + live_bitmap.heap_begin_;
- while (garbage != 0) {
- int shift = CLZ(garbage);
- garbage &= ~(high_bit >> shift);
- *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- }
- // Make sure that there are always enough slots available for an
- // entire word of one bits.
- if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
- (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
- pb = &pointer_buf[0];
- }
- }
- }
- if (pb > &pointer_buf[0]) {
- (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
- }
-}
-
-} // namespace art
-
-// Support needed for in order traversal
-#include "object.h"
-#include "object_utils.h"
-
-namespace art {
-
-static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
- void* arg);
-
-// Walk instance fields of the given Class. Separate function to allow recursion on the super
-// class.
-static void WalkInstanceFields(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
- Class* klass, void* arg) {
- // Visit fields of parent classes first.
- Class* super = klass->GetSuperClass();
- if (super != NULL) {
- WalkInstanceFields(visited, callback, obj, super, arg);
- }
- // Walk instance fields
- ObjectArray<Field>* fields = klass->GetIFields();
- if (fields != NULL) {
- for (int32_t i = 0; i < fields->GetLength(); i++) {
- Field* field = fields->Get(i);
- FieldHelper fh(field);
- if (!fh.GetType()->IsPrimitive()) {
- Object* value = field->GetObj(obj);
- if (value != NULL) {
- WalkFieldsInOrder(visited, callback, value, arg);
- }
- }
- }
- }
-}
-
-// For an unvisited object, visit it then all its children found via fields.
-static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
- void* arg) {
- if (visited->Test(obj)) {
- return;
- }
- // visit the object itself
- (*callback)(obj, arg);
- visited->Set(obj);
- // Walk instance fields of all objects
- Class* klass = obj->GetClass();
- WalkInstanceFields(visited, callback, obj, klass, arg);
- // Walk static fields of a Class
- if (obj->IsClass()) {
- ObjectArray<Field>* fields = klass->GetSFields();
- if (fields != NULL) {
- for (int32_t i = 0; i < fields->GetLength(); i++) {
- Field* field = fields->Get(i);
- FieldHelper fh(field);
- if (!fh.GetType()->IsPrimitive()) {
- Object* value = field->GetObj(NULL);
- if (value != NULL) {
- WalkFieldsInOrder(visited, callback, value, arg);
- }
- }
- }
- }
- } else if (obj->IsObjectArray()) {
- // Walk elements of an object array
- ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
- int32_t length = obj_array->GetLength();
- for (int32_t i = 0; i < length; i++) {
- Object* value = obj_array->Get(i);
- if (value != NULL) {
- WalkFieldsInOrder(visited, callback, value, arg);
- }
- }
- }
-}
-
-// Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
-// bits or max during the traversal.
-void HeapBitmap::InOrderWalk(HeapBitmap::Callback* callback, void* arg) {
- UniquePtr<HeapBitmap> visited(Create("bitmap for in-order walk",
- reinterpret_cast<byte*>(heap_begin_),
- HB_INDEX_TO_OFFSET(bitmap_size_ / kWordSize)));
- CHECK(bitmap_begin_ != NULL);
- CHECK(callback != NULL);
- uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
- for (uintptr_t i = 0; i <= end; ++i) {
- word w = bitmap_begin_[i];
- if (UNLIKELY(w != 0)) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- while (w != 0) {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- WalkFieldsInOrder(visited.get(), callback, obj, arg);
- w &= ~(high_bit >> shift);
- }
- }
- }
-}
-
-} // namespace art
diff --git a/src/heap_bitmap.h b/src/heap_bitmap.h
index 34c11b9..e2109d6 100644
--- a/src/heap_bitmap.h
+++ b/src/heap_bitmap.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008 The Android Open Source Project
+ * Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,175 +17,65 @@
#ifndef ART_SRC_HEAP_BITMAP_H_
#define ART_SRC_HEAP_BITMAP_H_
-#include <limits.h>
-#include <stdint.h>
-
-#include "UniquePtr.h"
-#include "globals.h"
-#include "logging.h"
-#include "mem_map.h"
-#include "utils.h"
+#include "space_bitmap.h"
namespace art {
+ class Heap;
+ class SpaceBitmap;
-class Object;
-
-// <offset> is the difference from .base to a pointer address.
-// <index> is the index of .bits that contains the bit representing
-// <offset>.
-#define HB_OFFSET_TO_INDEX(offset_) \
- ((offset_) / kAlignment / kBitsPerWord)
-#define HB_INDEX_TO_OFFSET(index_) \
- ((index_) * kAlignment * kBitsPerWord)
-
-#define HB_OFFSET_TO_BYTE_INDEX(offset_) \
- (HB_OFFSET_TO_INDEX(offset_) * sizeof(*(reinterpret_cast<HeapBitmap*>(0))->words_))
-
-// Pack the bits in backwards so they come out in address order
-// when using CLZ.
-#define HB_OFFSET_TO_MASK(offset_) \
- (1 << (31-(((uintptr_t)(offset_) / kAlignment) % kBitsPerWord)))
-
-class HeapBitmap {
- public:
- static const size_t kAlignment = 8;
-
- typedef void Callback(Object* obj, void* arg);
-
- typedef void ScanCallback(Object* obj, void* finger, void* arg);
-
- typedef void SweepCallback(size_t ptr_count, Object** ptrs, void* arg);
-
- // Initialize a HeapBitmap so that it points to a bitmap large enough to cover a heap at
- // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
- static HeapBitmap* Create(const char* name, byte* heap_begin, size_t heap_capacity);
-
- ~HeapBitmap();
-
- inline void Set(const Object* obj) {
- Modify(obj, true);
- }
-
- inline void Clear(const Object* obj) {
- Modify(obj, false);
- }
-
- void Clear();
-
- inline bool Test(const Object* obj) {
- uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
- DCHECK(HasAddress(obj)) << obj;
- DCHECK(bitmap_begin_ != NULL);
- DCHECK_GE(addr, heap_begin_);
- if (addr <= heap_end_) {
- const uintptr_t offset = addr - heap_begin_;
- return (bitmap_begin_[HB_OFFSET_TO_INDEX(offset)] & HB_OFFSET_TO_MASK(offset)) != 0;
- } else {
- return false;
- }
- }
-
- bool HasAddress(const void* addr) const;
-
- void VisitRange(uintptr_t base, uintptr_t max, Callback* visitor, void* arg) const;
-
- class ClearVisitor {
+ class HeapBitmap {
public:
- explicit ClearVisitor(HeapBitmap* const bitmap)
- : bitmap_(bitmap) {
+ bool Test(const Object* obj) {
+ SpaceBitmap* bitmap = GetSpaceBitmap(obj);
+ DCHECK(bitmap != NULL);
+ return bitmap->Test(obj);
}
- void operator ()(Object* obj) const {
- bitmap_->Clear(obj);
+ void Clear(const Object* obj) {
+ SpaceBitmap* bitmap = GetSpaceBitmap(obj);
+ DCHECK(bitmap != NULL)
+ << "tried to clear object "
+ << reinterpret_cast<const void*>(obj)
+ << " which did not belong to any bitmaps";
+ return bitmap->Clear(obj);
}
+
+ void Set(const Object* obj) {
+ SpaceBitmap* bitmap = GetSpaceBitmap(obj);
+ DCHECK(bitmap != NULL)
+ << "tried to mark object "
+ << reinterpret_cast<const void*>(obj)
+ << " which did not belong to any bitmaps";
+ bitmap->Set(obj);
+ }
+
+ SpaceBitmap* GetSpaceBitmap(const Object* obj) {
+ // TODO: C++0x auto
+ for (BitmapVec::iterator cur = bitmaps_.begin(); cur != bitmaps_.end(); ++cur) {
+ if ((*cur)->HasAddress(obj)) {
+ return *cur;
+ }
+ }
+ return NULL;
+ }
+
+ void Walk(SpaceBitmap::Callback* callback, void* arg) {
+ // TODO: C++0x auto
+ for (BitmapVec::iterator cur = bitmaps_.begin(); cur != bitmaps_.end(); ++cur) {
+ (*cur)->Walk(callback, arg);
+ }
+ }
+
private:
- HeapBitmap* const bitmap_;
+ void AddSpaceBitmap(SpaceBitmap* space) {
+ bitmaps_.push_back(space);
+ }
+
+ typedef std::vector<SpaceBitmap*> BitmapVec;
+ BitmapVec bitmaps_;
+
+ friend class Heap;
};
-
- template <typename Visitor>
- void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
- for (; visit_begin < visit_end; visit_begin += kAlignment ) {
- visitor(reinterpret_cast<Object*>(visit_begin));
- }
- }
-
- template <typename Visitor>
- void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
- size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
- size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
- for (size_t i = start; i <= end; i++) {
- word w = bitmap_begin_[i];
- if (w != 0) {
- word high_bit = 1 << (kBitsPerWord - 1);
- uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
- do {
- const int shift = CLZ(w);
- Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
- visitor(obj);
- w &= ~(high_bit >> shift);
- } while (w != 0);
- }
- }
- }
-
- void Walk(Callback* callback, void* arg);
-
- void InOrderWalk(HeapBitmap::Callback* callback, void* arg);
-
- void ScanWalk(uintptr_t base, uintptr_t max, ScanCallback* thunk, void* arg);
-
- static void SweepWalk(const HeapBitmap& live,
- const HeapBitmap& mark,
- uintptr_t base, uintptr_t max,
- SweepCallback* thunk, void* arg);
-
- private:
- // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
- // however, we document that this is expected on heap_end_
- HeapBitmap(const char* name, MemMap* mem_map, word* bitmap_begin, size_t bitmap_size, const void* heap_begin)
- : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
- heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)), heap_end_(heap_begin_ - 1),
- name_(name) {}
-
- inline void Modify(const Object* obj, bool do_set) {
- uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
- DCHECK_GE(addr, heap_begin_);
- const uintptr_t offset = addr - heap_begin_;
- const size_t index = HB_OFFSET_TO_INDEX(offset);
- const word mask = HB_OFFSET_TO_MASK(offset);
- DCHECK_LT(index, bitmap_size_ / kWordSize);
- if (do_set) {
- if (addr > heap_end_) {
- heap_end_ = addr;
- }
- bitmap_begin_[index] |= mask;
- } else {
- bitmap_begin_[index] &= ~mask;
- }
- }
-
- // Backing storage for bitmap.
- UniquePtr<MemMap> mem_map_;
-
- // This bitmap itself, word sized for efficiency in scanning.
- word* const bitmap_begin_;
-
- // Size of this bitmap.
- const size_t bitmap_size_;
-
- // The base address of the heap, which corresponds to the word containing the first bit in the
- // bitmap.
- const uintptr_t heap_begin_;
-
- // The highest pointer value ever returned by an allocation from
- // this heap. I.e., the highest address that may correspond to a
- // set bit. If there are no bits set, (heap_end_ < heap_begin_).
- uintptr_t heap_end_;
-
- // Name of this bitmap.
- const char* const name_;
-};
-
} // namespace art
#endif // ART_SRC_HEAP_BITMAP_H_
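Callers that previously held a single heap-wide bitmap keep working: HeapBitmap
fans out by address, so one handle still covers the whole heap as long as the
object lies inside some space. A minimal sketch (hypothetical helper; the
accessors are the ones used elsewhere in this change):

    // Hypothetical helper; assumes obj belongs to some space's bitmap range.
    static bool IsLive(const Object* obj) {
      Heap* heap = Runtime::Current()->GetHeap();
      return heap->GetLiveBitmap()->Test(obj);  // dispatches to the covering SpaceBitmap
    }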
diff --git a/src/heap_test.cc b/src/heap_test.cc
index d5c07da..48aa425 100644
--- a/src/heap_test.cc
+++ b/src/heap_test.cc
@@ -47,9 +47,9 @@
TEST_F(HeapTest, HeapBitmapCapacityTest) {
byte* heap_begin = reinterpret_cast<byte*>(0x1000);
- const size_t heap_capacity = HeapBitmap::kAlignment * (sizeof(intptr_t) * 8 + 1);
- UniquePtr<HeapBitmap> bitmap(HeapBitmap::Create("test-bitmap", heap_begin, heap_capacity));
- bitmap->Set(reinterpret_cast<const Object*>(&heap_begin[heap_capacity - HeapBitmap::kAlignment]));
+ const size_t heap_capacity = SpaceBitmap::kAlignment * (sizeof(intptr_t) * 8 + 1);
+ UniquePtr<SpaceBitmap> bitmap(SpaceBitmap::Create("test-bitmap", heap_begin, heap_capacity));
+ bitmap->Set(reinterpret_cast<const Object*>(&heap_begin[heap_capacity - SpaceBitmap::kAlignment]));
}
} // namespace art
diff --git a/src/hprof/hprof.cc b/src/hprof/hprof.cc
index 3e976ad..d806d71 100644
--- a/src/hprof/hprof.cc
+++ b/src/hprof/hprof.cc
@@ -48,6 +48,7 @@
#include "os.h"
#include "safe_map.h"
#include "scoped_heap_lock.h"
+#include "space.h"
#include "stringprintf.h"
#include "thread_list.h"
@@ -404,7 +405,7 @@
// Walk the roots and the heap.
current_record_.StartNewRecord(body_fp_, HPROF_TAG_HEAP_DUMP_SEGMENT, HPROF_TIME);
Runtime::Current()->VisitRoots(RootVisitor, this);
- Runtime::Current()->GetHeap()->GetLiveBits()->Walk(HeapBitmapCallback, this);
+ Runtime::Current()->GetHeap()->GetLiveBitmap()->Walk(HeapBitmapCallback, this);
current_record_.StartNewRecord(body_fp_, HPROF_TAG_HEAP_DUMP_END, HPROF_TIME);
current_record_.Flush();
fflush(body_fp_);
diff --git a/src/image_test.cc b/src/image_test.cc
index 5662239..f9c2d1c 100644
--- a/src/image_test.cc
+++ b/src/image_test.cc
@@ -65,7 +65,7 @@
Space* space = heap->GetSpaces()[0];
ASSERT_FALSE(space->IsImageSpace());
ASSERT_TRUE(space != NULL);
- ASSERT_EQ(space, heap->GetAllocSpace());
+ ASSERT_TRUE(space->IsAllocSpace());
ASSERT_GE(sizeof(image_header) + space->Size(), static_cast<size_t>(file->Length()));
}
@@ -93,8 +93,6 @@
ASSERT_FALSE(heap->GetSpaces()[0]->IsAllocSpace());
ASSERT_FALSE(heap->GetSpaces()[1]->IsImageSpace());
ASSERT_TRUE(heap->GetSpaces()[1]->IsAllocSpace());
- ASSERT_TRUE(heap->GetImageSpace() != NULL);
- ASSERT_TRUE(heap->GetAllocSpace() != NULL);
ImageSpace* image_space = heap->GetImageSpace();
byte* image_begin = image_space->Begin();
diff --git a/src/image_writer.cc b/src/image_writer.cc
index 589d3d7..59b7e80 100644
--- a/src/image_writer.cc
+++ b/src/image_writer.cc
@@ -52,7 +52,7 @@
image_begin_ = reinterpret_cast<byte*>(image_begin);
Heap* heap = Runtime::Current()->GetHeap();
- source_space_ = heap->GetAllocSpace();
+ const Spaces& spaces = heap->GetSpaces();
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
const std::vector<DexCache*>& all_dex_caches = class_linker->GetDexCaches();
@@ -75,7 +75,14 @@
ComputeLazyFieldsForImageClasses(); // Add useful information
ComputeEagerResolvedStrings();
heap->CollectGarbage(false); // Remove garbage
- heap->GetAllocSpace()->Trim(); // Trim size of source_space
+ // Trim size of alloc spaces
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ (*cur)->AsAllocSpace()->Trim();
+ }
+ }
+
if (!AllocMemory()) {
return false;
}
@@ -104,8 +111,27 @@
return true;
}
+bool ImageWriter::InSourceSpace(const Object* object) const {
+ const Spaces& spaces = Runtime::Current()->GetHeap()->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace() && (*cur)->Contains(object)) {
+ return true;
+ }
+ }
+ return false;
+}
+
bool ImageWriter::AllocMemory() {
- size_t size = source_space_->Size();
+ typedef std::vector<Space*> SpaceVec;
+ const SpaceVec& spaces = Runtime::Current()->GetHeap()->GetSpaces();
+ size_t size = 0;
+ for (SpaceVec::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ size += (*cur)->Size();
+ }
+ }
+
int prot = PROT_READ | PROT_WRITE;
size_t length = RoundUp(size, kPageSize);
image_.reset(MemMap::MapAnonymous("image-writer-image", NULL, length, prot));
@@ -151,9 +177,8 @@
}
void ImageWriter::ComputeEagerResolvedStrings() {
- HeapBitmap* heap_bitmap = Runtime::Current()->GetHeap()->GetLiveBits();
- DCHECK(heap_bitmap != NULL);
- heap_bitmap->Walk(ComputeEagerResolvedStringsCallback, this); // TODO: add Space-limited Walk
+ // TODO: Check image spaces only?
+ Runtime::Current()->GetHeap()->GetLiveBitmap()->Walk(ComputeEagerResolvedStringsCallback, this);
}
bool ImageWriter::IsImageClass(const Class* klass) {
@@ -232,7 +257,8 @@
if (image_classes_ == NULL) {
return;
}
- Runtime::Current()->GetHeap()->GetLiveBits()->Walk(CheckNonImageClassesRemovedCallback, this);
+
+ Runtime::Current()->GetHeap()->GetLiveBitmap()->Walk(CheckNonImageClassesRemovedCallback, this);
}
void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
@@ -332,16 +358,21 @@
void ImageWriter::CalculateNewObjectOffsets() {
SirtRef<ObjectArray<Object> > image_roots(CreateImageRoots());
- HeapBitmap* heap_bitmap = Runtime::Current()->GetHeap()->GetLiveBits();
- DCHECK(heap_bitmap != NULL);
+ Heap* heap = Runtime::Current()->GetHeap();
+ typedef std::vector<Space*> SpaceVec;
+ const SpaceVec& spaces = heap->GetSpaces();
+ DCHECK(!spaces.empty());
DCHECK_EQ(0U, image_end_);
// Leave space for the header, but do not write it yet; we need to
// know where image_roots is going to end up
image_end_ += RoundUp(sizeof(ImageHeader), 8); // 64-bit-alignment
- heap_bitmap->InOrderWalk(CalculateNewObjectOffsetsCallback, this); // TODO: add Space-limited Walk
- DCHECK_LT(image_end_, image_->Size());
+ // TODO: Image spaces only?
+ for (SpaceVec::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ (*cur)->GetLiveBitmap()->InOrderWalk(CalculateNewObjectOffsetsCallback, this);
+ DCHECK_LT(image_end_, image_->Size());
+ }
// Note that image_top_ is left at end of used space
oat_begin_ = image_begin_ + RoundUp(image_end_, kPageSize);
@@ -358,11 +389,10 @@
void ImageWriter::CopyAndFixupObjects() {
Heap* heap = Runtime::Current()->GetHeap();
- HeapBitmap* heap_bitmap = heap->GetLiveBits();
- DCHECK(heap_bitmap != NULL);
// TODO: heap validation can't handle this fix up pass
heap->DisableObjectValidation();
- heap_bitmap->Walk(CopyAndFixupObjectsCallback, this); // TODO: add Space-limited Walk
+ // TODO: Image spaces only?
+ heap->GetLiveBitmap()->Walk(CopyAndFixupObjectsCallback, this);
}
void ImageWriter::CopyAndFixupObjectsCallback(Object* object, void* arg) {
diff --git a/src/image_writer.h b/src/image_writer.h
index 1dc5b66..07d55dc 100644
--- a/src/image_writer.h
+++ b/src/image_writer.h
@@ -39,8 +39,7 @@
class ImageWriter {
public:
explicit ImageWriter(const std::set<std::string>* image_classes)
- : source_space_(NULL), image_end_(0), image_begin_(NULL), image_classes_(image_classes),
- oat_begin_(NULL) {}
+ : image_end_(0), image_begin_(NULL), image_classes_(image_classes), oat_begin_(NULL) {}
~ImageWriter() {}
@@ -79,10 +78,7 @@
return offsets_.find(object)->second;
}
- bool InSourceSpace(const Object* object) const {
- DCHECK(source_space_ != NULL);
- return source_space_->Contains(object);
- }
+ bool InSourceSpace(const Object* object) const;
Object* GetImageAddress(const Object* object) const {
if (object == NULL) {
@@ -147,9 +143,6 @@
// oat file with code for this image
OatFile* oat_file_;
- // Space we are writing objects from
- const Space* source_space_;
-
// memory mapped for generating the image
UniquePtr<MemMap> image_;
diff --git a/src/mark_sweep.cc b/src/mark_sweep.cc
index 63801a1..1a7bc83 100644
--- a/src/mark_sweep.cc
+++ b/src/mark_sweep.cc
@@ -37,10 +37,9 @@
namespace art {
MarkSweep::MarkSweep(MarkStack* mark_stack)
- : mark_stack_(mark_stack),
+ : current_mark_bitmap_(NULL),
+ mark_stack_(mark_stack),
heap_(NULL),
- mark_bitmap_(NULL),
- live_bitmap_(NULL),
finger_(NULL),
condemned_(NULL),
soft_reference_list_(NULL),
@@ -54,25 +53,40 @@
void MarkSweep::Init() {
heap_ = Runtime::Current()->GetHeap();
- mark_bitmap_ = heap_->GetMarkBits();
- live_bitmap_ = heap_->GetLiveBits();
mark_stack_->Reset();
+ const Spaces& spaces = heap_->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ current_mark_bitmap_ = (*cur)->GetMarkBitmap();
+ break;
+ }
+ }
// TODO: if concurrent, enable card marking in compiler
-
// TODO: check that the mark bitmap is entirely clear.
}
void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
DCHECK(obj != NULL);
+
+ SpaceBitmap* space_bitmap = NULL;
+ // Try to take advantage of locality of references within a space; failing that, find the
+ // space the hard way.
+ if (current_mark_bitmap_->HasAddress(obj)) {
+ space_bitmap = current_mark_bitmap_;
+ } else {
+ space_bitmap = heap_->GetMarkBitmap()->GetSpaceBitmap(obj);
+ }
+
if (obj < condemned_) {
DCHECK(IsMarked(obj));
return;
}
- bool is_marked = mark_bitmap_->Test(obj);
+ bool is_marked = space_bitmap->Test(obj);
// This object was not previously marked.
if (!is_marked) {
- mark_bitmap_->Set(obj);
+ space_bitmap->Set(obj);
if (check_finger && obj < finger_) {
// The object must be pushed on to the mark stack.
mark_stack_->Push(obj);
@@ -115,9 +129,7 @@
DCHECK(root != NULL);
DCHECK(arg != NULL);
MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
- // Sanity tests
- DCHECK(mark_sweep->live_bitmap_->Test(root));
- mark_sweep->MarkObject0(root, false);
+ // We do not need to mark since live == marked for image spaces.
mark_sweep->ScanObject(root);
}
@@ -146,7 +158,7 @@
DCHECK(root != NULL);
DCHECK(arg != NULL);
MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
- DCHECK(mark_sweep->IsMarked(root));
+ DCHECK(mark_sweep->heap_->GetMarkBitmap()->Test(root));
mark_sweep->CheckObject(root);
}
@@ -158,7 +170,7 @@
if (spaces[i]->IsImageSpace()) {
byte* begin = spaces[i]->Begin();
byte* end = spaces[i]->End();
- card_table->Scan(heap_->GetLiveBits(), begin, end, ScanImageRootVisitor, this);
+ card_table->Scan(spaces[i]->GetLiveBitmap(), begin, end, ScanImageRootVisitor, this);
}
}
}
@@ -191,16 +203,8 @@
for (size_t i = 0; i < spaces.size(); ++i) {
byte* begin = spaces[i]->Begin();
byte* end = spaces[i]->End();
- // Normally, we only need to scan the black dirty objects
- // But for image spaces, the roots will not be black objects.
- // To address this we just scan the live bits instead of the mark bits.
- if (spaces[i]->IsImageSpace()) {
- // Image roots may not be marked so we may need to mark them.
- // TODO: optimize this by offsetting some of the work to init.
- card_table->Scan(heap_->GetLiveBits(), begin, end, ScanImageRootVisitor, this);
- } else {
- card_table->Scan(heap_->GetMarkBits(), begin, end, ScanDirtyCardCallback, this);
- }
+ // Image spaces are handled properly since live == marked for them.
+ card_table->Scan(spaces[i]->GetMarkBitmap(), begin, end, ScanImageRootVisitor, this);
}
}
@@ -215,7 +219,9 @@
if (spaces[i]->IsImageSpace()) {
uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
- live_bitmap_->ScanWalk(begin, end, &MarkSweep::CheckBitmapCallback, arg);
+ SpaceBitmap* live_bitmap = spaces[i]->GetLiveBitmap();
+ DCHECK(live_bitmap != NULL);
+ live_bitmap->ScanWalk(begin, end, CheckBitmapCallback, arg);
}
}
finger_ = reinterpret_cast<Object*>(~0);
@@ -236,10 +242,11 @@
void* arg = reinterpret_cast<void*>(this);
const std::vector<Space*>& spaces = heap_->GetSpaces();
for (size_t i = 0; i < spaces.size(); ++i) {
- if (!spaces[i]->IsImageSpace()) {
+ if (spaces[i]->IsAllocSpace()) {
uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
- mark_bitmap_->ScanWalk(begin, end, &MarkSweep::ScanBitmapCallback, arg);
+ current_mark_bitmap_ = spaces[i]->GetMarkBitmap();
+ current_mark_bitmap_->ScanWalk(begin, end, &ScanBitmapCallback, arg);
}
}
finger_ = reinterpret_cast<Object*>(~0);
@@ -263,7 +270,7 @@
typedef IndirectReferenceTable::iterator It; // TODO: C++0x auto
for (It it = table->begin(), end = table->end(); it != end; ++it) {
const Object** entry = *it;
- if (!IsMarked(*entry)) {
+ if (!heap_->GetMarkBitmap()->Test(*entry)) {
*entry = kClearedJniWeakGlobal;
}
}
@@ -296,7 +303,7 @@
for (size_t i = 0; i < num_ptrs; ++i) {
Object* obj = static_cast<Object*>(ptrs[i]);
freed_bytes += space->AllocationSize(obj);
- heap->GetLiveBits()->Clear(obj);
+ heap->GetLiveBitmap()->Clear(obj);
}
// AllocSpace::FreeList clears the value in ptrs, so perform after clearing the live bit
space->FreeList(num_ptrs, ptrs);
@@ -304,7 +311,7 @@
for (size_t i = 0; i < num_ptrs; ++i) {
Object* obj = static_cast<Object*>(ptrs[i]);
freed_bytes += space->AllocationSize(obj);
- heap->GetLiveBits()->Clear(obj);
+ heap->GetLiveBitmap()->Clear(obj);
space->Free(obj);
}
}
@@ -325,7 +332,9 @@
uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
scc.space = spaces[i]->AsAllocSpace();
- HeapBitmap::SweepWalk(*live_bitmap_, *mark_bitmap_, begin, end,
+ SpaceBitmap* live_bitmap = scc.space->GetLiveBitmap();
+ SpaceBitmap* mark_bitmap = scc.space->GetMarkBitmap();
+ SpaceBitmap::SweepWalk(*live_bitmap, *mark_bitmap, begin, end,
&MarkSweep::SweepCallback, reinterpret_cast<void*>(&scc));
}
}
@@ -379,43 +388,46 @@
}
void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) {
- AllocSpace* alloc_space = heap_->GetAllocSpace();
- if (alloc_space->Contains(ref)) {
- DCHECK(IsMarked(obj));
+ const Spaces& spaces = heap_->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) {
+ DCHECK(IsMarked(obj));
- bool is_marked = mark_bitmap_->Test(ref);
+ bool is_marked = IsMarked(ref);
+ if (!is_marked) {
+ LOG(INFO) << **cur;
+ LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
+ << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj)
+ << "' (" << reinterpret_cast<const void*>(obj) << ") at offset "
+ << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked";
- if (!is_marked) {
- LOG(INFO) << *alloc_space;
- LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
- << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj)
- << "' (" << reinterpret_cast<const void*>(obj) << ") at offset "
- << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked";
+ const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
+ DCHECK(klass != NULL);
+ const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields();
+ DCHECK(fields != NULL);
+ bool found = false;
+ for (int32_t i = 0; i < fields->GetLength(); ++i) {
+ const Field* cur = fields->Get(i);
+ if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
+ LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur);
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value();
+ }
- const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
- DCHECK(klass != NULL);
- const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields();
- DCHECK(fields != NULL);
- bool found = false;
- for (int32_t i = 0; i < fields->GetLength(); ++i) {
- const Field* cur = fields->Get(i);
- if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
- LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur);
- found = true;
- break;
+ bool obj_marked = heap_->GetCardTable()->IsDirty(obj);
+ if (!obj_marked) {
+ LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' "
+ << "(" << reinterpret_cast<const void*>(obj) << ") contains references to "
+ << "the alloc space, but wasn't card marked";
}
}
- if (!found) {
- LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value();
- }
-
- bool obj_marked = heap_->GetCardTable()->IsDirty(obj);
- if (!obj_marked) {
- LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' "
- << "(" << reinterpret_cast<const void*>(obj) << ") contains references to "
- << "the alloc space, but wasn't card marked";
- }
}
+ break;
}
}
@@ -489,7 +501,7 @@
inline void MarkSweep::ScanObject(const Object* obj) {
DCHECK(obj != NULL);
DCHECK(obj->GetClass() != NULL);
- DCHECK(IsMarked(obj));
+ DCHECK(heap_->GetMarkBitmap()->Test(obj));
if (obj->IsClass()) {
ScanClass(obj);
} else if (obj->IsArrayInstance()) {
@@ -501,12 +513,9 @@
// Scan anything that's on the mark stack.
void MarkSweep::ProcessMarkStack() {
- Space* alloc_space = heap_->GetAllocSpace();
while (!mark_stack_->IsEmpty()) {
const Object* obj = mark_stack_->Pop();
- if (alloc_space->Contains(obj)) {
- ScanObject(obj);
- }
+ ScanObject(obj);
}
}
@@ -634,7 +643,14 @@
#ifndef NDEBUG
VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_;
#endif
- mark_bitmap_->Clear();
+ // Clear all of the alloc spaces' mark bitmaps.
+ const Spaces& spaces = heap_->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ (*cur)->GetMarkBitmap()->Clear();
+ }
+ }
mark_stack_->Reset();
}
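The current_mark_bitmap_ cache used in MarkObject0 and IsMarked amounts to a
one-entry memo: try the bitmap used for the previous mark before falling back to
the heap-wide linear search. Restated in isolation (illustrative only;
FindMarkBitmap is a hypothetical name, not a function in this change):

    SpaceBitmap* FindMarkBitmap(const Object* obj) {
      if (current_mark_bitmap_->HasAddress(obj)) {
        return current_mark_bitmap_;  // fast path: same space as the previous mark
      }
      return heap_->GetMarkBitmap()->GetSpaceBitmap(obj);  // slow path: search all spaces
    }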
diff --git a/src/mark_sweep.h b/src/mark_sweep.h
index 00fdcfb..28a5523 100644
--- a/src/mark_sweep.h
+++ b/src/mark_sweep.h
@@ -83,7 +83,10 @@
private:
// Returns true if the object has its bit set in the mark bitmap.
bool IsMarked(const Object* object) const {
- return mark_bitmap_->Test(object);
+ if (current_mark_bitmap_->HasAddress(object)) {
+ return current_mark_bitmap_->Test(object);
+ }
+ return heap_->GetMarkBitmap()->Test(object);
}
static bool IsMarked(const Object* object, void* arg) {
@@ -246,11 +249,12 @@
void SweepSystemWeaks();
void SweepJniWeakGlobals();
+ // Current mark bitmap; we check it first to avoid searching all spaces for the one containing an object.
+ SpaceBitmap* current_mark_bitmap_;
+
MarkStack* mark_stack_;
Heap* heap_;
- HeapBitmap* mark_bitmap_;
- HeapBitmap* live_bitmap_;
Object* finger_;
diff --git a/src/mod_union_table.cc b/src/mod_union_table.cc
index c73dd6b..58b6a18 100644
--- a/src/mod_union_table.cc
+++ b/src/mod_union_table.cc
@@ -26,29 +26,34 @@
class MarkIfReachesAllocspaceVisitor {
public:
- explicit MarkIfReachesAllocspaceVisitor(MarkSweep* const mark_sweep, HeapBitmap* bitmap)
+ explicit MarkIfReachesAllocspaceVisitor(MarkSweep* const mark_sweep, SpaceBitmap* bitmap)
: mark_sweep_(mark_sweep),
bitmap_(bitmap) {
}
// Extra parameters are required since we use this same visitor signature for checking objects.
void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const {
- if (mark_sweep_->heap_->GetAllocSpace()->Contains(ref)) {
- // This can mark the same object multiple times, but is unlikely to be a performance problem.
- bitmap_->Set(obj);
+ // TODO: Optimize?
+ // TODO: C++0x auto
+ const Spaces& spaces = mark_sweep_->heap_->GetSpaces();
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) {
+ bitmap_->Set(obj);
+ }
}
+
// Avoid warnings.
(void)obj;(void)offset;(void)is_static;
}
private:
MarkSweep* const mark_sweep_;
- HeapBitmap* bitmap_;
+ SpaceBitmap* bitmap_;
};
class ModUnionVisitor {
public:
- explicit ModUnionVisitor(MarkSweep* const mark_sweep, HeapBitmap* bitmap)
+ explicit ModUnionVisitor(MarkSweep* const mark_sweep, SpaceBitmap* bitmap)
: mark_sweep_(mark_sweep),
bitmap_(bitmap) {
}
@@ -62,7 +67,7 @@
}
private:
MarkSweep* const mark_sweep_;
- HeapBitmap* bitmap_;
+ SpaceBitmap* bitmap_;
};
class ModUnionClearCardVisitor {
@@ -96,7 +101,7 @@
if (spaces[i]->IsImageSpace()) {
// Allocate the mod-union table
// The mod-union table is only needed when we have an image space, since its purpose is to cache image roots.
- UniquePtr<HeapBitmap> bitmap(HeapBitmap::Create("mod-union table bitmap", spaces[i]->Begin(), spaces[i]->Capacity()));
+ UniquePtr<SpaceBitmap> bitmap(SpaceBitmap::Create("mod-union table bitmap", spaces[i]->Begin(), spaces[i]->Capacity()));
if (bitmap.get() == NULL) {
LOG(FATAL) << "Failed to create mod-union bitmap";
}
@@ -124,9 +129,11 @@
cleared_cards_.pop_back();
// Find out which bitmap the card maps to.
- HeapBitmap* bitmap = 0;
+ SpaceBitmap* bitmap = 0;
+ const Space* space = 0;
for (BitmapMap::iterator cur = bitmaps_.begin(); cur != bitmaps_.end(); ++cur) {
- if (cur->first->Contains(reinterpret_cast<Object*>(card_table->AddrFromCard(card)))) {
+ space = cur->first;
+ if (space->Contains(reinterpret_cast<Object*>(card_table->AddrFromCard(card)))) {
bitmap = cur->second;
break;
}
@@ -138,12 +145,12 @@
// Clear the mod-union bitmap range corresponding to this card so that we
// don't have any objects marked which do not reach the alloc space.
- bitmap->VisitRange(start, end, HeapBitmap::ClearVisitor(bitmap));
+ bitmap->VisitRange(start, end, SpaceBitmap::ClearVisitor(bitmap));
// At this point we need to update the mod-union bitmap to contain all the
// objects which reach the alloc space.
ModUnionVisitor visitor(mark_sweep, bitmap);
- heap_->GetLiveBits()->VisitMarkedRange(start, end, visitor);
+ space->GetLiveBitmap()->VisitMarkedRange(start, end, visitor);
}
}
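
To make the update loop above concrete: each cleared card identifies a 128-byte window of an image space (GC_CARD_SHIFT is 7, per card_table.h), and the table re-derives that window before clearing and re-marking its mod-union bits. A small standalone sketch of the card-to-range arithmetic; heap_base, the card index, and CardStart are illustrative, not ART code:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // One card byte covers 1 << kCardShift (128) bytes of heap.
    const unsigned kCardShift = 7;

    uintptr_t CardStart(uintptr_t heap_base, size_t card_index) {
      return heap_base + (static_cast<uintptr_t>(card_index) << kCardShift);
    }

    int main() {
      const uintptr_t heap_base = 0x40000000;
      uintptr_t start = CardStart(heap_base, 3);
      uintptr_t end = CardStart(heap_base, 4);  // one past the last byte of card 3
      assert(end - start == (1u << kCardShift));
      // Over [start, end) the update clears the mod-union bitmap, then uses the
      // owning space's live bitmap to re-mark every object in the range that
      // still references an alloc space.
      return 0;
    }
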
diff --git a/src/mod_union_table.h b/src/mod_union_table.h
index 2f4ca5f..2d75a01 100644
--- a/src/mod_union_table.h
+++ b/src/mod_union_table.h
@@ -63,7 +63,7 @@
// One bitmap per image space.
// TODO: Add support for zygote spaces?
- typedef SafeMap<const Space*, HeapBitmap*> BitmapMap;
+ typedef SafeMap<Space*, SpaceBitmap*> BitmapMap;
BitmapMap bitmaps_;
Heap* heap_;
diff --git a/src/native/dalvik_system_DexFile.cc b/src/native/dalvik_system_DexFile.cc
index ef38f00..3e749e5 100644
--- a/src/native/dalvik_system_DexFile.cc
+++ b/src/native/dalvik_system_DexFile.cc
@@ -224,12 +224,20 @@
return JNI_TRUE;
}
- const ImageHeader& image_header = runtime->GetHeap()->GetImageSpace()->GetImageHeader();
- if (oat_file->GetOatHeader().GetImageFileLocationChecksum() != image_header.GetOatChecksum()) {
- LOG(INFO) << "DexFile_isDexOptNeeded cache file " << cache_location
- << " has out-of-date checksum compared to "
- << image_header.GetImageRoot(ImageHeader::kOatLocation)->AsString()->ToModifiedUtf8();
- return JNI_TRUE;
+ Heap* heap = runtime->GetHeap();
+ const Spaces& spaces = heap->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsImageSpace()) {
+ // TODO: Ensure this works with multiple image spaces.
+ const ImageHeader& image_header = (*cur)->AsImageSpace()->GetImageHeader();
+ if (oat_file->GetOatHeader().GetImageFileLocationChecksum() != image_header.GetOatChecksum()) {
+ LOG(INFO) << "DexFile_isDexOptNeeded cache file " << cache_location
+ << " has out-of-date checksum compared to "
+ << image_header.GetImageRoot(ImageHeader::kOatLocation)->AsString()->ToModifiedUtf8();
+ return JNI_TRUE;
+ }
+ }
}
uint32_t location_checksum;
diff --git a/src/native/dalvik_system_VMRuntime.cc b/src/native/dalvik_system_VMRuntime.cc
index 417ae5b..4ec1b92 100644
--- a/src/native/dalvik_system_VMRuntime.cc
+++ b/src/native/dalvik_system_VMRuntime.cc
@@ -164,23 +164,27 @@
// Trim the managed heap.
Heap* heap = Runtime::Current()->GetHeap();
- size_t alloc_space_size = heap->GetAllocSpace()->Size();
- float utilization = static_cast<float>(heap->GetBytesAllocated()) / alloc_space_size;
- uint64_t start_ns = NanoTime();
-
- heap->Trim();
-
- // Trim the native heap.
- dlmalloc_trim(0);
+ const Spaces& spaces = heap->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ uint64_t start_ns = NanoTime();
+ AllocSpace* alloc_space = (*cur)->AsAllocSpace();
+ size_t alloc_space_size = alloc_space->Size();
+ float utilization = static_cast<float>(heap->GetBytesAllocated()) / alloc_space_size;
+ heap->Trim(alloc_space);
+ // Trim the native heap.
+ dlmalloc_trim(0);
#if 0 // TODO: switch over to this when bionic has moved to dlmalloc 2.8.5
- dlmalloc_inspect_all(MspaceMadviseCallback, NULL);
+ dlmalloc_inspect_all(MspaceMadviseCallback, NULL);
#else
- dlmalloc_walk_free_pages(MspaceMadviseCallback, NULL);
+ dlmalloc_walk_free_pages(MspaceMadviseCallback, NULL);
#endif
-
- LOG(INFO) << "Parallel heap trimming took " << PrettyDuration(NanoTime() - start_ns)
- << " on a " << PrettySize(alloc_space_size)
- << " heap with " << static_cast<int>(100 * utilization) << "% utilization";
+ LOG(INFO) << "Parallel heap trimming took " << PrettyDuration(NanoTime() - start_ns)
+ << " on a " << PrettySize(alloc_space_size)
+ << " alloc space with " << static_cast<int>(100 * utilization) << "% utilization";
+ }
+ }
}
static void VMRuntime_concurrentGC(JNIEnv*, jobject) {
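
For reference, the utilization figure logged above reduces to live bytes over the alloc space footprint; note that heap->GetBytesAllocated() is heap-wide, so with several alloc spaces the percentage could in principle exceed 100. A minimal model of that arithmetic:

    #include <cassert>
    #include <cstddef>

    // Mirrors the logging math above: 100 * bytes_allocated / alloc_space_size,
    // truncated to an int, as in static_cast<int>(100 * utilization).
    int UtilizationPercent(size_t bytes_allocated, size_t alloc_space_size) {
      float utilization = static_cast<float>(bytes_allocated) / alloc_space_size;
      return static_cast<int>(100 * utilization);
    }

    int main() {
      assert(UtilizationPercent(3u << 20, 4u << 20) == 75);  // 3 MiB live in a 4 MiB space
      return 0;
    }
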
diff --git a/src/oatdump.cc b/src/oatdump.cc
index 9bae0d9..b1aa47e 100644
--- a/src/oatdump.cc
+++ b/src/oatdump.cc
@@ -552,10 +552,15 @@
oat_dumper_.reset(new OatDumper(host_prefix_, *oat_file));
os_ << "OBJECTS:\n" << std::flush;
- HeapBitmap* heap_bitmap = Runtime::Current()->GetHeap()->GetLiveBits();
- DCHECK(heap_bitmap != NULL);
- heap_bitmap->Walk(ImageDumper::Callback, this);
- os_ << "\n";
+
+ // Loop through all the spaces and dump the objects in their live bitmaps.
+ Heap* heap = Runtime::Current()->GetHeap();
+ const Spaces& spaces = heap->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ (*cur)->GetLiveBitmap()->Walk(ImageDumper::Callback, this);
+ os_ << "\n";
+ }
os_ << "STATS:\n" << std::flush;
UniquePtr<File> file(OS::OpenFile(image_filename_.c_str(), false));
diff --git a/src/space.cc b/src/space.cc
index 4600443..569a0c9 100644
--- a/src/space.cc
+++ b/src/space.cc
@@ -22,6 +22,7 @@
#include "image.h"
#include "logging.h"
#include "os.h"
+#include "stl_util.h"
#include "utils.h"
namespace art {
@@ -39,6 +40,26 @@
} \
} while (false)
+size_t AllocSpace::bitmap_index_ = 0;
+
+AllocSpace::AllocSpace(const std::string& name, MemMap* mem_map, void* mspace, byte* end,
+ size_t growth_limit)
+ : Space(name, mem_map, end), mspace_(mspace), growth_limit_(growth_limit) {
+ CHECK(mspace != NULL);
+
+ size_t bitmap_index = bitmap_index_++;
+
+ live_bitmap_.reset(SpaceBitmap::Create(
+ StringPrintf("allocspace-%s-live-bitmap-%d", name.c_str(), static_cast<int>(bitmap_index)),
+ Begin(), Capacity()));
+ DCHECK(live_bitmap_.get() != NULL) << "could not create allocspace live bitmap #" << bitmap_index;
+
+ mark_bitmap_.reset(SpaceBitmap::Create(
+ StringPrintf("allocspace-%s-mark-bitmap-%d", name.c_str(), static_cast<int>(bitmap_index)),
+ Begin(), Capacity()));
+ DCHECK(mark_bitmap_.get() != NULL) << "could not create allocspace mark bitmap #" << bitmap_index;
+}
+
AllocSpace* Space::CreateAllocSpace(const std::string& name, size_t initial_size,
size_t growth_limit, size_t capacity,
byte* requested_begin) {
@@ -175,24 +196,25 @@
// Callback from dlmalloc when it needs to increase the footprint
extern "C" void* art_heap_morecore(void* mspace, intptr_t increment) {
Heap* heap = Runtime::Current()->GetHeap();
- AllocSpace* space = heap->GetAllocSpace();
- if (LIKELY(space->GetMspace() == mspace)) {
- return space->MoreCore(increment);
+ if (heap->GetAllocSpace()->GetMspace() == mspace) {
+ return heap->GetAllocSpace()->MoreCore(increment);
} else {
- // Exhaustively search alloc spaces
- const std::vector<Space*>& spaces = heap->GetSpaces();
- for (size_t i = 0; i < spaces.size(); ++i) {
- if (spaces[i]->IsAllocSpace()) {
- AllocSpace* space = spaces[i]->AsAllocSpace();
+ // Exhaustively search alloc spaces.
+ const Spaces& spaces = heap->GetSpaces();
+ // TODO: C++0x auto
+ for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
+ if ((*cur)->IsAllocSpace()) {
+ AllocSpace* space = (*cur)->AsAllocSpace();
if (mspace == space->GetMspace()) {
return space->MoreCore(increment);
}
}
}
- LOG(FATAL) << "Unexpected call to art_heap_morecore. mspace: " << mspace
- << " increment: " << increment;
- return NULL;
}
+
+ LOG(FATAL) << "Unexpected call to art_heap_morecore. mspace: " << mspace
+ << " increment: " << increment;
+ return NULL;
}
void* AllocSpace::MoreCore(intptr_t increment) {
@@ -278,6 +300,17 @@
mspace_set_footprint_limit(mspace_, new_size);
}
+size_t ImageSpace::bitmap_index_ = 0;
+
+ImageSpace::ImageSpace(const std::string& name, MemMap* mem_map)
+ : Space(name, mem_map, mem_map->End()) {
+ const size_t bitmap_index = bitmap_index_++;
+ live_bitmap_.reset(SpaceBitmap::Create(
+ StringPrintf("imagespace-%s-live-bitmap-%d", name.c_str(), static_cast<int>(bitmap_index)),
+ Begin(), Capacity()));
+ DCHECK(live_bitmap_.get() != NULL) << "could not create imagespace live bitmap #" << bitmap_index;
+}
+
ImageSpace* Space::CreateImageSpace(const std::string& image_file_name) {
CHECK(!image_file_name.empty());
@@ -345,7 +378,7 @@
return space;
}
-void ImageSpace::RecordImageAllocations(HeapBitmap* live_bitmap) const {
+void ImageSpace::RecordImageAllocations(SpaceBitmap* live_bitmap) const {
uint64_t start_time = 0;
if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
LOG(INFO) << "ImageSpace::RecordImageAllocations entering";
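
The art_heap_morecore rewrite above keeps a fast path for the default alloc space and otherwise resolves the mspace by exhaustive search. A standalone model of that lookup; FakeAllocSpace and FindByMspace are illustrative names, not ART's:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct FakeAllocSpace {
      void* mspace;  // The dlmalloc mspace backing this space.
    };

    // Check the heap's default alloc space first, then search every space for
    // the one whose mspace matches; the real code LOG(FATAL)s on a miss.
    FakeAllocSpace* FindByMspace(FakeAllocSpace* default_space,
                                 std::vector<FakeAllocSpace*>& spaces,
                                 void* mspace) {
      if (default_space->mspace == mspace) {
        return default_space;
      }
      for (size_t i = 0; i < spaces.size(); ++i) {
        if (spaces[i]->mspace == mspace) {
          return spaces[i];
        }
      }
      return NULL;
    }

    int main() {
      int a, b;
      FakeAllocSpace s0 = { &a };
      FakeAllocSpace s1 = { &b };
      std::vector<FakeAllocSpace*> spaces;
      spaces.push_back(&s0);
      spaces.push_back(&s1);
      assert(FindByMspace(&s0, spaces, &b) == &s1);
      assert(FindByMspace(&s0, spaces, &a) == &s0);
      return 0;
    }
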
diff --git a/src/space.h b/src/space.h
index 76d4817..ff16a51 100644
--- a/src/space.h
+++ b/src/space.h
@@ -31,6 +31,7 @@
class AllocSpace;
class ImageSpace;
class Object;
+class SpaceBitmap;
// A space contains memory allocated for managed objects.
class Space {
@@ -97,11 +98,19 @@
virtual bool IsAllocSpace() const = 0;
virtual bool IsImageSpace() const = 0;
+ virtual SpaceBitmap* GetLiveBitmap() const = 0;
+ virtual SpaceBitmap* GetMarkBitmap() const = 0;
+
+ const std::string GetName() const {
+ return name_;
+ }
+
protected:
Space(const std::string& name, MemMap* mem_map, byte* end)
: name_(name), mem_map_(mem_map), begin_(mem_map->Begin()), end_(end) {}
std::string name_;
+
// Underlying storage of the space
UniquePtr<MemMap> mem_map_;
@@ -178,14 +187,23 @@
return false;
}
+ virtual SpaceBitmap* GetLiveBitmap() const {
+ return live_bitmap_.get();
+ }
+
+ virtual SpaceBitmap* GetMarkBitmap() const {
+ return mark_bitmap_.get();
+ }
+
private:
friend class Space;
+ UniquePtr<SpaceBitmap> live_bitmap_;
+ UniquePtr<SpaceBitmap> mark_bitmap_;
+ static size_t bitmap_index_;
+
AllocSpace(const std::string& name, MemMap* mem_map, void* mspace, byte* end,
- size_t growth_limit)
- : Space(name, mem_map, end), mspace_(mspace), growth_limit_(growth_limit) {
- CHECK(mspace != NULL);
- }
+ size_t growth_limit);
bool Init(size_t initial_size, size_t maximum_size, size_t growth_size, byte* requested_base);
@@ -221,7 +239,7 @@
}
// Mark the objects defined in this space in the given live bitmap
- void RecordImageAllocations(HeapBitmap* live_bitmap) const;
+ void RecordImageAllocations(SpaceBitmap* live_bitmap) const;
virtual bool IsAllocSpace() const {
return false;
@@ -231,11 +249,20 @@
return true;
}
+ virtual SpaceBitmap* GetLiveBitmap() const {
+ return live_bitmap_.get();
+ }
+
+ virtual SpaceBitmap* GetMarkBitmap() const {
+ return live_bitmap_.get();
+ }
private:
friend class Space;
- ImageSpace(const std::string& name, MemMap* mem_map)
- : Space(name, mem_map, mem_map->End()) {}
+ UniquePtr<SpaceBitmap> live_bitmap_;
+ static size_t bitmap_index_;
+
+ ImageSpace(const std::string& name, MemMap* mem_map);
DISALLOW_COPY_AND_ASSIGN(ImageSpace);
};
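
A design note on the ImageSpace accessors above: image objects are never collected, so "marked" and "live" coincide and GetMarkBitmap() can simply return the live bitmap rather than paying for a second one. A compact model of that aliasing, with illustrative names:

    #include <cassert>

    struct FakeBitmap {};

    struct FakeImageSpace {
      FakeBitmap* live_bitmap;
      FakeBitmap* GetLiveBitmap() const { return live_bitmap; }
      FakeBitmap* GetMarkBitmap() const { return live_bitmap; }  // Alias, as in space.h.
    };

    int main() {
      FakeBitmap bitmap;
      FakeImageSpace space = { &bitmap };
      assert(space.GetLiveBitmap() == space.GetMarkBitmap());
      return 0;
    }
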
diff --git a/src/space_bitmap.cc b/src/space_bitmap.cc
new file mode 100644
index 0000000..28dee45
--- /dev/null
+++ b/src/space_bitmap.cc
@@ -0,0 +1,311 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "heap_bitmap.h"
+
+#include "logging.h"
+#include "UniquePtr.h"
+#include "utils.h"
+
+namespace art {
+
+SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size_t heap_capacity) {
+ CHECK(heap_begin != NULL);
+ // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
+ size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
+ UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), NULL, bitmap_size, PROT_READ | PROT_WRITE));
+ if (mem_map.get() == NULL) {
+ LOG(ERROR) << "Failed to allocate bitmap " << name;
+ return NULL;
+ }
+ word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
+ return new SpaceBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
+}
+
+// Clean up any resources associated with the bitmap.
+SpaceBitmap::~SpaceBitmap() {}
+
+// Fill the bitmap with zeroes. Returns the bitmap's memory to the
+// system as a side-effect.
+void SpaceBitmap::Clear() {
+ if (bitmap_begin_ != NULL) {
+ // This returns the memory to the system. Successive page faults
+ // will return zeroed memory.
+ int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
+ if (result == -1) {
+ PLOG(WARNING) << "madvise failed";
+ }
+ heap_end_ = heap_begin_ - 1;
+ }
+}
+
+// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
+// even if a bit has not been set for it.
+bool SpaceBitmap::HasAddress(const void* obj) const {
+ // If obj < heap_begin_ then offset underflows to some very large value past the end of the bitmap.
+ const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
+ const size_t index = OffsetToIndex(offset);
+ return index < bitmap_size_ / kWordSize;
+}
+
+void SpaceBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
+ size_t start = OffsetToIndex(visit_begin - heap_begin_);
+ size_t end = OffsetToIndex(visit_end - heap_begin_ - 1);
+ for (size_t i = start; i <= end; i++) {
+ word w = bitmap_begin_[i];
+ if (w != 0) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ while (w != 0) {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ (*visitor)(obj, arg);
+ w &= ~(high_bit >> shift);
+ }
+ }
+ }
+}
+
+// Visits set bits in address order. The callback is not permitted to
+// change the bitmap bits or max during the traversal.
+void SpaceBitmap::Walk(SpaceBitmap::Callback* callback, void* arg) {
+ CHECK(bitmap_begin_ != NULL);
+ CHECK(callback != NULL);
+ if (heap_end_ < heap_begin_) {
+ return; // Bitmap is empty.
+ }
+ uintptr_t end = OffsetToIndex(heap_end_ - heap_begin_);
+ for (uintptr_t i = 0; i <= end; ++i) {
+ word w = bitmap_begin_[i];
+ if (UNLIKELY(w != 0)) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ while (w != 0) {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ (*callback)(obj, arg);
+ w &= ~(high_bit >> shift);
+ }
+ }
+ }
+}
+
+// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
+// traversal. Used exclusively by the root marking scan.
+//
+// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
+// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
+// address will be visited by the traversal. If the callback sets a bit for an address below the
+// finger, this address will not be visited (typically such an address would be placed on the
+// marking stack).
+void SpaceBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
+ CHECK(bitmap_begin_ != NULL);
+ CHECK(callback != NULL);
+ CHECK_LE(scan_begin, scan_end);
+ CHECK_GE(scan_begin, heap_begin_);
+ size_t start = OffsetToIndex(scan_begin - heap_begin_);
+ if (scan_end < heap_end_) {
+ // The end of the range we're scanning is below the bitmap's current maximum; scan up to it
+ // and don't recompute the end on each iteration.
+ size_t end = OffsetToIndex(scan_end - heap_begin_ - 1);
+ for (size_t i = start; i <= end; i++) {
+ word w = bitmap_begin_[i];
+ if (UNLIKELY(w != 0)) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ void* finger = reinterpret_cast<void*>(IndexToOffset(i + 1) + heap_begin_);
+ while (w != 0) {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ (*callback)(obj, finger, arg);
+ w &= ~(high_bit >> shift);
+ }
+ }
+ }
+ } else {
+ size_t end = OffsetToIndex(heap_end_ - heap_begin_);
+ for (size_t i = start; i <= end; i++) {
+ word w = bitmap_begin_[i];
+ if (UNLIKELY(w != 0)) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ void* finger = reinterpret_cast<void*>(IndexToOffset(i + 1) + heap_begin_);
+ while (w != 0) {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ (*callback)(obj, finger, arg);
+ w &= ~(high_bit >> shift);
+ }
+ }
+ // update 'end' in case callback modified bitmap
+ end = OffsetToIndex(heap_end_ - heap_begin_);
+ }
+ }
+}
+
+// Walk through the bitmaps in increasing address order, and find the
+// object pointers that correspond to garbage objects. Call
+// <callback> zero or more times with lists of these object pointers.
+//
+// The callback is not permitted to increase the max of either bitmap.
+void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap,
+ const SpaceBitmap& mark_bitmap,
+ uintptr_t sweep_begin, uintptr_t sweep_end,
+ SpaceBitmap::SweepCallback* callback, void* arg) {
+ CHECK(live_bitmap.bitmap_begin_ != NULL);
+ CHECK(mark_bitmap.bitmap_begin_ != NULL);
+ CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
+ CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
+ CHECK(callback != NULL);
+ CHECK_LE(sweep_begin, sweep_end);
+ CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
+ sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
+ if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
+ // Easy case; both are obviously empty.
+ // TODO: this should never happen
+ return;
+ }
+ // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than a Object**?
+ std::vector<Object*> pointer_buf(4 * kBitsPerWord);
+ Object** pb = &pointer_buf[0];
+ size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
+ size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_);
+ word* live = live_bitmap.bitmap_begin_;
+ word* mark = mark_bitmap.bitmap_begin_;
+ for (size_t i = start; i <= end; i++) {
+ word garbage = live[i] & ~mark[i];
+ if (UNLIKELY(garbage != 0)) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
+ while (garbage != 0) {
+ int shift = CLZ(garbage);
+ garbage &= ~(high_bit >> shift);
+ *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ }
+ // Make sure that there are always enough slots available for an
+ // entire word of set bits.
+ if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
+ (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
+ pb = &pointer_buf[0];
+ }
+ }
+ }
+ if (pb > &pointer_buf[0]) {
+ (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
+ }
+}
+
+} // namespace art
+
+// Support needed for in order traversal
+#include "object.h"
+#include "object_utils.h"
+
+namespace art {
+
+static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
+ void* arg);
+
+// Walk instance fields of the given Class. Separate function to allow recursion on the super
+// class.
+static void WalkInstanceFields(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
+ Class* klass, void* arg) {
+ // Visit fields of parent classes first.
+ Class* super = klass->GetSuperClass();
+ if (super != NULL) {
+ WalkInstanceFields(visited, callback, obj, super, arg);
+ }
+ // Walk instance fields
+ ObjectArray<Field>* fields = klass->GetIFields();
+ if (fields != NULL) {
+ for (int32_t i = 0; i < fields->GetLength(); i++) {
+ Field* field = fields->Get(i);
+ FieldHelper fh(field);
+ if (!fh.GetType()->IsPrimitive()) {
+ Object* value = field->GetObj(obj);
+ if (value != NULL) {
+ WalkFieldsInOrder(visited, callback, value, arg);
+ }
+ }
+ }
+ }
+}
+
+// For an unvisited object, visit it then all its children found via fields.
+static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
+ void* arg) {
+ if (visited->Test(obj)) {
+ return;
+ }
+ // visit the object itself
+ (*callback)(obj, arg);
+ visited->Set(obj);
+ // Walk instance fields of all objects
+ Class* klass = obj->GetClass();
+ WalkInstanceFields(visited, callback, obj, klass, arg);
+ // Walk static fields of a Class
+ if (obj->IsClass()) {
+ ObjectArray<Field>* fields = klass->GetSFields();
+ if (fields != NULL) {
+ for (int32_t i = 0; i < fields->GetLength(); i++) {
+ Field* field = fields->Get(i);
+ FieldHelper fh(field);
+ if (!fh.GetType()->IsPrimitive()) {
+ Object* value = field->GetObj(NULL);
+ if (value != NULL) {
+ WalkFieldsInOrder(visited, callback, value, arg);
+ }
+ }
+ }
+ }
+ } else if (obj->IsObjectArray()) {
+ // Walk elements of an object array
+ ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
+ int32_t length = obj_array->GetLength();
+ for (int32_t i = 0; i < length; i++) {
+ Object* value = obj_array->Get(i);
+ if (value != NULL) {
+ WalkFieldsInOrder(visited, callback, value, arg);
+ }
+ }
+ }
+}
+
+// Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
+// bits or max during the traversal.
+void SpaceBitmap::InOrderWalk(SpaceBitmap::Callback* callback, void* arg) {
+ UniquePtr<SpaceBitmap> visited(Create("bitmap for in-order walk",
+ reinterpret_cast<byte*>(heap_begin_),
+ IndexToOffset(bitmap_size_ / kWordSize)));
+ CHECK(bitmap_begin_ != NULL);
+ CHECK(callback != NULL);
+ uintptr_t end = OffsetToIndex(heap_end_ - heap_begin_);
+ for (uintptr_t i = 0; i <= end; ++i) {
+ word w = bitmap_begin_[i];
+ if (UNLIKELY(w != 0)) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ while (w != 0) {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ WalkFieldsInOrder(visited.get(), callback, obj, arg);
+ w &= ~(high_bit >> shift);
+ }
+ }
+ }
+}
+
+} // namespace art
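
The core of SweepWalk above is the per-word computation garbage = live & ~mark, decoded bit by bit with CLZ into object addresses. A standalone example assuming a 32-bit word, 8-byte alignment, an illustrative heap base, and GCC/Clang's __builtin_clz standing in for the CLZ macro:

    #include <cassert>
    #include <cstdint>

    int main() {
      const unsigned kAlignment = 8;
      const uintptr_t heap_begin = 0x40000000;
      // Bits are packed backwards (see OffsetToMask), so bit 31 is the lowest address.
      uint32_t live = 0x80000000u | 0x40000000u;  // objects at +0 and +8 are live
      uint32_t mark = 0x80000000u;                // only the object at +0 is marked
      uint32_t garbage = live & ~mark;            // unmarked live objects are garbage
      assert(garbage == 0x40000000u);
      while (garbage != 0) {
        const int shift = __builtin_clz(garbage);
        uintptr_t obj = heap_begin + static_cast<uintptr_t>(shift) * kAlignment;
        assert(obj == heap_begin + 8);  // the unmarked object at +8
        garbage &= ~(0x80000000u >> shift);
      }
      return 0;
    }
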
diff --git a/src/space_bitmap.h b/src/space_bitmap.h
new file mode 100644
index 0000000..fa79d5d
--- /dev/null
+++ b/src/space_bitmap.h
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_SRC_SPACE_BITMAP_H_
+#define ART_SRC_SPACE_BITMAP_H_
+
+#include <limits.h>
+#include <stdint.h>
+#include <vector>
+
+#include "UniquePtr.h"
+#include "globals.h"
+#include "logging.h"
+#include "mem_map.h"
+#include "utils.h"
+
+namespace art {
+
+class Object;
+
+class SpaceBitmap {
+ public:
+ static const size_t kAlignment = 8;
+
+ typedef void Callback(Object* obj, void* arg);
+
+ typedef void ScanCallback(Object* obj, void* finger, void* arg);
+
+ typedef void SweepCallback(size_t ptr_count, Object** ptrs, void* arg);
+
+ // Initialize a SpaceBitmap so that it points to a bitmap large enough to cover a heap at
+ // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
+ static SpaceBitmap* Create(const std::string& name, byte* heap_begin, size_t heap_capacity);
+
+ ~SpaceBitmap();
+
+ // <offset> is the difference from .base to a pointer address.
+ // <index> is the index of .bits that contains the bit representing
+ // <offset>.
+ static size_t OffsetToIndex(size_t offset) {
+ return offset / kAlignment / kBitsPerWord;
+ }
+
+ static uintptr_t IndexToOffset(size_t index) {
+ return static_cast<uintptr_t>(index * kAlignment * kBitsPerWord);
+ }
+
+ // Pack the bits backwards so they come out in address order when using CLZ.
+ static word OffsetToMask(uintptr_t offset) {
+ return 1 << (sizeof(word) * 8 - 1 - (offset / kAlignment) % kBitsPerWord);
+ }
+
+ inline void Set(const Object* obj) {
+ Modify(obj, true);
+ }
+
+ inline void Clear(const Object* obj) {
+ Modify(obj, false);
+ }
+
+ void Clear();
+
+ inline bool Test(const Object* obj) const {
+ uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
+ DCHECK(HasAddress(obj)) << obj;
+ DCHECK(bitmap_begin_ != NULL);
+ DCHECK_GE(addr, heap_begin_);
+ if (addr <= heap_end_) {
+ const uintptr_t offset = addr - heap_begin_;
+ return (bitmap_begin_[OffsetToIndex(offset)] & OffsetToMask(offset)) != 0;
+ } else {
+ return false;
+ }
+ }
+
+ bool HasAddress(const void* addr) const;
+
+ void VisitRange(uintptr_t base, uintptr_t max, Callback* visitor, void* arg) const;
+
+ class ClearVisitor {
+ public:
+ explicit ClearVisitor(SpaceBitmap* const bitmap)
+ : bitmap_(bitmap) {
+ }
+
+ void operator ()(Object* obj) const {
+ bitmap_->Clear(obj);
+ }
+ private:
+ SpaceBitmap* const bitmap_;
+ };
+
+ template <typename Visitor>
+ void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
+ for (; visit_begin < visit_end; visit_begin += kAlignment) {
+ visitor(reinterpret_cast<Object*>(visit_begin));
+ }
+ }
+
+ template <typename Visitor>
+ void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
+ size_t start = OffsetToIndex(visit_begin - heap_begin_);
+ size_t end = OffsetToIndex(visit_end - heap_begin_ - 1);
+ for (size_t i = start; i <= end; i++) {
+ word w = bitmap_begin_[i];
+ if (w != 0) {
+ word high_bit = 1 << (kBitsPerWord - 1);
+ uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
+ do {
+ const int shift = CLZ(w);
+ Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
+ visitor(obj);
+ w &= ~(high_bit >> shift);
+ } while (w != 0);
+ }
+ }
+ }
+
+ void Walk(Callback* callback, void* arg);
+
+ void InOrderWalk(Callback* callback, void* arg);
+
+ void ScanWalk(uintptr_t base, uintptr_t max, ScanCallback* thunk, void* arg);
+
+ static void SweepWalk(const SpaceBitmap& live,
+ const SpaceBitmap& mark,
+ uintptr_t base, uintptr_t max,
+ SweepCallback* thunk, void* arg);
+
+ private:
+ // TODO: heap_end_ is initialized to heap_begin_ - 1 so that the bitmap starts out empty; the -1
+ // is not strictly required, but heap_end_ is documented to hold this value when no bits are set.
+ SpaceBitmap(const std::string& name, MemMap* mem_map, word* bitmap_begin, size_t bitmap_size, const void* heap_begin)
+ : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
+ heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)), heap_end_(heap_begin_ - 1),
+ name_(name) {}
+
+ inline void Modify(const Object* obj, bool do_set) {
+ uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
+ DCHECK_GE(addr, heap_begin_);
+ const uintptr_t offset = addr - heap_begin_;
+ const size_t index = OffsetToIndex(offset);
+ const word mask = OffsetToMask(offset);
+ DCHECK_LT(index, bitmap_size_ / kWordSize) << " bitmap_size_ = " << bitmap_size_;
+ if (do_set) {
+ if (addr > heap_end_) {
+ heap_end_ = addr;
+ }
+ bitmap_begin_[index] |= mask;
+ } else {
+ bitmap_begin_[index] &= ~mask;
+ }
+ }
+
+ // Backing storage for bitmap.
+ UniquePtr<MemMap> mem_map_;
+
+ // This bitmap itself, word sized for efficiency in scanning.
+ word* const bitmap_begin_;
+
+ // Size of this bitmap.
+ const size_t bitmap_size_;
+
+ // The base address of the heap, which corresponds to the word containing the first bit in the
+ // bitmap.
+ const uintptr_t heap_begin_;
+
+ // The highest pointer value ever returned by an allocation from
+ // this heap. I.e., the highest address that may correspond to a
+ // set bit. If there are no bits set, (heap_end_ < heap_begin_).
+ uintptr_t heap_end_;
+
+ // Name of this bitmap.
+ std::string name_;
+};
+
+} // namespace art
+
+#endif // ART_SRC_SPACE_BITMAP_H_
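
Finally, a worked example of the offset/index/mask arithmetic in SpaceBitmap, assuming a 32-bit word (kBitsPerWord == 32), so each bitmap word covers kAlignment * kBitsPerWord == 256 bytes of heap:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    int main() {
      const size_t kAlignment = 8;
      const size_t kBitsPerWord = 32;
      const size_t offset = 0x110;  // object 272 bytes past heap_begin_
      size_t index = offset / kAlignment / kBitsPerWord;  // OffsetToIndex
      uint32_t mask =
          1u << (kBitsPerWord - 1 - (offset / kAlignment) % kBitsPerWord);  // OffsetToMask
      assert(index == 1);          // second bitmap word
      assert(mask == (1u << 29));  // packed backwards: third bit from the top
      uintptr_t back = static_cast<uintptr_t>(index * kAlignment * kBitsPerWord);  // IndexToOffset
      assert(back == 0x100);       // start of the 256-byte range this word covers
      return 0;
    }
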