Make allocations report usable size.
Work-in-progress to allow arrays to fill usable size. Bug: 13028925.
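
As a rough standalone sketch of the pattern this threads through the
allocators (malloc-based here; the nullptr convention matches the diff below,
but malloc_usable_size() is an illustrative stand-in, not ART's code):

  #include <cstddef>
  #include <cstdlib>
  #include <malloc.h>  // malloc_usable_size(): glibc/Bionic.

  // Allocators report both the bytes charged and the bytes the caller may
  // actually use, which can exceed the requested size due to size rounding.
  void* AllocWithUsableSize(size_t num_bytes, size_t* bytes_allocated,
                            size_t* usable_size) {
    void* obj = std::malloc(num_bytes);
    if (obj == nullptr) {
      return nullptr;
    }
    const size_t allocation_size = malloc_usable_size(obj);
    *bytes_allocated = allocation_size;
    if (usable_size != nullptr) {  // Callers that don't care pass nullptr.
      *usable_size = allocation_size;
    }
    return obj;
  }
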
Use C++11's override keyword on GCC >= 4.7 to ensure that we override GC and
allocator methods.
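
The override/FINAL usage relies on compatibility macros along these lines
(ART keeps them in base/macros.h; the exact version guard below is an
assumption, not a verbatim copy):

  // On compilers without C++11 support the keywords expand to nothing, so the
  // same code still builds; with support, the compiler checks the overrides.
  #if defined(__clang__) || (defined(__GNUC__) && \
      (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)))
  #define OVERRIDE override
  #define FINAL final
  #else
  #define OVERRIDE
  #define FINAL
  #endif
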
Move initial mirror::Class setup into a Functor so that all allocated objects
have non-zero sizes. Use this property to assert that no object is ever larger
than its usable size.
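
A minimal sketch of the functor idea, with hypothetical stand-in types and
names (the real change routes such a visitor through the Heap allocation
path):

  #include <cassert>
  #include <cstddef>
  #include <cstdlib>

  struct Class { size_t object_size; };  // Stand-in for mirror::Class.
  struct Object {                        // Stand-in for mirror::Object.
    Class* klass;                        // Installed by the visitor below.
    size_t SizeOf() const { return klass->object_size; }
  };

  // Running initialization through a functor before any size checks means
  // SizeOf() is already meaningful (non-zero) when the invariant is asserted.
  template <typename PreFenceVisitor>
  Object* Allocate(size_t byte_count, size_t* usable_size,
                   const PreFenceVisitor& visitor) {
    Object* obj = static_cast<Object*>(std::calloc(1, byte_count));
    if (obj == nullptr) {
      return nullptr;
    }
    *usable_size = byte_count;
    visitor(obj);  // E.g. stores the Class pointer.
    assert(obj->SizeOf() <= *usable_size);  // Never larger than usable size.
    return obj;
  }

A caller passes something like [](Object* o) { /* set o->klass */ }, so the
class pointer is in place before the assertion runs.
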
Other bits of GC-related clean-up: missing initialization, missing use of
const, hot methods in .cc files, and "unimplemented" functions that fail at
runtime in header files; also reduce header file includes, move valgrind's
space into its own files, and reduce the number of array allocation routines.
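
On the last point, a hypothetical sketch of the consolidation: near-duplicate
array allocation entry points collapse into one routine parameterized on the
allocator, so the variants differ by a template argument rather than copied
code (names and allocator kinds here are illustrative only):

  #include <cstddef>
  #include <cstdlib>

  enum class AllocatorKind { kMalloc, kZeroed };  // Illustrative kinds.

  // One templated routine replaces per-allocator copies of the array path.
  template <AllocatorKind kKind>
  void* AllocArray(size_t component_size, size_t length,
                   size_t* bytes_allocated) {
    const size_t size = component_size * length;  // Overflow check elided.
    void* data = (kKind == AllocatorKind::kZeroed) ? std::calloc(1, size)
                                                   : std::malloc(size);
    if (data != nullptr) {
      *bytes_allocated = size;
    }
    return data;
  }
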
Change-Id: Id5760041a2d7f94dcaf17ec760f6095ec75dadaa
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 4435d98..8d8cdd6 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -952,6 +952,7 @@
mirror::Object* Heap::AllocateInternalWithGc(Thread* self, AllocatorType allocator,
size_t alloc_size, size_t* bytes_allocated,
+ size_t* usable_size,
mirror::Class** klass) {
mirror::Object* ptr = nullptr;
bool was_default_allocator = allocator == GetCurrentAllocator();
@@ -968,7 +969,7 @@
return nullptr;
}
// A GC was in progress and we blocked, retry allocation now that memory has been freed.
- ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated);
+ ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, usable_size);
}
// Loop through our different Gc types and try to Gc until we get enough free memory.
@@ -985,13 +986,13 @@
}
if (gc_ran) {
// Did we free sufficient memory for the allocation to succeed?
- ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated);
+ ptr = TryToAllocate<true, false>(self, allocator, alloc_size, bytes_allocated, usable_size);
}
}
// Allocations have failed after GCs; this is an exceptional state.
if (ptr == nullptr) {
// Try harder, growing the heap if necessary.
- ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated);
+ ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size);
}
if (ptr == nullptr) {
// Most allocations should have succeeded by now, so the heap is really full, really fragmented,
@@ -1008,7 +1009,7 @@
*klass = sirt_klass.get();
return nullptr;
}
- ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated);
+ ptr = TryToAllocate<true, true>(self, allocator, alloc_size, bytes_allocated, usable_size);
if (ptr == nullptr) {
ThrowOutOfMemoryError(self, alloc_size, false);
}
@@ -1318,9 +1319,10 @@
}
// Special compacting collector which uses sub-optimal bin packing to reduce zygote space size.
-class ZygoteCompactingCollector : public collector::SemiSpace {
+class ZygoteCompactingCollector FINAL : public collector::SemiSpace {
public:
- explicit ZygoteCompactingCollector(gc::Heap* heap) : SemiSpace(heap, "zygote collector") {
+ explicit ZygoteCompactingCollector(gc::Heap* heap) : SemiSpace(heap, "zygote collector"),
+ bin_live_bitmap_(nullptr), bin_mark_bitmap_(nullptr) {
}
void BuildBins(space::ContinuousSpace* space) {
@@ -1382,7 +1384,7 @@
// No available space in the bins, place it in the target space instead (grows the zygote
// space).
size_t bytes_allocated;
- forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated);
+ forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated, nullptr);
if (to_space_live_bitmap_ != nullptr) {
to_space_live_bitmap_->Set(forward_address);
} else {