author Nicolas Geoffray <ngeoffray@google.com> 2017-05-04 13:31:46 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2017-05-04 13:31:47 +0000
commit 9459127abb57b0892d3ddeb1e30ac0bf28c93761 (patch)
tree 0858d1ce964a5000630ed42e369ee7c8717a3fab
parent 286fb4487c73d5ef1a9ff7ad69929d37d9dcdbee (diff)
parent 7acddd83065bc8b12ade9528a84e6fcadda21250 (diff)
Merge "Don't use free list LOS for --force-determinism."
-rw-r--r--  dex2oat/dex2oat.cc                           4
-rw-r--r--  runtime/gc/collector/concurrent_copying.cc  36
2 files changed, 21 insertions, 19 deletions
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index 9fd42d2cb7..58f394896a 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -2433,8 +2433,8 @@ class Dex2Oat FINAL {
       // which uses an unstarted runtime.
       raw_options.push_back(std::make_pair("-Xgc:nonconcurrent", nullptr));
 
-      // Also force the free-list implementation for large objects.
-      raw_options.push_back(std::make_pair("-XX:LargeObjectSpace=freelist", nullptr));
+      // The default LOS implementation (map) is not deterministic. So disable it.
+      raw_options.push_back(std::make_pair("-XX:LargeObjectSpace=disabled", nullptr));
 
       // We also need to turn off the nonmoving space. For that, we need to disable HSpace
       // compaction (done above) and ensure that neither foreground nor background collectors
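Note: as a standalone illustration of the option-forcing pattern changed above, here is a minimal sketch that compiles on its own. RawOptions and AppendForceDeterminismOptions are hypothetical names invented for this sketch; the real code lives inside the Dex2Oat class and uses its own option container.

#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for dex2oat's raw runtime option list.
using RawOptions = std::vector<std::pair<std::string, const void*>>;

// Sketch of the options forced for --force-determinism, per the hunk above.
void AppendForceDeterminismOptions(RawOptions* raw_options) {
  // Image writing uses an unstarted runtime, so force a non-concurrent GC.
  raw_options->push_back(std::make_pair("-Xgc:nonconcurrent", nullptr));
  // The default large object space implementation (map) is not deterministic,
  // so this change disables the LOS instead of forcing the freelist variant.
  raw_options->push_back(std::make_pair("-XX:LargeObjectSpace=disabled", nullptr));
}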
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index bb09559e02..a450a751b8 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1742,25 +1742,29 @@ void ConcurrentCopying::MarkZygoteLargeObjects() {
   Thread* const self = Thread::Current();
   WriterMutexLock rmu(self, *Locks::heap_bitmap_lock_);
   space::LargeObjectSpace* const los = heap_->GetLargeObjectsSpace();
-  // Pick the current live bitmap (mark bitmap if swapped).
-  accounting::LargeObjectBitmap* const live_bitmap = los->GetLiveBitmap();
-  accounting::LargeObjectBitmap* const mark_bitmap = los->GetMarkBitmap();
-  // Walk through all of the objects and explicitly mark the zygote ones so they don't get swept.
-  std::pair<uint8_t*, uint8_t*> range = los->GetBeginEndAtomic();
-  live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(range.first),
-                                reinterpret_cast<uintptr_t>(range.second),
-                                [mark_bitmap, los, self](mirror::Object* obj)
-      REQUIRES(Locks::heap_bitmap_lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_) {
-    if (los->IsZygoteLargeObject(self, obj)) {
-      mark_bitmap->Set(obj);
-    }
-  });
+  if (los != nullptr) {
+    // Pick the current live bitmap (mark bitmap if swapped).
+    accounting::LargeObjectBitmap* const live_bitmap = los->GetLiveBitmap();
+    accounting::LargeObjectBitmap* const mark_bitmap = los->GetMarkBitmap();
+    // Walk through all of the objects and explicitly mark the zygote ones so they don't get swept.
+    std::pair<uint8_t*, uint8_t*> range = los->GetBeginEndAtomic();
+    live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(range.first),
+                                  reinterpret_cast<uintptr_t>(range.second),
+                                  [mark_bitmap, los, self](mirror::Object* obj)
+        REQUIRES(Locks::heap_bitmap_lock_)
+        REQUIRES_SHARED(Locks::mutator_lock_) {
+      if (los->IsZygoteLargeObject(self, obj)) {
+        mark_bitmap->Set(obj);
+      }
+    });
+  }
 }
 
 void ConcurrentCopying::SweepLargeObjects(bool swap_bitmaps) {
   TimingLogger::ScopedTiming split("SweepLargeObjects", GetTimings());
-  RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps));
+  if (heap_->GetLargeObjectsSpace() != nullptr) {
+    RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps));
+  }
 }
 
 void ConcurrentCopying::ReclaimPhase() {
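Note: a minimal self-contained sketch of the null-guard pattern the hunk above adopts, with simplified stand-in types (the real space::LargeObjectSpace and gc::Heap interfaces are much richer; the byte count returned by Sweep here is a placeholder).

#include <cstddef>

// Simplified stand-ins for illustration only.
struct LargeObjectSpace {
  size_t Sweep(bool /*swap_bitmaps*/) { return 0u; }  // bytes freed (placeholder)
};

struct Heap {
  LargeObjectSpace* los = nullptr;  // null when -XX:LargeObjectSpace=disabled
  LargeObjectSpace* GetLargeObjectsSpace() { return los; }
};

// With --force-determinism the LOS may not exist, so every use is guarded.
void SweepLargeObjectsSketch(Heap* heap, bool swap_bitmaps) {
  LargeObjectSpace* const los = heap->GetLargeObjectsSpace();
  if (los != nullptr) {
    los->Sweep(swap_bitmaps);  // only sweep when a LOS actually exists
  }
}

Caching the pointer in a local, as MarkZygoteLargeObjects does with los, also avoids calling GetLargeObjectsSpace() twice; the new SweepLargeObjects keeps the repeated call, which is harmless but slightly redundant.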
@@ -2009,7 +2013,6 @@ void ConcurrentCopying::AssertToSpaceInvariantInNonMovingSpace(mirror::Object* o
         heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
     accounting::LargeObjectBitmap* los_bitmap =
         heap_mark_bitmap_->GetLargeObjectBitmap(ref);
-    CHECK(los_bitmap != nullptr) << "LOS bitmap covers the entire address range";
     bool is_los = mark_bitmap == nullptr;
     if ((!is_los && mark_bitmap->Test(ref)) ||
         (is_los && los_bitmap->Test(ref))) {
@@ -2513,7 +2516,6 @@ mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref,
       heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
   accounting::LargeObjectBitmap* los_bitmap =
       heap_mark_bitmap_->GetLargeObjectBitmap(ref);
-  CHECK(los_bitmap != nullptr) << "LOS bitmap covers the entire address range";
   bool is_los = mark_bitmap == nullptr;
   if (!is_los && mark_bitmap->Test(ref)) {
     // Already marked.
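Note on the two removed CHECKs: with the LOS disabled, heap_mark_bitmap_->GetLargeObjectBitmap(ref) can legitimately return null, so asserting that a LOS bitmap always covers the address range no longer holds. A minimal sketch of the bitmap-selection logic, using placeholder types invented for this illustration; the defensive null check on los_bitmap is added only for the sketch, while the real code relies on is_los implying that a large object space (and hence its bitmap) actually exists.

// Placeholder bitmap type; the real classes are
// accounting::ContinuousSpaceBitmap and accounting::LargeObjectBitmap.
struct Bitmap {
  bool Test(const void* /*ref*/) const { return true; }  // placeholder behavior
};

// los_bitmap may now be null (no large object space); is_los is only true when
// no continuous-space bitmap covers the reference.
bool IsMarkedNonMovingSketch(const Bitmap* mark_bitmap,
                             const Bitmap* los_bitmap,
                             const void* ref) {
  const bool is_los = (mark_bitmap == nullptr);
  if (is_los) {
    // Defensive check for the sketch only; see the lead-in above.
    return los_bitmap != nullptr && los_bitmap->Test(ref);
  }
  return mark_bitmap->Test(ref);
}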