 runtime/gc/collector/concurrent_copying.h |  5 +++--
 runtime/jit/profiling_info.cc             | 12 ++++++++----
 runtime/read_barrier-inl.h                | 20 ++++++++++++++++++++
 runtime/read_barrier.h                    |  5 +++++
 4 files changed, 36 insertions(+), 6 deletions(-)
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 37b6a2c541..e4099c8a57 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -130,6 +130,9 @@ class ConcurrentCopying : public GarbageCollector {
   void RevokeThreadLocalMarkStack(Thread* thread)
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_);
+  virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
  private:
   void PushOntoMarkStack(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_);
@@ -192,8 +195,6 @@ class ConcurrentCopying : public GarbageCollector {
                                            bool do_atomic_update) OVERRIDE
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
-  virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
-      REQUIRES_SHARED(Locks::mutator_lock_);
   bool IsMarkedInUnevacFromSpace(mirror::Object* from_ref)
       REQUIRES_SHARED(Locks::mutator_lock_);
   virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* field,
diff --git a/runtime/jit/profiling_info.cc b/runtime/jit/profiling_info.cc
index 7d80d2c208..1bd095a88e 100644
--- a/runtime/jit/profiling_info.cc
+++ b/runtime/jit/profiling_info.cc
@@ -90,13 +90,17 @@ InlineCache* ProfilingInfo::GetInlineCache(uint32_t dex_pc) {
 void ProfilingInfo::AddInvokeInfo(uint32_t dex_pc, mirror::Class* cls) {
   InlineCache* cache = GetInlineCache(dex_pc);
   for (size_t i = 0; i < InlineCache::kIndividualCacheSize; ++i) {
-    mirror::Class* existing = cache->classes_[i].Read();
-    if (existing == cls) {
+    mirror::Class* existing = cache->classes_[i].Read<kWithoutReadBarrier>();
+    mirror::Class* marked = ReadBarrier::IsMarked(existing);
+    if (marked == cls) {
       // Receiver type is already in the cache, nothing else to do.
       return;
-    } else if (existing == nullptr) {
+    } else if (marked == nullptr) {
       // Cache entry is empty, try to put `cls` in it.
-      GcRoot<mirror::Class> expected_root(nullptr);
+      // Note: it's ok to spin on 'existing' here: if 'existing' is not null, that means
+      // it is a stalled heap address, which will only be cleared during SweepSystemWeaks,
+      // *after* this thread hits a suspend point.
+      GcRoot<mirror::Class> expected_root(existing);
       GcRoot<mirror::Class> desired_root(cls);
       if (!reinterpret_cast<Atomic<GcRoot<mirror::Class>>*>(&cache->classes_[i])->
               CompareExchangeStrongSequentiallyConsistent(expected_root, desired_root)) {
diff --git a/runtime/read_barrier-inl.h b/runtime/read_barrier-inl.h
index d3859b0dfa..dbe7f5c957 100644
--- a/runtime/read_barrier-inl.h
+++ b/runtime/read_barrier-inl.h
@@ -182,6 +182,26 @@ inline MirrorType* ReadBarrier::BarrierForRoot(mirror::CompressedReference<Mirro
   }
 }
 
+template <typename MirrorType>
+inline MirrorType* ReadBarrier::IsMarked(MirrorType* ref) {
+  // Only read-barrier configurations can have mutators run while
+  // the GC is marking.
+  if (!kUseReadBarrier) {
+    return ref;
+  }
+  // IsMarked does not handle null, so handle it here.
+  if (ref == nullptr) {
+    return nullptr;
+  }
+  // IsMarked should only be called when the GC is marking.
+  if (!Thread::Current()->GetIsGcMarking()) {
+    return ref;
+  }
+
+  return reinterpret_cast<MirrorType*>(
+      Runtime::Current()->GetHeap()->ConcurrentCopyingCollector()->IsMarked(ref));
+}
+
 inline bool ReadBarrier::IsDuringStartup() {
   gc::Heap* heap = Runtime::Current()->GetHeap();
   if (heap == nullptr) {
diff --git a/runtime/read_barrier.h b/runtime/read_barrier.h
index cbc26977fb..296409014e 100644
--- a/runtime/read_barrier.h
+++ b/runtime/read_barrier.h
@@ -64,6 +64,11 @@ class ReadBarrier {
                                             GcRootSource* gc_root_source = nullptr)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Return the mirror Object if it is marked, or null if not.
+  template <typename MirrorType>
+  ALWAYS_INLINE static MirrorType* IsMarked(MirrorType* ref)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   static bool IsDuringStartup();
 
   // Without the holder object.
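
For context on the CAS change in AddInvokeInfo: the patch reads each inline-cache slot without a read barrier, asks the collector whether the referent is still marked, and, when the slot is logically empty, compare-exchanges against the raw value it read rather than against null, so a stale (unmarked) pointer still matches and is overwritten in place. The following is a minimal, self-contained sketch of that pattern, not ART code: plain std::atomic stands in for ART's Atomic<GcRoot<mirror::Class>>, Klass for mirror::Class, and IsMarkedStub for ReadBarrier::IsMarked.

// Illustrative sketch only; not ART code.
#include <atomic>
#include <cstddef>

struct Klass {};  // Stand-in for mirror::Class.

constexpr size_t kIndividualCacheSize = 5;

// Stand-in for ReadBarrier::IsMarked: when the GC is not marking it returns the
// reference unchanged; during marking it would return null for a dead from-space
// reference. Only the not-marking case is modeled here.
Klass* IsMarkedStub(Klass* ref) { return ref; }

struct InlineCacheSketch {
  std::atomic<Klass*> classes_[kIndividualCacheSize];

  InlineCacheSketch() {
    for (auto& slot : classes_) {
      slot.store(nullptr, std::memory_order_relaxed);
    }
  }

  void AddReceiver(Klass* cls) {
    for (size_t i = 0; i < kIndividualCacheSize; ++i) {
      // Raw read of the slot (the patch uses Read<kWithoutReadBarrier>()).
      Klass* existing = classes_[i].load(std::memory_order_seq_cst);
      Klass* marked = IsMarkedStub(existing);
      if (marked == cls) {
        return;  // Receiver type is already cached.
      } else if (marked == nullptr) {
        // Slot is logically empty. CAS against the value actually read, not
        // against nullptr, mirroring expected_root(existing) in the patch: a
        // stale (unmarked) pointer still matches and gets overwritten.
        Klass* expected = existing;
        if (!classes_[i].compare_exchange_strong(expected, cls)) {
          --i;  // Another thread won the race; re-examine this slot.
        } else {
          return;
        }
      }
    }
  }
};

int main() {
  Klass a, b;
  InlineCacheSketch cache;
  cache.AddReceiver(&a);
  cache.AddReceiver(&b);
  cache.AddReceiver(&a);  // Already present; no new slot is used.
  return 0;
}

When the compare-exchange fails, another thread raced on the slot, so the loop re-examines the same index in case the winner installed the same class; this mirrors the --i retry that the existing ART code keeps.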