ART: Fix mutator lock annotations for new clang++.

A newer clang++ applies thread-safety analysis more strictly to
lambdas, templates and defaulted members. Add the missing
REQUIRES_SHARED(Locks::mutator_lock_) annotations where ObjPtr/
mirror objects are touched, drop the (now rejected) annotations
from `= default` members in reflective_handle.h, and hoist the
ScopedObjectAccess in oatdump's DumpVerifier out to the caller so
the acquired lock is visible to the analysis.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 265153643
Change-Id: Ic76eff61d7ecac4c384032699882d010d88c62a9
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 729b551..4df26e4 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -1141,7 +1141,8 @@
last_class_set.erase(it);
DCHECK(std::none_of(class_table->classes_.begin(),
class_table->classes_.end(),
- [klass, hash](ClassTable::ClassSet& class_set) {
+ [klass, hash](ClassTable::ClassSet& class_set)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
ClassTable::TableSlot slot(klass, hash);
return class_set.FindWithHash(slot, hash) != class_set.end();
}));
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 0d163bd..3561673 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1150,11 +1150,17 @@
if (Runtime::Current() != nullptr) {
// We need to have the handle scope stay live until after the verifier since the verifier has
// a handle to the dex cache from hs.
+ ScopedObjectAccess soa(Thread::Current());
hs.reset(new StackHandleScope<1>(Thread::Current()));
vios->Stream() << "VERIFIER TYPE ANALYSIS:\n";
ScopedIndentation indent2(vios);
- verifier.reset(DumpVerifier(vios, hs.get(),
- dex_method_idx, &dex_file, class_def, code_item,
+ verifier.reset(DumpVerifier(vios,
+ soa,
+ hs.get(),
+ dex_method_idx,
+ &dex_file,
+ class_def,
+ code_item,
method_access_flags));
}
{
@@ -1459,14 +1465,15 @@
}
verifier::MethodVerifier* DumpVerifier(VariableIndentationOutputStream* vios,
+ ScopedObjectAccess& soa,
StackHandleScope<1>* hs,
uint32_t dex_method_idx,
const DexFile* dex_file,
const dex::ClassDef& class_def,
const dex::CodeItem* code_item,
- uint32_t method_access_flags) {
+ uint32_t method_access_flags)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
if ((method_access_flags & kAccNative) == 0) {
- ScopedObjectAccess soa(Thread::Current());
Runtime* const runtime = Runtime::Current();
DCHECK(options_.class_loader_ != nullptr);
Handle<mirror::DexCache> dex_cache = hs->NewHandle(
diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h
index 7904aeb..56e3225 100644
--- a/runtime/common_runtime_test.h
+++ b/runtime/common_runtime_test.h
@@ -46,12 +46,13 @@
using ScopedLogSeverity = android::base::ScopedLogSeverity;
template<class MirrorType>
-static inline ObjPtr<MirrorType> MakeObjPtr(MirrorType* ptr) {
+static inline ObjPtr<MirrorType> MakeObjPtr(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
return ptr;
}
template<class MirrorType>
-static inline ObjPtr<MirrorType> MakeObjPtr(ObjPtr<MirrorType> ptr) {
+static inline ObjPtr<MirrorType> MakeObjPtr(ObjPtr<MirrorType> ptr)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
return ptr;
}
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 9f1f2dc..607c00c 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1819,7 +1819,7 @@
void Heap::VerifyHeap() {
ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
- auto visitor = [&](mirror::Object* obj) {
+ auto visitor = [&](mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS {
VerifyObjectBody(obj);
};
// Technically we need the mutator lock here to call Visit. However, VerifyObjectBody is already
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 3112d27..a6b101b 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -230,10 +230,12 @@
bool PrivateRegionContainsPc(const void* pc) const;
// Return true if the code cache contains this method.
- bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);
+ bool ContainsMethod(ArtMethod* method)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::jit_lock_);
// Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
- const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);
+ const void* GetJniStubCode(ArtMethod* method)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::jit_lock_);
// Allocate a region for both code and data in the JIT code cache.
// The reserved memory is left completely uninitialized.
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 55a30c3..76de550 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -736,7 +736,7 @@
}
template<VerifyObjectFlags kVerifyFlags>
- ALWAYS_INLINE void Verify() {
+ ALWAYS_INLINE void Verify() REQUIRES_SHARED(Locks::mutator_lock_) {
if (kVerifyFlags & kVerifyThis) {
VerifyObject(this);
}
@@ -744,21 +744,23 @@
// Not ObjPtr since the values may be unaligned for logic in verification.cc.
template<VerifyObjectFlags kVerifyFlags, typename Reference>
- ALWAYS_INLINE static void VerifyRead(Reference value) {
+ ALWAYS_INLINE static void VerifyRead(Reference value) REQUIRES_SHARED(Locks::mutator_lock_) {
if (kVerifyFlags & kVerifyReads) {
VerifyObject(value);
}
}
template<VerifyObjectFlags kVerifyFlags>
- ALWAYS_INLINE static void VerifyWrite(ObjPtr<mirror::Object> value) {
+ ALWAYS_INLINE static void VerifyWrite(ObjPtr<mirror::Object> value)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
if (kVerifyFlags & kVerifyWrites) {
VerifyObject(value);
}
}
template<VerifyObjectFlags kVerifyFlags>
- ALWAYS_INLINE void VerifyCAS(ObjPtr<mirror::Object> new_value, ObjPtr<mirror::Object> old_value) {
+ ALWAYS_INLINE void VerifyCAS(ObjPtr<mirror::Object> new_value, ObjPtr<mirror::Object> old_value)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
Verify<kVerifyFlags>();
VerifyRead<kVerifyFlags>(old_value);
VerifyWrite<kVerifyFlags>(new_value);
diff --git a/runtime/monitor.h b/runtime/monitor.h
index dd4c21c..ad7a0b4 100644
--- a/runtime/monitor.h
+++ b/runtime/monitor.h
@@ -134,7 +134,7 @@
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
ObjPtr<mirror::Object> GetObject() REQUIRES_SHARED(Locks::mutator_lock_);
- void SetObject(ObjPtr<mirror::Object> object);
+ void SetObject(ObjPtr<mirror::Object> object) REQUIRES_SHARED(Locks::mutator_lock_);
// Provides no memory ordering guarantees.
Thread* GetOwner() const {
diff --git a/runtime/reflective_handle.h b/runtime/reflective_handle.h
index 014d976..c2dbf53 100644
--- a/runtime/reflective_handle.h
+++ b/runtime/reflective_handle.h
@@ -81,14 +81,12 @@
public:
MutableReflectiveHandle() {}
- ALWAYS_INLINE MutableReflectiveHandle(const MutableReflectiveHandle<T>& handle)
- REQUIRES_SHARED(Locks::mutator_lock_) = default;
+ ALWAYS_INLINE MutableReflectiveHandle(const MutableReflectiveHandle<T>& handle) = default;
ALWAYS_INLINE MutableReflectiveHandle<T>& operator=(const MutableReflectiveHandle<T>& handle)
- REQUIRES_SHARED(Locks::mutator_lock_) = default;
+ = default;
ALWAYS_INLINE explicit MutableReflectiveHandle(ReflectiveReference<T>* reference)
- REQUIRES_SHARED(Locks::mutator_lock_)
: ReflectiveHandle<T>(reference) {}
ALWAYS_INLINE T* Assign(T* reference) REQUIRES_SHARED(Locks::mutator_lock_) {
diff --git a/runtime/thread.cc b/runtime/thread.cc
index ab4069c..14865cb 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -214,7 +214,9 @@
JValue GetReturnValue() const { return ret_val_; }
bool IsReference() const { return is_reference_; }
bool GetFromCode() const { return from_code_; }
- ObjPtr<mirror::Throwable> GetPendingException() const { return pending_exception_; }
+ ObjPtr<mirror::Throwable> GetPendingException() const REQUIRES_SHARED(Locks::mutator_lock_) {
+ return pending_exception_;
+ }
DeoptimizationContextRecord* GetLink() const { return link_; }
mirror::Object** GetReturnValueAsGCRoot() {
DCHECK(is_reference_);