Fix x86 build.
Also fix attributes/annotalysis on entrypoint_utils functions now that we
have clang, which is smarter about warnings than GCC.
Change-Id: I69257b4ad9a27d07acbc973d21a1cfa4260a8ed6
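
The annotation changes below rely on clang's thread-safety analysis actually
enforcing attributes that GCC ignored: the diff keeps the lock annotations on
the declarations in entrypoint_utils.h, where callers are checked, and drops
the duplicated NO_THREAD_SAFETY_ANALYSIS from the definitions in the -inl.h.
A minimal sketch of how the two macros behave under clang -Wthread-safety;
this is not ART code, the macro expansions below use current clang attribute
spellings, and FakeMutatorLock/ReadHeapWord are illustrative names:

// Sketch only: illustrative expansions of the ART macros.
#define SHARED_LOCKS_REQUIRED(...) \
    __attribute__((requires_shared_capability(__VA_ARGS__)))
#define NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))

// Stand-in for Locks::mutator_lock_.
struct __attribute__((capability("mutex"))) FakeMutatorLock {
  void SharedLock() __attribute__((acquire_shared_capability())) {}
  void SharedUnlock() __attribute__((release_shared_capability())) {}
};
FakeMutatorLock fake_mutator_lock;

// Annotate the declaration that callers see; call sites are checked against
// it, and the definition inherits the requirement.
int ReadHeapWord(const int* addr) SHARED_LOCKS_REQUIRED(fake_mutator_lock);
int ReadHeapWord(const int* addr) {
  return *addr;
}

int heap_word = 42;

// Analysis is switched off inside this function, so the otherwise-unchecked
// call below does not warn.
int UncheckedCaller() NO_THREAD_SAFETY_ANALYSIS {
  return ReadHeapWord(&heap_word);
}

With clang and -Wthread-safety, calling ReadHeapWord from a function that is
neither annotated with SHARED_LOCKS_REQUIRED nor marked
NO_THREAD_SAFETY_ANALYSIS produces a warning; in ART the macros are presumably
no-ops for compilers without the analysis, so GCC builds never checked them.
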
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 542e1a9..90c8fcf 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -25,7 +25,6 @@
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
-#include "method_helper.h"
#include "mirror/art_method.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
@@ -38,10 +37,9 @@
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <const bool kAccessCheck>
-ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
- mirror::ArtMethod* method,
- Thread* self, bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
+ mirror::ArtMethod* method,
+ Thread* self, bool* slow_path) {
mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
if (UNLIKELY(klass == NULL)) {
klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
@@ -88,9 +86,9 @@
}
// TODO: Fix no thread safety analysis when annotalysis is smarter.
-ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
- Thread* self, bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
+ Thread* self,
+ bool* slow_path) {
if (UNLIKELY(!klass->IsInitialized())) {
StackHandleScope<1> hs(self);
Handle<mirror::Class> h_class(hs.NewHandle(klass));
@@ -118,11 +116,10 @@
// check.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kAccessCheck, bool kInstrumented>
-ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
- mirror::ArtMethod* method,
- Thread* self,
- gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
+ mirror::ArtMethod* method,
+ Thread* self,
+ gc::AllocatorType allocator_type) {
bool slow_path = false;
mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
if (UNLIKELY(slow_path)) {
@@ -138,11 +135,10 @@
// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
-ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
- mirror::ArtMethod* method,
- Thread* self,
- gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
+ mirror::ArtMethod* method,
+ Thread* self,
+ gc::AllocatorType allocator_type) {
DCHECK(klass != nullptr);
bool slow_path = false;
klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
@@ -161,11 +157,10 @@
// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
-ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
- mirror::ArtMethod* method,
- Thread* self,
- gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
+ mirror::ArtMethod* method,
+ Thread* self,
+ gc::AllocatorType allocator_type) {
DCHECK(klass != nullptr);
// Pass in false since the object can not be finalizable.
return klass->Alloc<kInstrumented, false>(self, allocator_type);
@@ -174,11 +169,10 @@
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck>
-ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
- mirror::ArtMethod* method,
- int32_t component_count,
- bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
+ mirror::ArtMethod* method,
+ int32_t component_count,
+ bool* slow_path) {
if (UNLIKELY(component_count < 0)) {
ThrowNegativeArraySizeException(component_count);
*slow_path = true;
@@ -211,12 +205,11 @@
// check.
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck, bool kInstrumented>
-ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
- mirror::ArtMethod* method,
- int32_t component_count,
- Thread* self,
- gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
+ mirror::ArtMethod* method,
+ int32_t component_count,
+ Thread* self,
+ gc::AllocatorType allocator_type) {
bool slow_path = false;
mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
&slow_path);
@@ -234,12 +227,11 @@
}
template <bool kAccessCheck, bool kInstrumented>
-ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
- mirror::ArtMethod* method,
- int32_t component_count,
- Thread* self,
- gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS {
+static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
+ mirror::ArtMethod* method,
+ int32_t component_count,
+ Thread* self,
+ gc::AllocatorType allocator_type) {
DCHECK(klass != nullptr);
if (UNLIKELY(component_count < 0)) {
ThrowNegativeArraySizeException(component_count);
@@ -476,8 +468,7 @@
// Fast path field resolution that can't initialize classes or throw exceptions.
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
mirror::ArtMethod* referrer,
- FindFieldType type, size_t expected_size)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ FindFieldType type, size_t expected_size) {
mirror::ArtField* resolved_field =
referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
if (UNLIKELY(resolved_field == nullptr)) {
@@ -534,8 +525,7 @@
static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
mirror::Object* this_object,
mirror::ArtMethod* referrer,
- bool access_check, InvokeType type)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ bool access_check, InvokeType type) {
bool is_direct = type == kStatic || type == kDirect;
if (UNLIKELY(this_object == NULL && !is_direct)) {
return NULL;
@@ -576,8 +566,7 @@
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
mirror::ArtMethod* referrer,
Thread* self, bool can_run_clinit,
- bool verify_access)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ bool verify_access) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
if (UNLIKELY(klass == nullptr)) {
@@ -611,14 +600,12 @@
}
static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
- uint32_t string_idx)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ uint32_t string_idx) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
return class_linker->ResolveString(string_idx, referrer);
}
-static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
- NO_THREAD_SAFETY_ANALYSIS /* SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) */ {
+static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
// Save any pending exception over monitor exit call.
mirror::Throwable* saved_exception = NULL;
ThrowLocation saved_throw_location;
@@ -642,27 +629,7 @@
}
}
-static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- if (o == NULL) {
- return;
- }
- mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
- if (o == kInvalidIndirectRefObject) {
- JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
- }
- // Make sure that the result is an instance of the type this method was expected to return.
- StackHandleScope<1> hs(self);
- Handle<mirror::ArtMethod> h_m(hs.NewHandle(m));
- mirror::Class* return_type = MethodHelper(h_m).GetReturnType();
-
- if (!o->InstanceOf(return_type)) {
- JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(),
- PrettyMethod(h_m.Get()).c_str());
- }
-}
-
-static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+static inline void CheckSuspend(Thread* thread) {
for (;;) {
if (thread->ReadFlag(kCheckpointRequest)) {
thread->RunCheckpointFunction();
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index d063dfb..0fa0e41 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -20,6 +20,7 @@
#include "class_linker-inl.h"
#include "dex_file-inl.h"
#include "gc/accounting/card_table-inl.h"
+#include "method_helper-inl.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
@@ -138,6 +139,25 @@
self->ResetDefaultStackEnd(!explicit_overflow_check); // Return to default stack size.
}
+void CheckReferenceResult(mirror::Object* o, Thread* self) {
+ if (o == NULL) {
+ return;
+ }
+ mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
+ if (o == kInvalidIndirectRefObject) {
+ JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
+ }
+ // Make sure that the result is an instance of the type this method was expected to return.
+ StackHandleScope<1> hs(self);
+ Handle<mirror::ArtMethod> h_m(hs.NewHandle(m));
+ mirror::Class* return_type = MethodHelper(h_m).GetReturnType();
+
+ if (!o->InstanceOf(return_type)) {
+ JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(),
+ PrettyMethod(h_m.Get()).c_str());
+ }
+}
+
JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty,
jobject rcvr_jobj, jobject interface_method_jobj,
std::vector<jvalue>& args) {
diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h
index 11a67ac..c5d67aa 100644
--- a/runtime/entrypoints/entrypoint_utils.h
+++ b/runtime/entrypoints/entrypoint_utils.h
@@ -45,12 +45,12 @@
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
mirror::ArtMethod* method,
Thread* self, bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// TODO: Fix no thread safety analysis when annotalysis is smarter.
ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
Thread* self, bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
@@ -61,7 +61,8 @@
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
mirror::ArtMethod* method,
Thread* self,
- gc::AllocatorType allocator_type);
+ gc::AllocatorType allocator_type)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
@@ -70,7 +71,7 @@
mirror::ArtMethod* method,
Thread* self,
gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
@@ -78,7 +79,8 @@
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
mirror::ArtMethod* method,
Thread* self,
- gc::AllocatorType allocator_type);
+ gc::AllocatorType allocator_type)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
@@ -87,7 +89,7 @@
mirror::ArtMethod* method,
int32_t component_count,
bool* slow_path)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
@@ -100,7 +102,7 @@
int32_t component_count,
Thread* self,
gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
@@ -108,7 +110,7 @@
int32_t component_count,
Thread* self,
gc::AllocatorType allocator_type)
- NO_THREAD_SAFETY_ANALYSIS;
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method,
int32_t component_count, Thread* self,
@@ -171,10 +173,11 @@
uint32_t string_idx)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+// TODO: annotalysis disabled as monitor semantics are maintained in Java code.
static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ NO_THREAD_SAFETY_ANALYSIS;
-static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
+void CheckReferenceResult(mirror::Object* o, Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
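
For the CheckReferenceResult hunks above: moving the body from a static inline
definition in entrypoint_utils-inl.h to an out-of-line definition in
entrypoint_utils.cc is what lets the inline header drop its method_helper.h
include, since only the .cc now needs MethodHelper. A minimal sketch of that
header/translation-unit split, using hypothetical names (Thing, HeavyHelper,
CheckThing) rather than ART code:

// --- what would live in the hypothetical heavy_helper.h ---
struct Thing { bool ok = true; };
struct HeavyHelper {                  // Stand-in for MethodHelper.
  explicit HeavyHelper(Thing* t) : thing_(t) {}
  bool Validate() const { return thing_ != nullptr && thing_->ok; }
  Thing* thing_;
};

// --- what would live in the hypothetical widget.h ---
// Declaration only (in the real split a forward declaration of Thing would
// suffice here): users of the header never pull in heavy_helper.h, and any
// lock annotations still belong here where callers can see them.
bool CheckThing(Thing* thing);

// --- what would live in the hypothetical widget.cc ---
// The single translation unit that pays for the heavy include and defines
// the function out of line.
bool CheckThing(Thing* thing) {
  HeavyHelper helper(thing);
  return helper.Validate();
}

Only the one .cc pays for the heavy include; every other includer of the
header sees just the declaration.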