diff options
Diffstat (limited to 'runtime/entrypoints/entrypoint_utils.h')
-rw-r--r-- | runtime/entrypoints/entrypoint_utils.h | 38 |
1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h index a28376fadf..f88e81dc61 100644 --- a/runtime/entrypoints/entrypoint_utils.h +++ b/runtime/entrypoints/entrypoint_utils.h @@ -47,12 +47,12 @@ template <const bool kAccessCheck> ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, ArtMethod* method, Thread* self, bool* slow_path) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); ALWAYS_INLINE inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, Thread* self, bool* slow_path) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it // cannot be resolved, throw an error. If it can, use it to create an instance. @@ -63,21 +63,21 @@ ALWAYS_INLINE inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Given the context of a calling Method and a resolved class, create an instance. template <bool kInstrumented> ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass, Thread* self, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Given the context of a calling Method and an initialized class, create an instance. 
template <bool kInstrumented> ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass, Thread* self, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); template <bool kAccessCheck> @@ -85,7 +85,7 @@ ALWAYS_INLINE inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, int32_t component_count, ArtMethod* method, bool* slow_path) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If // it cannot be resolved, throw an error. If it can, use it to create an array. @@ -97,7 +97,7 @@ ALWAYS_INLINE inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); template <bool kAccessCheck, bool kInstrumented> ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, @@ -105,13 +105,13 @@ ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* kl ArtMethod* method, Thread* self, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self, bool access_check, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, int32_t component_count, @@ -119,7 +119,7 @@ extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx, Thread* self, bool access_check, gc::AllocatorType allocator_type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Type of find field operation for fast and slow case. 
enum FindFieldType { @@ -136,45 +136,45 @@ enum FindFieldType { template<FindFieldType type, bool access_check> inline ArtField* FindFieldFromCode( uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); template<InvokeType type, bool access_check> inline ArtMethod* FindMethodFromCode( uint32_t method_idx, mirror::Object** this_object, ArtMethod* referrer, Thread* self) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Fast path field resolution that can't initialize classes or throw exceptions. inline ArtField* FindFieldFast( uint32_t field_idx, ArtMethod* referrer, FindFieldType type, size_t expected_size) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // Fast path method resolution that can't throw exceptions. inline ArtMethod* FindMethodFast( uint32_t method_idx, mirror::Object* this_object, ArtMethod* referrer, bool access_check, InvokeType type) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); inline mirror::Class* ResolveVerifyAndClinit( uint32_t type_idx, ArtMethod* referrer, Thread* self, bool can_run_clinit, bool verify_access) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); // TODO: annotalysis disabled as monitor semantics are maintained in Java code. 
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) NO_THREAD_SAFETY_ANALYSIS; void CheckReferenceResult(mirror::Object* o, Thread* self) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty, jobject rcvr_jobj, jobject interface_art_method_jobj, std::vector<jvalue>& args) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); bool FillArrayData(mirror::Object* obj, const Instruction::ArrayDataPayload* payload) - SHARED_REQUIRES(Locks::mutator_lock_); + REQUIRES_SHARED(Locks::mutator_lock_); template <typename INT_TYPE, typename FLOAT_TYPE> inline INT_TYPE art_float_to_integral(FLOAT_TYPE f); |