author      2017-08-17 16:10:09 +0100
committer   2017-08-29 11:29:31 +0100
commit      5122e6ba34d46851cd89f2ad55bf6bb067e038d6 (patch)
tree        e96ba37b6451be7a06d930b0274251cac35ce05e /runtime
parent      02cb397857c979dffae95e2db2678a72ec407cf0 (diff)
ART: Remove ArtMethod::dex_cache_resolved_methods_.
Test: m test-art-host-gtest
Test: testrunner.py --host
Test: testrunner.py --target on Nexus 6P
Test: Repeat the above tests with ART_HEAP_POISONING=true
Test: Build aosp_mips64-eng
Change-Id: I9cd0b8aa5001542b0863cccfca4f9c1cd4d25396
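
The shortcut pointer ArtMethod::dex_cache_resolved_methods_ is removed; callers now reach the
resolved-methods array through the declaring class's DexCache. The following is a rough sketch
(not part of the commit) of the new lookup path, simplified from the class_linker-inl.h changes
below; the helper name is illustrative and read-barrier/error-handling details are omitted:

    // Sketch only: how a referrer reaches a resolved method after this change.
    ArtMethod* LookupResolvedMethodViaDexCache(ArtMethod* referrer,
                                               uint32_t method_idx,
                                               PointerSize pointer_size)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      // Previously: referrer->GetDexCacheResolvedMethod(method_idx, pointer_size),
      // which read the per-method shortcut pointer. Now the DexCache is reached
      // through declaring_class_->dex_cache_ instead.
      ObjPtr<mirror::DexCache> dex_cache = referrer->GetDexCache();
      return dex_cache->GetResolvedMethod(method_idx, pointer_size);
    }

The assembly trampolines below follow the same path: load the declaring class, check the obsolete
flag, load the DexCache, then index into its resolved-methods array.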
Diffstat (limited to 'runtime')
-rw-r--r--  runtime/arch/arm/quick_entrypoints_arm.S                    15
-rw-r--r--  runtime/arch/arm64/quick_entrypoints_arm64.S                12
-rw-r--r--  runtime/arch/mips/quick_entrypoints_mips.S                  10
-rw-r--r--  runtime/arch/mips64/quick_entrypoints_mips64.S              10
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S                     9
-rw-r--r--  runtime/arch/x86_64/quick_entrypoints_x86_64.S               9
-rw-r--r--  runtime/art_method-inl.h                                     67
-rw-r--r--  runtime/art_method.h                                         36
-rw-r--r--  runtime/class_linker-inl.h                                    6
-rw-r--r--  runtime/class_linker.cc                                      38
-rw-r--r--  runtime/class_linker_test.cc                                  5
-rw-r--r--  runtime/entrypoints/entrypoint_utils-inl.h                    4
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc   18
-rw-r--r--  runtime/gc/space/image_space.cc                               2
-rw-r--r--  runtime/generated/asm_support_gen.h                          22
-rw-r--r--  runtime/image.cc                                              2
16 files changed, 97 insertions, 168 deletions
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index ea8d501939..ab9ca840ef 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1596,10 +1596,21 @@ ENTRY art_quick_imt_conflict_trampoline
     .cfi_rel_offset r1, 0
     .cfi_rel_offset r2, 4
     ldr r4, [sp, #(2 * 4)]  // Load referrer.
+    ldr r2, [r0, #ART_METHOD_JNI_OFFSET_32]  // Load ImtConflictTable
+    // Load the declaring class (without read barrier) and access flags (for obsolete method check).
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+#if ART_METHOD_ACCESS_FLAGS_OFFSET != ART_METHOD_DECLARING_CLASS_OFFSET + 4
+#error "Expecting declaring class and access flags to be consecutive for LDRD."
+#endif
+    ldrd r0, r1, [r4, #ART_METHOD_DECLARING_CLASS_OFFSET]
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    lsrs r1, #(ACC_OBSOLETE_METHOD_SHIFT + 1)
+    bcs .Limt_conflict_trampoline_dex_cache_miss
+    ldr r4, [r0, #MIRROR_CLASS_DEX_CACHE_OFFSET]  // Load the DexCache (without read barrier).
+    UNPOISON_HEAP_REF r4
     ubfx r1, r12, #0, #METHOD_DEX_CACHE_HASH_BITS  // Calculate DexCache method slot index.
-    ldr r4, [r4, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_32]  // Load dex cache methods array
+    ldr r4, [r4, #MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET]  // Load the resolved methods.
     add r4, r4, r1, lsl #(POINTER_SIZE_SHIFT + 1)  // Load DexCache method slot address.
-    ldr r2, [r0, #ART_METHOD_JNI_OFFSET_32]  // Load ImtConflictTable
 
     // FIXME: Configure the build to use the faster code when appropriate.
     // Currently we fall back to the slower version.
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 6c9ce93d2e..adfc88fd35 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -2060,8 +2060,18 @@ END art_quick_proxy_invoke_handler
     .extern artLookupResolvedMethod
 ENTRY art_quick_imt_conflict_trampoline
     ldr xIP0, [sp, #0]  // Load referrer
+    // Load the declaring class (without read barrier) and access flags (for obsolete method check).
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+#if ART_METHOD_ACCESS_FLAGS_OFFSET != ART_METHOD_DECLARING_CLASS_OFFSET + 4
+#error "Expecting declaring class and access flags to be consecutive for LDP."
+#endif
+    ldp wIP0, w15, [xIP0, #ART_METHOD_DECLARING_CLASS_OFFSET]
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    tbnz x15, #ACC_OBSOLETE_METHOD_SHIFT, .Limt_conflict_trampoline_dex_cache_miss
+    ldr wIP0, [xIP0, #MIRROR_CLASS_DEX_CACHE_OFFSET]  // Load the DexCache (without read barrier).
+    UNPOISON_HEAP_REF wIP0
     ubfx x15, xIP1, #0, #METHOD_DEX_CACHE_HASH_BITS  // Calculate DexCache method slot index.
-    ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64]  // Load dex cache methods array
+    ldr xIP0, [xIP0, #MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET]  // Load the resolved methods.
     add xIP0, xIP0, x15, lsl #(POINTER_SIZE_SHIFT + 1)  // Load DexCache method slot address.
 
     // Relaxed atomic load x14:x15 from the dex cache slot.
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index bb82d5819d..b876353183 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -2112,10 +2112,18 @@ ENTRY art_quick_imt_conflict_trampoline
     SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY /* save_s4_thru_s8 */ 0
     lw      $t8, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp)  # $t8 = referrer.
 
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+    lw      $t9, ART_METHOD_ACCESS_FLAGS_OFFSET($t8)  # $t9 = access flags.
+    sll     $t9, $t9, 31 - ACC_OBSOLETE_METHOD_SHIFT  # Move obsolete method bit to sign bit.
+    bltz    $t9, .Limt_conflict_trampoline_dex_cache_miss
+    lw      $t8, ART_METHOD_DECLARING_CLASS_OFFSET($t8)  # $t8 = declaring class (no read barrier).
+    lw      $t8, MIRROR_CLASS_DEX_CACHE_OFFSET($t8)  # $t8 = dex cache (without read barrier).
+    UNPOISON_HEAP_REF $t8
     la      $t9, __atomic_load_8
     addiu   $sp, $sp, -ARG_SLOT_SIZE  # Reserve argument slots on the stack.
     .cfi_adjust_cfa_offset ARG_SLOT_SIZE
-    lw      $t8, ART_METHOD_DEX_CACHE_METHODS_OFFSET_32($t8)  # $t8 = dex cache methods array.
+    lw      $t8, MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET($t8)  # $t8 = dex cache methods array.
 
     move    $s2, $t7  # $s2 = method index (callee-saved).
     lw      $s3, ART_METHOD_JNI_OFFSET_32($a0)  # $s3 = ImtConflictTable (callee-saved).
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 7350c8521a..eeaae3c698 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -2024,8 +2024,16 @@ ENTRY art_quick_imt_conflict_trampoline
     SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL /* save_s4_thru_s8 */ 0
     ld      $t1, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp)  # $t1 = referrer.
 
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+    lw      $t9, ART_METHOD_ACCESS_FLAGS_OFFSET($t1)  # $t9 = access flags.
+    sll     $t9, $t9, 31 - ACC_OBSOLETE_METHOD_SHIFT  # Move obsolete method bit to sign bit.
+    bltzc   $t9, .Limt_conflict_trampoline_dex_cache_miss
+    lwu     $t1, ART_METHOD_DECLARING_CLASS_OFFSET($t1)  # $t1 = declaring class (no read barrier).
+    lwu     $t1, MIRROR_CLASS_DEX_CACHE_OFFSET($t1)  # $t1 = dex cache (without read barrier).
+    UNPOISON_HEAP_REF $t1
     dla     $t9, __atomic_load_16
-    ld      $t1, ART_METHOD_DEX_CACHE_METHODS_OFFSET_64($t1)  # $t1 = dex cache methods array.
+    ld      $t1, MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET($t1)  # $t1 = dex cache methods array.
 
     dext    $s2, $t0, 0, 32  # $s2 = zero-extended method index
                              # (callee-saved).
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index af82e08698..eecca58a41 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1787,7 +1787,14 @@ DEFINE_FUNCTION art_quick_imt_conflict_trampoline
     PUSH ESI
     PUSH EDX
     movl 16(%esp), %edi  // Load referrer.
-    movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi  // Load dex cache methods array.
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+    testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%edi)
+    jnz .Limt_conflict_trampoline_dex_cache_miss
+    movl ART_METHOD_DECLARING_CLASS_OFFSET(%edi), %edi  // Load declaring class (no read barrier).
+    movl MIRROR_CLASS_DEX_CACHE_OFFSET(%edi), %edi  // Load the DexCache (without read barrier).
+    UNPOISON_HEAP_REF edi
+    movl MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%edi), %edi  // Load the resolved methods.
     pushl ART_METHOD_JNI_OFFSET_32(%eax)  // Push ImtConflictTable.
     CFI_ADJUST_CFA_OFFSET(4)
     movd %xmm7, %eax  // Get target method index stored in xmm7.
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 6bf08289ee..2c3da90f25 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1646,7 +1646,14 @@ DEFINE_FUNCTION art_quick_imt_conflict_trampoline
     int3
 #else
     movq __SIZEOF_POINTER__(%rsp), %r10  // Load referrer.
-    movq ART_METHOD_DEX_CACHE_METHODS_OFFSET_64(%r10), %r10  // Load dex cache methods array.
+    // If the method is obsolete, just go through the dex cache miss slow path.
+    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
+    testl LITERAL(ACC_OBSOLETE_METHOD), ART_METHOD_ACCESS_FLAGS_OFFSET(%r10)
+    jnz .Limt_conflict_trampoline_dex_cache_miss
+    movl ART_METHOD_DECLARING_CLASS_OFFSET(%r10), %r10d  // Load declaring class (no read barrier).
+    movl MIRROR_CLASS_DEX_CACHE_OFFSET(%r10), %r10d  // Load the DexCache (without read barrier).
+    UNPOISON_HEAP_REF r10d
+    movq MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET(%r10), %r10  // Load the resolved methods.
     mov %eax, %r11d  // Remember method index in R11.
     andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax  // Calculate DexCache method slot index.
     shll LITERAL(1), %eax  // Multiply by 2 as entries have size 2 * __SIZEOF_POINTER__.
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 11f825353b..1588920e94 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -102,58 +102,6 @@ inline uint32_t ArtMethod::GetDexMethodIndex() {
   return GetDexMethodIndexUnchecked();
 }
 
-inline mirror::MethodDexCacheType* ArtMethod::GetDexCacheResolvedMethods(PointerSize pointer_size) {
-  return GetNativePointer<mirror::MethodDexCacheType*>(DexCacheResolvedMethodsOffset(pointer_size),
-                                                       pointer_size);
-}
-
-inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index,
-                                                       PointerSize pointer_size) {
-  // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
-  // without accessing the DexCache and we don't want to do that in release build.
-  DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
-  uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
-  DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
-  mirror::MethodDexCachePair pair = mirror::DexCache::GetNativePairPtrSize(
-      GetDexCacheResolvedMethods(pointer_size), slot_idx, pointer_size);
-  ArtMethod* method = pair.GetObjectForIndex(method_index);
-  if (LIKELY(method != nullptr)) {
-    auto* declaring_class = method->GetDeclaringClass();
-    if (LIKELY(declaring_class == nullptr || !declaring_class->IsErroneous())) {
-      return method;
-    }
-  }
-  return nullptr;
-}
-
-inline void ArtMethod::SetDexCacheResolvedMethod(uint16_t method_index,
-                                                 ArtMethod* new_method,
-                                                 PointerSize pointer_size) {
-  // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
-  // without accessing the DexCache and we don't want to do that in release build.
-  DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
-  DCHECK(new_method == nullptr || new_method->GetDeclaringClass() != nullptr);
-  uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
-  DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
-  mirror::MethodDexCachePair pair(new_method, method_index);
-  mirror::DexCache::SetNativePairPtrSize(
-      GetDexCacheResolvedMethods(pointer_size), slot_idx, pair, pointer_size);
-}
-
-inline bool ArtMethod::HasDexCacheResolvedMethods(PointerSize pointer_size) {
-  return GetDexCacheResolvedMethods(pointer_size) != nullptr;
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size) {
-  return GetDexCacheResolvedMethods(pointer_size) ==
-      other->GetDexCacheResolvedMethods(pointer_size);
-}
-
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
-                                                      PointerSize pointer_size) {
-  return GetDexCacheResolvedMethods(pointer_size) == other_cache;
-}
-
 inline ObjPtr<mirror::Class> ArtMethod::LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx) {
   ObjPtr<mirror::DexCache> dex_cache = GetDexCache();
   ObjPtr<mirror::Class> type = dex_cache->GetResolvedType(type_idx);
@@ -403,13 +351,6 @@ inline ArtMethod* ArtMethod::GetInterfaceMethodIfProxy(PointerSize pointer_size)
   return interface_method;
 }
 
-inline void ArtMethod::SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
-                                                  PointerSize pointer_size) {
-  SetNativePointer(DexCacheResolvedMethodsOffset(pointer_size),
-                   new_dex_cache_methods,
-                   pointer_size);
-}
-
 inline dex::TypeIndex ArtMethod::GetReturnTypeIndex() {
   DCHECK(!IsProxyMethod());
   const DexFile* dex_file = GetDexFile();
@@ -489,18 +430,12 @@ void ArtMethod::VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) {
 }
 
 template <typename Visitor>
-inline void ArtMethod::UpdateObjectsForImageRelocation(const Visitor& visitor,
-                                                       PointerSize pointer_size) {
+inline void ArtMethod::UpdateObjectsForImageRelocation(const Visitor& visitor) {
   mirror::Class* old_class = GetDeclaringClassUnchecked<kWithoutReadBarrier>();
   mirror::Class* new_class = visitor(old_class);
   if (old_class != new_class) {
     SetDeclaringClass(new_class);
   }
-  mirror::MethodDexCacheType* old_methods = GetDexCacheResolvedMethods(pointer_size);
-  mirror::MethodDexCacheType* new_methods = visitor(old_methods);
-  if (old_methods != new_methods) {
-    SetDexCacheResolvedMethods(new_methods, pointer_size);
-  }
 }
 
 template <ReadBarrierOption kReadBarrierOption, typename Visitor>
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 64988f2528..2d677617d9 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -137,6 +137,10 @@ class ArtMethod FINAL {
     } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
   }
 
+  static MemberOffset AccessFlagsOffset() {
+    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
+  }
+
   // Approximate what kind of method call would be used for this method.
   InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -356,26 +360,6 @@ class ArtMethod FINAL {
     dex_method_index_ = new_idx;
   }
 
-  ALWAYS_INLINE mirror::MethodDexCacheType* GetDexCacheResolvedMethods(PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
-                                                     PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
-                                               ArtMethod* new_method,
-                                               PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  ALWAYS_INLINE void SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
-                                                PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  bool HasDexCacheResolvedMethods(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
-  bool HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-  bool HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
-                                      PointerSize pointer_size)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Lookup the Class* from the type index into this method's dex cache.
   ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -427,12 +411,6 @@ class ArtMethod FINAL {
 
   void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);
 
-  static MemberOffset DexCacheResolvedMethodsOffset(PointerSize pointer_size) {
-    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
-        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*)
-            * static_cast<size_t>(pointer_size));
-  }
-
   static MemberOffset DataOffset(PointerSize pointer_size) {
     return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
         PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
@@ -686,8 +664,7 @@ class ArtMethod FINAL {
   // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
   // Does not use read barrier.
   template <typename Visitor>
-  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor,
-                                                     PointerSize pointer_size)
+  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Update entry points by passing them through the visitor.
@@ -728,9 +705,6 @@ class ArtMethod FINAL {
 
   // Must be the last fields in the method.
   struct PtrSizedFields {
-    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
-    mirror::MethodDexCacheType* dex_cache_resolved_methods_;
-
     // Depending on the method type, the data is
     //   - native method: pointer to the JNI function registered to this method
     //                    or a function to resolve the JNI function,
diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h
index 439ecaf28e..d6f003027b 100644
--- a/runtime/class_linker-inl.h
+++ b/runtime/class_linker-inl.h
@@ -186,7 +186,8 @@ inline ArtMethod* ClassLinker::GetResolvedMethod(uint32_t method_idx, ArtMethod*
   // lookup in the context of the original method from where it steals the code.
   // However, we delay the GetInterfaceMethodIfProxy() until needed.
   DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
-  ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
+  ArtMethod* resolved_method = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedMethod(
+      method_idx, image_pointer_size_);
   if (resolved_method == nullptr) {
     return nullptr;
   }
@@ -226,7 +227,8 @@ inline ArtMethod* ClassLinker::ResolveMethod(Thread* self,
   // However, we delay the GetInterfaceMethodIfProxy() until needed.
   DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
   Thread::PoisonObjectPointersIfDebug();
-  ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
+  ArtMethod* resolved_method = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedMethod(
+      method_idx, image_pointer_size_);
   DCHECK(resolved_method == nullptr || !resolved_method->IsRuntimeMethod());
   if (UNLIKELY(resolved_method == nullptr)) {
     referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 051c0c2938..1beb7837d4 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1115,35 +1115,6 @@ static bool FlattenPathClassLoader(ObjPtr<mirror::ClassLoader> class_loader,
   return true;
 }
 
-class FixupArtMethodArrayVisitor : public ArtMethodVisitor {
- public:
-  explicit FixupArtMethodArrayVisitor(const ImageHeader& header) : header_(header) {}
-
-  virtual void Visit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
-    const bool is_copied = method->IsCopied();
-    mirror::MethodDexCacheType* resolved_methods =
-        method->GetDexCacheResolvedMethods(kRuntimePointerSize);
-    if (resolved_methods != nullptr) {
-      bool in_image_space = false;
-      if (kIsDebugBuild || is_copied) {
-        in_image_space = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays).Contains(
-            reinterpret_cast<const uint8_t*>(resolved_methods) - header_.GetImageBegin());
-      }
-      // Must be in image space for non-miranda method.
-      DCHECK(is_copied || in_image_space)
-          << resolved_methods << " is not in image starting at "
-          << reinterpret_cast<void*>(header_.GetImageBegin());
-      if (!is_copied || in_image_space) {
-        method->SetDexCacheResolvedMethods(method->GetDexCache()->GetResolvedMethods(),
-                                           kRuntimePointerSize);
-      }
-    }
-  }
-
- private:
-  const ImageHeader& header_;
-};
-
 class VerifyDeclaringClassVisitor : public ArtMethodVisitor {
  public:
   VerifyDeclaringClassVisitor() REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_)
@@ -1492,12 +1463,6 @@ bool AppImageClassLoadersAndDexCachesHelper::Update(
     FixupInternVisitor fixup_intern_visitor;
     bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_intern_visitor);
   }
-  if (*out_forward_dex_cache_array) {
-    ScopedTrace timing("Fixup ArtMethod dex cache arrays");
-    FixupArtMethodArrayVisitor visitor(header);
-    header.VisitPackedArtMethods(&visitor, space->Begin(), kRuntimePointerSize);
-    Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(class_loader.Get());
-  }
   if (kVerifyArtMethodDeclaringClasses) {
     ScopedTrace timing("Verify declaring classes");
     ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
@@ -3444,8 +3409,6 @@ void ClassLinker::LoadMethod(const DexFile& dex_file,
   dst->SetDeclaringClass(klass.Get());
   dst->SetCodeItemOffset(it.GetMethodCodeItemOffset());
 
-  dst->SetDexCacheResolvedMethods(klass->GetDexCache()->GetResolvedMethods(), image_pointer_size_);
-
   uint32_t access_flags = it.GetMethodAccessFlags();
 
   if (UNLIKELY(strcmp("finalize", method_name) == 0)) {
@@ -4729,7 +4692,6 @@ void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) cons
   // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
   // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
-  CHECK(prototype->HasSameDexCacheResolvedMethods(method, image_pointer_size_));
   auto* np = method->GetInterfaceMethodIfProxy(image_pointer_size_);
   CHECK_EQ(prototype->GetDeclaringClass()->GetDexCache(), np->GetDexCache());
   CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 5e9707c062..f887b8ed42 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -245,11 +245,6 @@ class ClassLinkerTest : public CommonRuntimeTest {
     EXPECT_TRUE(method->GetDeclaringClass() != nullptr);
     EXPECT_TRUE(method->GetName() != nullptr);
     EXPECT_TRUE(method->GetSignature() != Signature::NoSignature());
-
-    EXPECT_TRUE(method->HasDexCacheResolvedMethods(kRuntimePointerSize));
-    EXPECT_TRUE(method->HasSameDexCacheResolvedMethods(
-        method->GetDeclaringClass()->GetDexCache()->GetResolvedMethods(),
-        kRuntimePointerSize));
   }
 
   void AssertField(ObjPtr<mirror::Class> klass, ArtField* field)
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index be3e4f811a..8253739427 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -83,7 +83,7 @@ inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
   ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache();
   const DexFile* dex_file = dex_cache->GetDexFile();
   const DexFile::MethodId& method_id = dex_file->GetMethodId(method_index);
-  ArtMethod* inlined_method = caller->GetDexCacheResolvedMethod(method_index, kRuntimePointerSize);
+  ArtMethod* inlined_method = dex_cache->GetResolvedMethod(method_index, kRuntimePointerSize);
   if (inlined_method != nullptr) {
     DCHECK(!inlined_method->IsRuntimeMethod());
     return inlined_method;
@@ -106,7 +106,7 @@ inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
         << dex_file->GetMethodSignature(method_id) << " declared. "
         << "This must be due to duplicate classes or playing wrongly with class loaders";
   }
-  caller->SetDexCacheResolvedMethod(method_index, inlined_method, kRuntimePointerSize);
+  dex_cache->SetResolvedMethod(method_index, inlined_method, kRuntimePointerSize);
   return inlined_method;
 }
 
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 5f713265df..7b83f20450 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1271,7 +1271,7 @@ extern "C" const void* artQuickResolutionTrampoline(
     // FindVirtualMethodFor... This is ok for FindDexMethodIndexInOtherDexFile that only cares
     // about the name and signature.
     uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
-    if (!called->HasSameDexCacheResolvedMethods(caller, kRuntimePointerSize)) {
+    if (called->GetDexFile() != caller->GetDexFile()) {
      // Calling from one dex file to another, need to compute the method index appropriate to
      // the caller's dex file. Since we get here only if the original called was a runtime
      // method, we've got the correct dex_file and a dex_method_idx from above.
@@ -1283,12 +1283,16 @@ extern "C" const void* artQuickResolutionTrampoline(
           called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
                                                    caller_method_name_and_sig_index);
     }
-    if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
-        (caller->GetDexCacheResolvedMethod(
-            update_dex_cache_method_index, kRuntimePointerSize) != called)) {
-      caller->SetDexCacheResolvedMethod(update_dex_cache_method_index,
-                                        called,
-                                        kRuntimePointerSize);
+    if (update_dex_cache_method_index != DexFile::kDexNoIndex) {
+      // Note: We do not need the read barrier for the dex cache as the SetResolvedMethod()
+      // operates on native (non-moveable) data and constants (num_resolved_methods_).
+      ObjPtr<mirror::DexCache> caller_dex_cache = caller->GetDexCache<kWithoutReadBarrier>();
+      if (caller_dex_cache->GetResolvedMethod(
+              update_dex_cache_method_index, kRuntimePointerSize) != called) {
+        caller_dex_cache->SetResolvedMethod(update_dex_cache_method_index,
+                                            called,
+                                            kRuntimePointerSize);
+      }
     }
   } else if (invoke_type == kStatic) {
     const auto called_dex_method_idx = called->GetDexMethodIndex();
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 14e017abd9..1a48b46020 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -1128,7 +1128,7 @@ class ImageSpaceLoader {
       }
     } else {
       if (fixup_heap_objects_) {
-        method->UpdateObjectsForImageRelocation(ForwardObjectAdapter(this), pointer_size_);
+        method->UpdateObjectsForImageRelocation(ForwardObjectAdapter(this));
       }
       method->UpdateEntrypoints<kWithoutReadBarrier>(ForwardCodeAdapter(this), pointer_size_);
     }
diff --git a/runtime/generated/asm_support_gen.h b/runtime/generated/asm_support_gen.h
index 314c45e117..071d1aedb7 100644
--- a/runtime/generated/asm_support_gen.h
+++ b/runtime/generated/asm_support_gen.h
@@ -48,6 +48,10 @@ DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_IS_GC_MARKING_OFFSET), (static_cast<
 DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_CARD_TABLE_OFFSET), (static_cast<int32_t>(art::Thread:: CardTableOffset<art::kRuntimePointerSize>().Int32Value())))
 #define CODEITEM_INSNS_OFFSET 16
 DEFINE_CHECK_EQ(static_cast<int32_t>(CODEITEM_INSNS_OFFSET), (static_cast<int32_t>(__builtin_offsetof(art::DexFile::CodeItem, insns_))))
+#define MIRROR_CLASS_DEX_CACHE_OFFSET 16
+DEFINE_CHECK_EQ(static_cast<int32_t>(MIRROR_CLASS_DEX_CACHE_OFFSET), (static_cast<int32_t>(art::mirror::Class:: DexCacheOffset().Int32Value())))
+#define MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET 48
+DEFINE_CHECK_EQ(static_cast<int32_t>(MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET), (static_cast<int32_t>(art::mirror::DexCache:: ResolvedMethodsOffset().Int32Value())))
 #define MIRROR_OBJECT_CLASS_OFFSET 0
 DEFINE_CHECK_EQ(static_cast<int32_t>(MIRROR_OBJECT_CLASS_OFFSET), (static_cast<int32_t>(art::mirror::Object:: ClassOffset().Int32Value())))
 #define MIRROR_OBJECT_LOCK_WORD_OFFSET 4
@@ -60,20 +64,18 @@ DEFINE_CHECK_EQ(static_cast<uint32_t>(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), (stati
 DEFINE_CHECK_EQ(static_cast<uint32_t>(ACCESS_FLAGS_CLASS_IS_INTERFACE), (static_cast<uint32_t>((art::kAccInterface))))
 #define ACCESS_FLAGS_CLASS_IS_FINALIZABLE_BIT 0x1f
 DEFINE_CHECK_EQ(static_cast<uint32_t>(ACCESS_FLAGS_CLASS_IS_FINALIZABLE_BIT), (static_cast<uint32_t>((art::MostSignificantBit(art::kAccClassIsFinalizable)))))
-#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_32 20
-DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_DEX_CACHE_METHODS_OFFSET_32), (static_cast<int32_t>(art::ArtMethod:: DexCacheResolvedMethodsOffset(art::PointerSize::k32).Int32Value())))
-#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_64 24
-DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_DEX_CACHE_METHODS_OFFSET_64), (static_cast<int32_t>(art::ArtMethod:: DexCacheResolvedMethodsOffset(art::PointerSize::k64).Int32Value())))
-#define ART_METHOD_JNI_OFFSET_32 24
+#define ART_METHOD_JNI_OFFSET_32 20
 DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_JNI_OFFSET_32), (static_cast<int32_t>(art::ArtMethod:: EntryPointFromJniOffset(art::PointerSize::k32).Int32Value())))
-#define ART_METHOD_JNI_OFFSET_64 32
+#define ART_METHOD_JNI_OFFSET_64 24
 DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_JNI_OFFSET_64), (static_cast<int32_t>(art::ArtMethod:: EntryPointFromJniOffset(art::PointerSize::k64).Int32Value())))
-#define ART_METHOD_QUICK_CODE_OFFSET_32 28
+#define ART_METHOD_QUICK_CODE_OFFSET_32 24
 DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_QUICK_CODE_OFFSET_32), (static_cast<int32_t>(art::ArtMethod:: EntryPointFromQuickCompiledCodeOffset(art::PointerSize::k32).Int32Value())))
-#define ART_METHOD_QUICK_CODE_OFFSET_64 40
+#define ART_METHOD_QUICK_CODE_OFFSET_64 32
 DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_QUICK_CODE_OFFSET_64), (static_cast<int32_t>(art::ArtMethod:: EntryPointFromQuickCompiledCodeOffset(art::PointerSize::k64).Int32Value())))
 #define ART_METHOD_DECLARING_CLASS_OFFSET 0
 DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_DECLARING_CLASS_OFFSET), (static_cast<int32_t>(art::ArtMethod:: DeclaringClassOffset().Int32Value())))
+#define ART_METHOD_ACCESS_FLAGS_OFFSET 4
+DEFINE_CHECK_EQ(static_cast<int32_t>(ART_METHOD_ACCESS_FLAGS_OFFSET), (static_cast<int32_t>(art::ArtMethod:: AccessFlagsOffset().Int32Value())))
 #define STRING_DEX_CACHE_ELEMENT_SIZE_SHIFT 3
 DEFINE_CHECK_EQ(static_cast<int32_t>(STRING_DEX_CACHE_ELEMENT_SIZE_SHIFT), (static_cast<int32_t>(art::WhichPowerOf2(sizeof(art::mirror::StringDexCachePair)))))
 #define STRING_DEX_CACHE_SIZE_MINUS_ONE 1023
@@ -126,6 +128,10 @@ DEFINE_CHECK_EQ(static_cast<size_t>(OBJECT_ALIGNMENT_MASK), (static_cast<size_t>
 DEFINE_CHECK_EQ(static_cast<uint32_t>(OBJECT_ALIGNMENT_MASK_TOGGLED), (static_cast<uint32_t>(~static_cast<uint32_t>(art::kObjectAlignment - 1))))
 #define OBJECT_ALIGNMENT_MASK_TOGGLED64 0xfffffffffffffff8
 DEFINE_CHECK_EQ(static_cast<uint64_t>(OBJECT_ALIGNMENT_MASK_TOGGLED64), (static_cast<uint64_t>(~static_cast<uint64_t>(art::kObjectAlignment - 1))))
+#define ACC_OBSOLETE_METHOD 262144
+DEFINE_CHECK_EQ(static_cast<int32_t>(ACC_OBSOLETE_METHOD), (static_cast<int32_t>(art::kAccObsoleteMethod)))
+#define ACC_OBSOLETE_METHOD_SHIFT 18
+DEFINE_CHECK_EQ(static_cast<int32_t>(ACC_OBSOLETE_METHOD_SHIFT), (static_cast<int32_t>(art::WhichPowerOf2(art::kAccObsoleteMethod))))
 #define ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE 128
 DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), (static_cast<int32_t>((art::gc::allocator::RosAlloc::kMaxThreadLocalBracketSize))))
 #define ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT 3
diff --git a/runtime/image.cc b/runtime/image.cc
index 950ac5dcbf..1f7e0f31b5 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -26,7 +26,7 @@ namespace art {
 
 const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '6', '\0' };  // Hash-based methods array.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '7', '\0' };  // Smaller ArtMethod.
 
 ImageHeader::ImageHeader(uint32_t image_begin,
                          uint32_t image_size,