Hash-based DexCache methods array.
Total boot*.art size for aosp_angler-userdebug:
- arm64:
- before: 11603968
- after: 10129408 (-1.4MiB, -12.7%)
- arm:
- before: 8626176
- after: 7888896 (-0.7MiB, -8.5%)
Test: m test-art-host-gtest
Test: testrunner.py --host
Test: Nexus 6P boots.
Test: testrunner.py --target
Test: Build aosp_mips64-eng
Bug: 30627598
Change-Id: I7f858605de5f074cbd7f0d9c4c072fbd44aee28f
diff --git a/compiler/dex/inline_method_analyser.cc b/compiler/dex/inline_method_analyser.cc
index 2572291..e5ff7fc 100644
--- a/compiler/dex/inline_method_analyser.cc
+++ b/compiler/dex/inline_method_analyser.cc
@@ -145,9 +145,8 @@
DCHECK_EQ(invoke_direct->VRegC_35c(),
method->GetCodeItem()->registers_size_ - method->GetCodeItem()->ins_size_);
uint32_t method_index = invoke_direct->VRegB_35c();
- PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
- ArtMethod* target_method =
- method->GetDexCache()->GetResolvedMethod(method_index, pointer_size);
+ ArtMethod* target_method = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
+ method_index, method->GetDexCache(), method->GetClassLoader());
if (kIsDebugBuild && target_method != nullptr) {
CHECK(!target_method->IsStatic());
CHECK(target_method->IsConstructor());
diff --git a/compiler/driver/compiler_driver_test.cc b/compiler/driver/compiler_driver_test.cc
index b4ad325..5d1d972 100644
--- a/compiler/driver/compiler_driver_test.cc
+++ b/compiler/driver/compiler_driver_test.cc
@@ -118,10 +118,12 @@
EXPECT_TRUE(type != nullptr) << "type_idx=" << i
<< " " << dex.GetTypeDescriptor(dex.GetTypeId(dex::TypeIndex(i)));
}
- EXPECT_EQ(dex.NumMethodIds(), dex_cache->NumResolvedMethods());
+ EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
+ || dex.NumMethodIds() == dex_cache->NumResolvedMethods());
auto* cl = Runtime::Current()->GetClassLinker();
auto pointer_size = cl->GetImagePointerSize();
for (size_t i = 0; i < dex_cache->NumResolvedMethods(); i++) {
+ // FIXME: This is outdated for hash-based method array.
ArtMethod* method = dex_cache->GetResolvedMethod(i, pointer_size);
EXPECT_TRUE(method != nullptr) << "method_idx=" << i
<< " " << dex.GetMethodDeclaringClassDescriptor(dex.GetMethodId(i))
@@ -133,6 +135,7 @@
EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
|| dex.NumFieldIds() == dex_cache->NumResolvedFields());
for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
+ // FIXME: This is outdated for hash-based field array.
ArtField* field = dex_cache->GetResolvedField(i, cl->GetImagePointerSize());
EXPECT_TRUE(field != nullptr) << "field_idx=" << i
<< " " << dex.GetFieldDeclaringClassDescriptor(dex.GetFieldId(i))
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index f92bf95..51730cf 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -1023,41 +1023,58 @@
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
- ArtMethod* resolution_method = runtime->GetResolutionMethod();
const DexFile& dex_file = *dex_cache->GetDexFile();
// Prune methods.
- ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
- ArtMethod* method =
- mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
- DCHECK(method != nullptr) << "Expected resolution method instead of null method";
+ mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
+ dex::TypeIndex last_class_idx; // Initialized to invalid index.
+ ObjPtr<mirror::Class> last_class = nullptr;
+ for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
+ uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
+ auto pair =
+ mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
+ uint32_t stored_index = pair.index;
+ ArtMethod* method = pair.object;
+ if (method != nullptr && i > stored_index) {
+ continue; // Already checked.
+ }
// Check if the referenced class is in the image. Note that we want to check the referenced
// class rather than the declaring class to preserve the semantics, i.e. using a MethodId
// results in resolving the referenced class and that can for example throw OOME.
- ObjPtr<mirror::Class> referencing_class = class_linker->LookupResolvedType(
- dex_file,
- dex_file.GetMethodId(i).class_idx_,
- dex_cache,
- class_loader);
- // Copied methods may be held live by a class which was not an image class but have a
- // declaring class which is an image class. Set it to the resolution method to be safe and
- // prevent dangling pointers.
- if (method->IsCopied() || !KeepClass(referencing_class)) {
- mirror::DexCache::SetElementPtrSize(resolved_methods,
- i,
- resolution_method,
- target_ptr_size_);
- } else if (kIsDebugBuild) {
- // Check that the class is still in the classes table.
- ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
- CHECK(class_linker->ClassInClassTable(referencing_class)) << "Class "
- << Class::PrettyClass(referencing_class) << " not in class linker table";
+ const DexFile::MethodId& method_id = dex_file.GetMethodId(i);
+ if (method_id.class_idx_ != last_class_idx) {
+ last_class_idx = method_id.class_idx_;
+ last_class = class_linker->LookupResolvedType(
+ dex_file, last_class_idx, dex_cache, class_loader);
+ if (last_class != nullptr && !KeepClass(last_class)) {
+ last_class = nullptr;
+ }
+ }
+ if (method == nullptr || i < stored_index) {
+ if (last_class != nullptr) {
+ const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
+ Signature signature = dex_file.GetMethodSignature(method_id);
+ if (last_class->IsInterface()) {
+ method = last_class->FindInterfaceMethod(name, signature, target_ptr_size_);
+ } else {
+ method = last_class->FindClassMethod(name, signature, target_ptr_size_);
+ }
+ if (method != nullptr) {
+ // If the referenced class is in the image, the defining class must also be there.
+ DCHECK(KeepClass(method->GetDeclaringClass()));
+ dex_cache->SetResolvedMethod(i, method, target_ptr_size_);
+ }
+ }
+ } else {
+ DCHECK_EQ(i, stored_index);
+ if (last_class == nullptr) {
+ dex_cache->ClearResolvedMethod(stored_index, target_ptr_size_);
+ }
}
}
// Prune fields and make the contents of the field array deterministic.
mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
- dex::TypeIndex last_class_idx; // Initialized to invalid index.
- ObjPtr<mirror::Class> last_class = nullptr;
+ last_class_idx = dex::TypeIndex(); // Initialized to invalid index.
+ last_class = nullptr;
for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
@@ -2401,17 +2418,19 @@
orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
fixup_visitor);
}
- ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
if (orig_methods != nullptr) {
copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
NativeLocationInImage(orig_methods),
PointerSize::k64);
- ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
+ mirror::MethodDexCacheType* copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
+ mirror::MethodDexCachePair orig_pair =
+ mirror::DexCache::GetNativePairPtrSize(orig_methods, i, target_ptr_size_);
// NativeLocationInImage also handles runtime methods since these have relocation info.
- ArtMethod* copy = NativeLocationInImage(orig);
- mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
+ mirror::MethodDexCachePair copy_pair(NativeLocationInImage(orig_pair.object),
+ orig_pair.index);
+ mirror::DexCache::SetNativePairPtrSize(copy_methods, i, copy_pair, target_ptr_size_);
}
}
mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
@@ -2552,7 +2571,8 @@
CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
- ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
+ mirror::MethodDexCacheType* orig_resolved_methods =
+ orig->GetDexCacheResolvedMethods(target_ptr_size_);
copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
// OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index f8bb417..4d258af 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -1116,6 +1116,7 @@
const std::vector<const DexFile*>* dex_files)
: OatDexMethodVisitor(writer, offset),
pointer_size_(GetInstructionSetPointerSize(writer_->compiler_driver_->GetInstructionSet())),
+ class_loader_(writer->HasImage() ? writer->image_writer_->GetClassLoader() : nullptr),
dex_files_(dex_files),
class_linker_(Runtime::Current()->GetClassLinker()) {}
@@ -1131,10 +1132,7 @@
if (!IsImageClass()) {
return true;
}
- ScopedObjectAccessUnchecked soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- Handle<mirror::DexCache> dex_cache = hs.NewHandle(
- class_linker_->FindDexCache(Thread::Current(), *dex_file));
+ ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(Thread::Current(), *dex_file);
const DexFile::ClassDef& class_def = dex_file->GetClassDef(class_def_index);
mirror::Class* klass = dex_cache->GetResolvedType(class_def.class_idx_);
if (klass != nullptr) {
@@ -1182,36 +1180,36 @@
++method_offsets_index_;
}
- // Unchecked as we hold mutator_lock_ on entry.
- ScopedObjectAccessUnchecked soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker_->FindDexCache(
- Thread::Current(), *dex_file_)));
+ Thread* self = Thread::Current();
+ ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(self, *dex_file_);
ArtMethod* method;
if (writer_->HasBootImage()) {
const InvokeType invoke_type = it.GetMethodInvokeType(
dex_file_->GetClassDef(class_def_index_));
+ // Unchecked as we hold mutator_lock_ on entry.
+ ScopedObjectAccessUnchecked soa(self);
+ StackHandleScope<1> hs(self);
method = class_linker_->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
*dex_file_,
it.GetMemberIndex(),
- dex_cache,
+ hs.NewHandle(dex_cache),
ScopedNullHandle<mirror::ClassLoader>(),
nullptr,
invoke_type);
if (method == nullptr) {
LOG(FATAL_WITHOUT_ABORT) << "Unexpected failure to resolve a method: "
<< dex_file_->PrettyMethod(it.GetMemberIndex(), true);
- soa.Self()->AssertPendingException();
- mirror::Throwable* exc = soa.Self()->GetException();
+ self->AssertPendingException();
+ mirror::Throwable* exc = self->GetException();
std::string dump = exc->Dump();
LOG(FATAL) << dump;
UNREACHABLE();
}
} else {
- // Should already have been resolved by the compiler, just peek into the dex cache.
+ // Should already have been resolved by the compiler.
// It may not be resolved if the class failed to verify, in this case, don't set the
- // entrypoint. This is not fatal since the dex cache will contain a resolution method.
- method = dex_cache->GetResolvedMethod(it.GetMemberIndex(), pointer_size_);
+ // entrypoint. This is not fatal since we shall use a resolution method.
+ method = class_linker_->LookupResolvedMethod(it.GetMemberIndex(), dex_cache, class_loader_);
}
if (method != nullptr &&
compiled_method != nullptr &&
@@ -1252,6 +1250,7 @@
private:
const PointerSize pointer_size_;
+ ObjPtr<mirror::ClassLoader> class_loader_;
const std::vector<const DexFile*>* dex_files_;
ClassLinker* const class_linker_;
std::vector<std::pair<ArtMethod*, ArtMethod*>> methods_to_process_;
@@ -1471,7 +1470,8 @@
ObjPtr<mirror::DexCache> dex_cache =
(dex_file_ == ref.dex_file) ? dex_cache_ : class_linker_->FindDexCache(
Thread::Current(), *ref.dex_file);
- ArtMethod* method = dex_cache->GetResolvedMethod(ref.dex_method_index, pointer_size_);
+ ArtMethod* method =
+ class_linker_->LookupResolvedMethod(ref.dex_method_index, dex_cache, class_loader_);
CHECK(method != nullptr);
return method;
}
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index ecbf52b..f172e16 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -525,7 +525,7 @@
// Use a null loader. We should probably use the compiling method's class loader,
// but then we would need to pass it to RTPVisitor just for this debug check. Since
// the method is from the String class, the null loader is good enough.
- Handle<mirror::ClassLoader> loader;
+ Handle<mirror::ClassLoader> loader(hs.NewHandle<mirror::ClassLoader>(nullptr));
ArtMethod* method = cl->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
dex_file, invoke->GetDexMethodIndex(), dex_cache, loader, nullptr, kDirect);
DCHECK(method != nullptr);
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index 113bdb5..9ca24cd 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -2542,7 +2542,6 @@
runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
}
}
- runtime_->GetClassLinker()->FixupDexCaches(runtime_->GetResolutionMethod());
// Initialize maps for unstarted runtime. This needs to be here, as running clinits needs this
// set up.
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index ae26e7d..7bd71e8 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -2233,16 +2233,15 @@
if (num_methods != 0u) {
os << "Methods (size=" << num_methods << "):\n";
ScopedIndentation indent2(&vios_);
- auto* resolved_methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
for (size_t i = 0, length = dex_cache->NumResolvedMethods(); i < length; ++i) {
- auto* elem = mirror::DexCache::GetElementPtrSize(resolved_methods,
- i,
- image_pointer_size);
+ ArtMethod* elem = mirror::DexCache::GetNativePairPtrSize(
+ resolved_methods, i, image_pointer_size).object;
size_t run = 0;
for (size_t j = i + 1;
- j != length && elem == mirror::DexCache::GetElementPtrSize(resolved_methods,
- j,
- image_pointer_size);
+ j != length &&
+ elem == mirror::DexCache::GetNativePairPtrSize(
+ resolved_methods, j, image_pointer_size).object;
++j) {
++run;
}
@@ -2270,7 +2269,7 @@
ScopedIndentation indent2(&vios_);
auto* resolved_fields = dex_cache->GetResolvedFields();
for (size_t i = 0, length = dex_cache->NumResolvedFields(); i < length; ++i) {
- auto* elem = mirror::DexCache::GetNativePairPtrSize(
+ ArtField* elem = mirror::DexCache::GetNativePairPtrSize(
resolved_fields, i, image_pointer_size).object;
size_t run = 0;
for (size_t j = i + 1;
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index a93969f..1ee2fbd 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -535,17 +535,18 @@
orig_dex_cache->FixupResolvedTypes(RelocatedCopyOf(orig_types),
RelocatedPointerVisitor(this));
}
- ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
- ArtMethod** relocated_methods = RelocatedAddressOfPointer(orig_methods);
+ mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* relocated_methods = RelocatedAddressOfPointer(orig_methods);
copy_dex_cache->SetField64<false>(
mirror::DexCache::ResolvedMethodsOffset(),
static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_methods)));
if (orig_methods != nullptr) {
- ArtMethod** copy_methods = RelocatedCopyOf(orig_methods);
+ mirror::MethodDexCacheType* copy_methods = RelocatedCopyOf(orig_methods);
for (size_t j = 0, num = orig_dex_cache->NumResolvedMethods(); j != num; ++j) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, j, pointer_size);
- ArtMethod* copy = RelocatedAddressOfPointer(orig);
- mirror::DexCache::SetElementPtrSize(copy_methods, j, copy, pointer_size);
+ mirror::MethodDexCachePair orig =
+ mirror::DexCache::GetNativePairPtrSize(orig_methods, j, pointer_size);
+ mirror::MethodDexCachePair copy(RelocatedAddressOfPointer(orig.object), orig.index);
+ mirror::DexCache::SetNativePairPtrSize(copy_methods, j, copy, pointer_size);
}
}
mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 0de5905..375768e 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1585,31 +1585,98 @@
*
* Note that this stub writes to r0, r4, and r12.
*/
+ .extern artLookupResolvedMethod
ENTRY art_quick_imt_conflict_trampoline
- ldr r4, [sp, #0] // Load referrer
- ldr r4, [r4, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_32] // Load dex cache methods array
- ldr r12, [r4, r12, lsl #POINTER_SIZE_SHIFT] // Load interface method
- ldr r0, [r0, #ART_METHOD_JNI_OFFSET_32] // Load ImtConflictTable
- ldr r4, [r0] // Load first entry in ImtConflictTable.
+ push {r1-r2}
+ .cfi_adjust_cfa_offset (2 * 4)
+ .cfi_rel_offset r1, 0
+ .cfi_rel_offset r2, 4
+ ldr r4, [sp, #(2 * 4)] // Load referrer.
+ ubfx r1, r12, #0, #METHOD_DEX_CACHE_HASH_BITS // Calculate DexCache method slot index.
+ ldr r4, [r4, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_32] // Load dex cache methods array
+ add r4, r4, r1, lsl #(POINTER_SIZE_SHIFT + 1) // Load DexCache method slot address.
+ ldr r2, [r0, #ART_METHOD_JNI_OFFSET_32] // Load ImtConflictTable
+
+// FIXME: Configure the build to use the faster code when appropriate.
+// Currently we fall back to the slower version.
+#if HAS_ATOMIC_LDRD
+ ldrd r0, r1, [r4]
+#else
+ push {r3}
+ .cfi_adjust_cfa_offset 4
+ .cfi_rel_offset r3, 0
+.Limt_conflict_trampoline_retry_load:
+ ldrexd r0, r1, [r4]
+ strexd r3, r0, r1, [r4]
+ cmp r3, #0
+ bne .Limt_conflict_trampoline_retry_load
+ pop {r3}
+ .cfi_adjust_cfa_offset -4
+ .cfi_restore r3
+#endif
+
+ ldr r4, [r2] // Load first entry in ImtConflictTable.
+ cmp r1, r12 // Compare method index to see if we had a DexCache method hit.
+ bne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmp r4, r12
+ cmp r4, r0
// Branch if found. Benchmarks have shown doing a branch here is better.
- beq .Limt_table_found
+ beq .Limt_table_found
// If the entry is null, the interface method is not in the ImtConflictTable.
- cbz r4, .Lconflict_trampoline
+ cbz r4, .Lconflict_trampoline
// Iterate over the entries of the ImtConflictTable.
- ldr r4, [r0, #(2 * __SIZEOF_POINTER__)]!
+ ldr r4, [r2, #(2 * __SIZEOF_POINTER__)]!
b .Limt_table_iterate
.Limt_table_found:
// We successfully hit an entry in the table. Load the target method
// and jump to it.
- ldr r0, [r0, #__SIZEOF_POINTER__]
- ldr pc, [r0, #ART_METHOD_QUICK_CODE_OFFSET_32]
+ ldr r0, [r2, #__SIZEOF_POINTER__]
+ .cfi_remember_state
+ pop {r1-r2}
+ .cfi_adjust_cfa_offset -(2 * 4)
+ .cfi_restore r1
+ .cfi_restore r2
+ ldr pc, [r0, #ART_METHOD_QUICK_CODE_OFFSET_32]
+ .cfi_restore_state
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- mov r0, r12 // Load interface method
+ .cfi_remember_state
+ pop {r1-r2}
+ .cfi_adjust_cfa_offset -(2 * 4)
+ .cfi_restore r1
+ .cfi_restore r2
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ .cfi_restore_state
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here;
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save ImtConflictTable (r2), remaining arg (r3), first entry (r4), return address (lr).
+ push {r2-r4, lr}
+ .cfi_adjust_cfa_offset (4 * 4)
+ .cfi_rel_offset r3, 4
+ .cfi_rel_offset lr, 12
+ // Save FPR args.
+ vpush {d0-d7}
+ .cfi_adjust_cfa_offset (8 * 8)
+
+ mov r0, ip // Pass method index.
+ ldr r1, [sp, #(8 * 8 + 6 * 4)] // Pass referrer.
+ bl artLookupResolvedMethod // (uint32_t method_index, ArtMethod* referrer)
+
+ // Restore FPR args.
+ vpop {d0-d7}
+ .cfi_adjust_cfa_offset -(8 * 8)
+ // Restore ImtConflictTable (r2), remaining arg (r3), first entry (r4), return address (lr).
+ pop {r2-r4, lr}
+ .cfi_adjust_cfa_offset -(4 * 4)
+ .cfi_restore r3
+ .cfi_restore lr
+
+ cmp r0, #0 // If the method wasn't resolved,
+ beq .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ b .Limt_table_iterate
END art_quick_imt_conflict_trampoline
.extern artQuickResolutionTrampoline
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index e097a33..d15f5b8 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -2052,17 +2052,28 @@
* x0 is the conflict ArtMethod.
* xIP1 is a hidden argument that holds the target interface method's dex method index.
*
- * Note that this stub writes to xIP0, xIP1, and x0.
+ * Note that this stub writes to xIP0, xIP1, x13-x15, and x0.
*/
- .extern artInvokeInterfaceTrampoline
+ .extern artLookupResolvedMethod
ENTRY art_quick_imt_conflict_trampoline
ldr xIP0, [sp, #0] // Load referrer
+ ubfx x15, xIP1, #0, #METHOD_DEX_CACHE_HASH_BITS // Calculate DexCache method slot index.
ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64] // Load dex cache methods array
- ldr xIP0, [xIP0, xIP1, lsl #POINTER_SIZE_SHIFT] // Load interface method
+ add xIP0, xIP0, x15, lsl #(POINTER_SIZE_SHIFT + 1) // Load DexCache method slot address.
+
+ // Relaxed atomic load x14:x15 from the dex cache slot.
+.Limt_conflict_trampoline_retry_load:
+ ldxp x14, x15, [xIP0]
+ stxp w13, x14, x15, [xIP0]
+ cbnz w13, .Limt_conflict_trampoline_retry_load
+
+ cmp x15, xIP1 // Compare method index to see if we had a DexCache method hit.
+ bne .Limt_conflict_trampoline_dex_cache_miss
+.Limt_conflict_trampoline_have_interface_method:
ldr xIP1, [x0, #ART_METHOD_JNI_OFFSET_64] // Load ImtConflictTable
ldr x0, [xIP1] // Load first entry in ImtConflictTable.
.Limt_table_iterate:
- cmp x0, xIP0
+ cmp x0, x14
// Branch if found. Benchmarks have shown doing a branch here is better.
beq .Limt_table_found
// If the entry is null, the interface method is not in the ImtConflictTable.
@@ -2079,8 +2090,46 @@
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- mov x0, xIP0 // Load interface method
+ mov x0, x14 // Load interface method
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here;
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save GPR args and return address, allocate space for FPR args, align stack.
+ SAVE_TWO_REGS_INCREASE_FRAME x0, x1, (8 * 8 + 8 * 8 + 8 + 8)
+ SAVE_TWO_REGS x2, x3, 16
+ SAVE_TWO_REGS x4, x5, 32
+ SAVE_TWO_REGS x6, x7, 48
+ SAVE_REG xLR, (8 * 8 + 8 * 8 + 8)
+
+ // Save FPR args.
+ stp d0, d1, [sp, #64]
+ stp d2, d3, [sp, #80]
+ stp d4, d5, [sp, #96]
+ stp d6, d7, [sp, #112]
+
+ mov x0, xIP1 // Pass method index.
+ ldr x1, [sp, #(8 * 8 + 8 * 8 + 8 + 8)] // Pass referrer.
+ bl artLookupResolvedMethod // (uint32_t method_index, ArtMethod* referrer)
+ mov x14, x0 // Move the interface method to x14 where the loop above expects it.
+
+ // Restore FPR args.
+ ldp d0, d1, [sp, #64]
+ ldp d2, d3, [sp, #80]
+ ldp d4, d5, [sp, #96]
+ ldp d6, d7, [sp, #112]
+
+ // Restore GPR args and return address.
+ RESTORE_REG xLR, (8 * 8 + 8 * 8 + 8)
+ RESTORE_TWO_REGS x2, x3, 16
+ RESTORE_TWO_REGS x4, x5, 32
+ RESTORE_TWO_REGS x6, x7, 48
+ RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, (8 * 8 + 8 * 8 + 8 + 8)
+
+ // If the method wasn't resolved, skip the lookup and go to artInvokeInterfaceTrampoline().
+ cbz x14, .Lconflict_trampoline
+ b .Limt_conflict_trampoline_have_interface_method
END art_quick_imt_conflict_trampoline
ENTRY art_quick_resolution_trampoline
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 00e3d67..e5a1e3f 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -2066,6 +2066,10 @@
* Note that this stub writes to a0, t7 and t8.
*/
ENTRY art_quick_imt_conflict_trampoline
+// FIXME: The DexCache method array has been changed to hash-based cache with eviction.
+// We need a relaxed atomic load of a 64-bit location to try and load the method
+// and call artQuickResolutionTrampoline() if the index does not match.
+#if 0
lw $t8, 0($sp) # Load referrer.
lw $t8, ART_METHOD_DEX_CACHE_METHODS_OFFSET_32($t8) # Load dex cache methods array.
sll $t7, $t7, POINTER_SIZE_SHIFT # Calculate offset.
@@ -2095,6 +2099,9 @@
.Lconflict_trampoline:
# Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
move $a0, $t7 # Load interface method.
+#else
+ move $a0, $zero
+#endif
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index d427fe3..6f5f82a 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1989,6 +1989,10 @@
* Mote that this stub writes to a0, t0 and t1.
*/
ENTRY art_quick_imt_conflict_trampoline
+// FIXME: The DexCache method array has been changed to hash-based cache with eviction.
+// We need a relaxed atomic load of a 128-bit location to try and load the method
+// and call artQuickResolutionTrampoline() if the index does not match.
+#if 0
ld $t1, 0($sp) # Load referrer.
ld $t1, ART_METHOD_DEX_CACHE_METHODS_OFFSET_64($t1) # Load dex cache methods array.
dsll $t0, $t0, POINTER_SIZE_SHIFT # Calculate offset.
@@ -2017,6 +2021,9 @@
.Lconflict_trampoline:
# Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
move $a0, $t0 # Load interface method.
+#else
+ move $a0, $zero
+#endif
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 031b36b..48d2de9 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1780,35 +1780,90 @@
*/
DEFINE_FUNCTION art_quick_imt_conflict_trampoline
PUSH EDI
- movl 8(%esp), %edi // Load referrer
- movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi // Load dex cache methods array
+ PUSH ESI
+ PUSH EDX
+ movl 16(%esp), %edi // Load referrer.
+ movl ART_METHOD_DEX_CACHE_METHODS_OFFSET_32(%edi), %edi // Load dex cache methods array.
pushl ART_METHOD_JNI_OFFSET_32(%eax) // Push ImtConflictTable.
CFI_ADJUST_CFA_OFFSET(4)
- movd %xmm7, %eax // get target method index stored in xmm7
- movl 0(%edi, %eax, __SIZEOF_POINTER__), %edi // Load interface method
- popl %eax // Pop ImtConflictTable.
+ movd %xmm7, %eax // Get target method index stored in xmm7.
+ movl %eax, %esi // Remember method index in ESI.
+ andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
+ leal 0(%edi, %eax, 2 * __SIZEOF_POINTER__), %edi // Load DexCache method slot address.
+ mov %ecx, %edx // Make EDX:EAX == ECX:EBX so that LOCK CMPXCHG8B makes no changes.
+ mov %ebx, %eax // (The actual value does not matter.)
+ lock cmpxchg8b (%edi) // Relaxed atomic load EDX:EAX from the dex cache slot.
+ popl %edi // Pop ImtConflictTable.
CFI_ADJUST_CFA_OFFSET(-4)
+ cmp %edx, %esi // Compare method index to see if we had a DexCache method hit.
+ jne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmpl %edi, 0(%eax)
+ cmpl %eax, 0(%edi)
jne .Limt_table_next_entry
// We successfully hit an entry in the table. Load the target method
// and jump to it.
+ movl __SIZEOF_POINTER__(%edi), %eax
+ CFI_REMEMBER_STATE
+ POP EDX
+ POP ESI
POP EDI
- movl __SIZEOF_POINTER__(%eax), %eax
jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
+ CFI_RESTORE_STATE
.Limt_table_next_entry:
// If the entry is null, the interface method is not in the ImtConflictTable.
- cmpl LITERAL(0), 0(%eax)
+ cmpl LITERAL(0), 0(%edi)
jz .Lconflict_trampoline
// Iterate over the entries of the ImtConflictTable.
- addl LITERAL(2 * __SIZEOF_POINTER__), %eax
+ addl LITERAL(2 * __SIZEOF_POINTER__), %edi
jmp .Limt_table_iterate
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- movl %edi, %eax // Load interface method
+ CFI_REMEMBER_STATE
+ POP EDX
+ POP ESI
POP EDI
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ CFI_RESTORE_STATE
+.Limt_conflict_trampoline_dex_cache_miss:
+ // We're not creating a proper runtime method frame here;
+ // artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save core register args; EDX is already saved.
+ PUSH ebx
+ PUSH ecx
+
+ // Save FPR args.
+ subl MACRO_LITERAL(32), %esp
+ CFI_ADJUST_CFA_OFFSET(32)
+ movsd %xmm0, 0(%esp)
+ movsd %xmm1, 8(%esp)
+ movsd %xmm2, 16(%esp)
+ movsd %xmm3, 24(%esp)
+
+ pushl 32+8+16(%esp) // Pass referrer.
+ CFI_ADJUST_CFA_OFFSET(4)
+ pushl %esi // Pass method index.
+ CFI_ADJUST_CFA_OFFSET(4)
+ call SYMBOL(artLookupResolvedMethod) // (uint32_t method_index, ArtMethod* referrer)
+ addl LITERAL(8), %esp // Pop arguments.
+ CFI_ADJUST_CFA_OFFSET(-8)
+
+ // Restore FPR args.
+ movsd 0(%esp), %xmm0
+ movsd 8(%esp), %xmm1
+ movsd 16(%esp), %xmm2
+ movsd 24(%esp), %xmm3
+ addl MACRO_LITERAL(32), %esp
+ CFI_ADJUST_CFA_OFFSET(-32)
+
+ // Restore core register args.
+ POP ecx
+ POP ebx
+
+ cmp LITERAL(0), %eax // If the method wasn't resolved,
+ je .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ jmp .Limt_table_iterate
END_FUNCTION art_quick_imt_conflict_trampoline
DEFINE_FUNCTION art_quick_resolution_trampoline
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index ad06873..0a9199e 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1641,17 +1641,29 @@
int3
int3
#else
- movq __SIZEOF_POINTER__(%rsp), %r10 // Load referrer
- movq ART_METHOD_DEX_CACHE_METHODS_OFFSET_64(%r10), %r10 // Load dex cache methods array
- movq 0(%r10, %rax, __SIZEOF_POINTER__), %r10 // Load interface method
+ movq __SIZEOF_POINTER__(%rsp), %r10 // Load referrer.
+ movq ART_METHOD_DEX_CACHE_METHODS_OFFSET_64(%r10), %r10 // Load dex cache methods array.
+ mov %eax, %r11d // Remember method index in R11.
+ andl LITERAL(METHOD_DEX_CACHE_SIZE_MINUS_ONE), %eax // Calculate DexCache method slot index.
+ shll LITERAL(1), %eax // Multiply by 2 as entries have size 2 * __SIZEOF_POINTER__.
+ leaq 0(%r10, %rax, __SIZEOF_POINTER__), %r10 // Load DexCache method slot address.
+ PUSH rdx // Preserve RDX as we need to clobber it by LOCK CMPXCHG16B.
+ mov %rcx, %rdx // Make RDX:RAX == RCX:RBX so that LOCK CMPXCHG16B makes no changes.
+ mov %rbx, %rax // (The actual value does not matter.)
+ lock cmpxchg16b (%r10) // Relaxed atomic load RDX:RAX from the dex cache slot.
movq ART_METHOD_JNI_OFFSET_64(%rdi), %rdi // Load ImtConflictTable
+ cmp %rdx, %r11 // Compare method index to see if we had a DexCache method hit.
+ jne .Limt_conflict_trampoline_dex_cache_miss
.Limt_table_iterate:
- cmpq %r10, 0(%rdi)
+ cmpq %rax, 0(%rdi)
jne .Limt_table_next_entry
// We successfully hit an entry in the table. Load the target method
// and jump to it.
movq __SIZEOF_POINTER__(%rdi), %rdi
+ CFI_REMEMBER_STATE
+ POP rdx
jmp *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi)
+ CFI_RESTORE_STATE
.Limt_table_next_entry:
// If the entry is null, the interface method is not in the ImtConflictTable.
cmpq LITERAL(0), 0(%rdi)
@@ -1662,8 +1674,66 @@
.Lconflict_trampoline:
// Call the runtime stub to populate the ImtConflictTable and jump to the
// resolved method.
- movq %r10, %rdi // Load interface method
+ CFI_REMEMBER_STATE
+ POP rdx
+ movq %rax, %rdi // Load interface method
INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
+ CFI_RESTORE_STATE
+.Limt_conflict_trampoline_dex_cache_miss:
+    // We're not creating a proper runtime method frame here,
+    // so artLookupResolvedMethod() is not allowed to walk the stack.
+
+ // Save GPR args and ImtConflictTable; RDX is already saved.
+ PUSH r9 // Quick arg 5.
+ PUSH r8 // Quick arg 4.
+ PUSH rsi // Quick arg 1.
+ PUSH rcx // Quick arg 3.
+ PUSH rdi // ImtConflictTable
+ // Save FPR args and callee-saves, align stack to 16B.
+ subq MACRO_LITERAL(12 * 8 + 8), %rsp
+ CFI_ADJUST_CFA_OFFSET(12 * 8 + 8)
+ movq %xmm0, 0(%rsp)
+ movq %xmm1, 8(%rsp)
+ movq %xmm2, 16(%rsp)
+ movq %xmm3, 24(%rsp)
+ movq %xmm4, 32(%rsp)
+ movq %xmm5, 40(%rsp)
+ movq %xmm6, 48(%rsp)
+ movq %xmm7, 56(%rsp)
+ movq %xmm12, 64(%rsp) // XMM12-15 are callee-save in ART compiled code ABI
+ movq %xmm13, 72(%rsp) // but caller-save in native ABI.
+ movq %xmm14, 80(%rsp)
+ movq %xmm15, 88(%rsp)
+
+ movq %r11, %rdi // Pass method index.
+ movq 12 * 8 + 8 + 6 * 8 + 8(%rsp), %rsi // Pass referrer.
+ call SYMBOL(artLookupResolvedMethod) // (uint32_t method_index, ArtMethod* referrer)
+
+ // Restore FPRs.
+ movq 0(%rsp), %xmm0
+ movq 8(%rsp), %xmm1
+ movq 16(%rsp), %xmm2
+ movq 24(%rsp), %xmm3
+ movq 32(%rsp), %xmm4
+ movq 40(%rsp), %xmm5
+ movq 48(%rsp), %xmm6
+ movq 56(%rsp), %xmm7
+ movq 64(%rsp), %xmm12
+ movq 72(%rsp), %xmm13
+ movq 80(%rsp), %xmm14
+ movq 88(%rsp), %xmm15
+ addq MACRO_LITERAL(12 * 8 + 8), %rsp
+ CFI_ADJUST_CFA_OFFSET(-(12 * 8 + 8))
+ // Restore ImtConflictTable and GPR args.
+ POP rdi
+ POP rcx
+ POP rsi
+ POP r8
+ POP r9
+
+ cmp LITERAL(0), %rax // If the method wasn't resolved,
+ je .Lconflict_trampoline // skip the lookup and go to artInvokeInterfaceTrampoline().
+ jmp .Limt_table_iterate
#endif // __APPLE__
END_FUNCTION art_quick_imt_conflict_trampoline
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 40d7e5c..4300544 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -102,20 +102,21 @@
return GetDexMethodIndexUnchecked();
}
-inline ArtMethod** ArtMethod::GetDexCacheResolvedMethods(PointerSize pointer_size) {
- return GetNativePointer<ArtMethod**>(DexCacheResolvedMethodsOffset(pointer_size),
- pointer_size);
+inline mirror::MethodDexCacheType* ArtMethod::GetDexCacheResolvedMethods(PointerSize pointer_size) {
+ return GetNativePointer<mirror::MethodDexCacheType*>(DexCacheResolvedMethodsOffset(pointer_size),
+ pointer_size);
}
inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index,
PointerSize pointer_size) {
// NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
// without accessing the DexCache and we don't want to do that in release build.
- DCHECK_LT(method_index,
- GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- method_index,
- pointer_size);
+ DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ mirror::MethodDexCachePair pair = mirror::DexCache::GetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pointer_size);
+ ArtMethod* method = pair.GetObjectForIndex(method_index);
if (LIKELY(method != nullptr)) {
auto* declaring_class = method->GetDeclaringClass();
if (LIKELY(declaring_class == nullptr || !declaring_class->IsErroneous())) {
@@ -130,29 +131,29 @@
PointerSize pointer_size) {
// NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
// without accessing the DexCache and we don't want to do that in release build.
- DCHECK_LT(method_index,
- GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ DCHECK_LT(method_index, GetInterfaceMethodIfProxy(pointer_size)->GetDexFile()->NumMethodIds());
DCHECK(new_method == nullptr || new_method->GetDeclaringClass() != nullptr);
- mirror::DexCache::SetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- method_index,
- new_method,
- pointer_size);
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
+ mirror::MethodDexCachePair pair(new_method, method_index);
+ mirror::DexCache::SetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pair, pointer_size);
}
inline bool ArtMethod::HasDexCacheResolvedMethods(PointerSize pointer_size) {
return GetDexCacheResolvedMethods(pointer_size) != nullptr;
}
-inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod** other_cache,
- PointerSize pointer_size) {
- return GetDexCacheResolvedMethods(pointer_size) == other_cache;
-}
-
inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size) {
return GetDexCacheResolvedMethods(pointer_size) ==
other->GetDexCacheResolvedMethods(pointer_size);
}
+inline bool ArtMethod::HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
+ PointerSize pointer_size) {
+ return GetDexCacheResolvedMethods(pointer_size) == other_cache;
+}
+
inline mirror::Class* ArtMethod::GetClassFromTypeIndex(dex::TypeIndex type_idx, bool resolve) {
// TODO: Refactor this function into two functions, Resolve...() and Lookup...()
// so that we can properly annotate it with no-suspension possible / suspension possible.
@@ -381,17 +382,21 @@
if (LIKELY(!IsProxyMethod())) {
return this;
}
- ArtMethod* interface_method = mirror::DexCache::GetElementPtrSize(
- GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
- DCHECK(interface_method != nullptr);
- DCHECK_EQ(interface_method,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
+ uint32_t method_index = GetDexMethodIndex();
+ uint32_t slot_idx = method_index % mirror::DexCache::kDexCacheMethodCacheSize;
+ mirror::MethodDexCachePair pair = mirror::DexCache::GetNativePairPtrSize(
+ GetDexCacheResolvedMethods(pointer_size), slot_idx, pointer_size);
+ ArtMethod* interface_method = pair.GetObjectForIndex(method_index);
+ if (LIKELY(interface_method != nullptr)) {
+ DCHECK_EQ(interface_method, Runtime::Current()->GetClassLinker()->FindMethodForProxy(this));
+ } else {
+ interface_method = Runtime::Current()->GetClassLinker()->FindMethodForProxy(this);
+ DCHECK(interface_method != nullptr);
+ }
return interface_method;
}
-inline void ArtMethod::SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
+inline void ArtMethod::SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
PointerSize pointer_size) {
SetNativePointer(DexCacheResolvedMethodsOffset(pointer_size),
new_dex_cache_methods,
@@ -462,14 +467,8 @@
if (UNLIKELY(klass->IsProxyClass())) {
// For normal methods, dex cache shortcuts will be visited through the declaring class.
// However, for proxies we need to keep the interface method alive, so we visit its roots.
- ArtMethod* interface_method = mirror::DexCache::GetElementPtrSize(
- GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
+ ArtMethod* interface_method = GetInterfaceMethodIfProxy(pointer_size);
DCHECK(interface_method != nullptr);
- DCHECK_EQ(interface_method,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy<kReadBarrierOption>(
- klass, this));
interface_method->VisitRoots(visitor, pointer_size);
}
}
@@ -483,8 +482,8 @@
if (old_class != new_class) {
SetDeclaringClass(new_class);
}
- ArtMethod** old_methods = GetDexCacheResolvedMethods(pointer_size);
- ArtMethod** new_methods = visitor(old_methods);
+ mirror::MethodDexCacheType* old_methods = GetDexCacheResolvedMethods(pointer_size);
+ mirror::MethodDexCacheType* new_methods = visitor(old_methods);
if (old_methods != new_methods) {
SetDexCacheResolvedMethods(new_methods, pointer_size);
}
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index ef9c457..d8984e8 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -216,11 +216,8 @@
} else {
// Method didn't override superclass method so search interfaces
if (IsProxyMethod()) {
- result = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
- GetDexMethodIndex(),
- pointer_size);
- CHECK_EQ(result,
- Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this));
+ result = GetInterfaceMethodIfProxy(pointer_size);
+ DCHECK(result != nullptr);
} else {
mirror::IfTable* iftable = GetDeclaringClass()->GetIfTable();
for (size_t i = 0; i < iftable->Count() && result == nullptr; i++) {
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 4b3e8ef..511ac83 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -53,6 +53,10 @@
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
+
+template <typename T> struct NativeDexCachePair;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
class ArtMethod FINAL {
@@ -352,7 +356,7 @@
dex_method_index_ = new_idx;
}
- ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(PointerSize pointer_size)
+ ALWAYS_INLINE mirror::MethodDexCacheType* GetDexCacheResolvedMethods(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
PointerSize pointer_size)
@@ -362,13 +366,14 @@
ArtMethod* new_method,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
+ ALWAYS_INLINE void SetDexCacheResolvedMethods(mirror::MethodDexCacheType* new_dex_cache_methods,
PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
bool HasDexCacheResolvedMethods(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
bool HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
- bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, PointerSize pointer_size)
+ bool HasSameDexCacheResolvedMethods(mirror::MethodDexCacheType* other_cache,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
// Get the Class* from the type index into this method's dex cache.
@@ -714,7 +719,7 @@
// Must be the last fields in the method.
struct PtrSizedFields {
// Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
- ArtMethod** dex_cache_resolved_methods_;
+ mirror::MethodDexCacheType* dex_cache_resolved_methods_;
// Pointer to JNI function registered to this method, or a function to resolve the JNI function,
// or the profiling data for non-native methods, or an ImtConflictTable, or the
diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h
index d29db15..9a73697 100644
--- a/runtime/class_linker-inl.h
+++ b/runtime/class_linker-inl.h
@@ -156,6 +156,29 @@
});
}
+inline ArtMethod* ClassLinker::LookupResolvedMethod(uint32_t method_idx,
+ ObjPtr<mirror::DexCache> dex_cache,
+ ObjPtr<mirror::ClassLoader> class_loader) {
+ PointerSize pointer_size = image_pointer_size_;
+ ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
+ if (resolved == nullptr) {
+ const DexFile& dex_file = *dex_cache->GetDexFile();
+ const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
+ ObjPtr<mirror::Class> klass = LookupResolvedType(method_id.class_idx_, dex_cache, class_loader);
+ if (klass != nullptr) {
+ if (klass->IsInterface()) {
+ resolved = klass->FindInterfaceMethod(dex_cache, method_idx, pointer_size);
+ } else {
+ resolved = klass->FindClassMethod(dex_cache, method_idx, pointer_size);
+ }
+ if (resolved != nullptr) {
+ dex_cache->SetResolvedMethod(method_idx, resolved, pointer_size);
+ }
+ }
+ }
+ return resolved;
+}
+
template <InvokeType type, ClassLinker::ResolveMode kResolveMode>
inline ArtMethod* ClassLinker::GetResolvedMethod(uint32_t method_idx, ArtMethod* referrer) {
DCHECK(referrer != nullptr);
@@ -164,9 +187,10 @@
// However, we delay the GetInterfaceMethodIfProxy() until needed.
DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
- if (resolved_method == nullptr || resolved_method->IsRuntimeMethod()) {
+ if (resolved_method == nullptr) {
return nullptr;
}
+ DCHECK(!resolved_method->IsRuntimeMethod());
if (kResolveMode == ResolveMode::kCheckICCEAndIAE) {
referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_);
// Check if the invoke type matches the class type.
@@ -203,7 +227,8 @@
DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
Thread::PoisonObjectPointersIfDebug();
ArtMethod* resolved_method = referrer->GetDexCacheResolvedMethod(method_idx, image_pointer_size_);
- if (UNLIKELY(resolved_method == nullptr || resolved_method->IsRuntimeMethod())) {
+ DCHECK(resolved_method == nullptr || !resolved_method->IsRuntimeMethod());
+ if (UNLIKELY(resolved_method == nullptr)) {
referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_);
ObjPtr<mirror::Class> declaring_class = referrer->GetDeclaringClass();
StackHandleScope<2> hs(self);
@@ -287,35 +312,6 @@
return klass.Ptr();
}
-template<ReadBarrierOption kReadBarrierOption>
-ArtMethod* ClassLinker::FindMethodForProxy(ObjPtr<mirror::Class> proxy_class,
- ArtMethod* proxy_method) {
- DCHECK(proxy_class->IsProxyClass());
- DCHECK(proxy_method->IsProxyMethod());
- {
- Thread* const self = Thread::Current();
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- // Locate the dex cache of the original interface/Object
- for (const DexCacheData& data : dex_caches_) {
- if (!self->IsJWeakCleared(data.weak_root) &&
- proxy_method->HasSameDexCacheResolvedMethods(data.resolved_methods,
- image_pointer_size_)) {
- ObjPtr<mirror::DexCache> dex_cache =
- ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
- if (dex_cache != nullptr) {
- ArtMethod* resolved_method = dex_cache->GetResolvedMethod(
- proxy_method->GetDexMethodIndex(), image_pointer_size_);
- CHECK(resolved_method != nullptr);
- return resolved_method;
- }
- }
- }
- }
- LOG(FATAL) << "Didn't find dex cache for " << proxy_class->PrettyClass() << " "
- << proxy_method->PrettyMethod();
- UNREACHABLE();
-}
-
} // namespace art
#endif // ART_RUNTIME_CLASS_LINKER_INL_H_
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 07a40c4..0e0d73b 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1113,7 +1113,8 @@
virtual void Visit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
const bool is_copied = method->IsCopied();
- ArtMethod** resolved_methods = method->GetDexCacheResolvedMethods(kRuntimePointerSize);
+ mirror::MethodDexCacheType* resolved_methods =
+ method->GetDexCacheResolvedMethods(kRuntimePointerSize);
if (resolved_methods != nullptr) {
bool in_image_space = false;
if (kIsDebugBuild || is_copied) {
@@ -1283,6 +1284,25 @@
}
}
+template <typename T>
+static void CopyNativeDexCachePairs(std::atomic<mirror::NativeDexCachePair<T>>* src,
+ size_t count,
+ std::atomic<mirror::NativeDexCachePair<T>>* dst,
+ PointerSize pointer_size) {
+ DCHECK_NE(count, 0u);
+ DCHECK(mirror::DexCache::GetNativePairPtrSize(src, 0, pointer_size).object != nullptr ||
+ mirror::DexCache::GetNativePairPtrSize(src, 0, pointer_size).index != 0u);
+ for (size_t i = 0; i < count; ++i) {
+ DCHECK_EQ(mirror::DexCache::GetNativePairPtrSize(dst, i, pointer_size).index, 0u);
+ DCHECK(mirror::DexCache::GetNativePairPtrSize(dst, i, pointer_size).object == nullptr);
+ mirror::NativeDexCachePair<T> source =
+ mirror::DexCache::GetNativePairPtrSize(src, i, pointer_size);
+ if (source.index != 0u || source.object != nullptr) {
+ mirror::DexCache::SetNativePairPtrSize(dst, i, source, pointer_size);
+ }
+ }
+}
+
// new_class_set is the set of classes that were read from the class table section in the image.
// If there was no class table section, it is null.
// Note: using a class here to avoid having to make ClassLinker internals public.
@@ -1362,7 +1382,10 @@
if (dex_file->NumTypeIds() < num_types) {
num_types = dex_file->NumTypeIds();
}
- const size_t num_methods = dex_file->NumMethodIds();
+ size_t num_methods = mirror::DexCache::kDexCacheMethodCacheSize;
+ if (dex_file->NumMethodIds() < num_methods) {
+ num_methods = dex_file->NumMethodIds();
+ }
size_t num_fields = mirror::DexCache::kDexCacheFieldCacheSize;
if (dex_file->NumFieldIds() < num_fields) {
num_fields = dex_file->NumFieldIds();
@@ -1395,37 +1418,18 @@
dex_cache->SetResolvedTypes(types);
}
if (num_methods != 0u) {
- ArtMethod** const methods = reinterpret_cast<ArtMethod**>(
- raw_arrays + layout.MethodsOffset());
- ArtMethod** const image_resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t j = 0; kIsDebugBuild && j < num_methods; ++j) {
- DCHECK(methods[j] == nullptr);
- }
- CopyNonNull(image_resolved_methods,
- num_methods,
- methods,
- [] (const ArtMethod* method) {
- return method == nullptr;
- });
+ mirror::MethodDexCacheType* const image_resolved_methods =
+ dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* const methods =
+ reinterpret_cast<mirror::MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
+ CopyNativeDexCachePairs(image_resolved_methods, num_methods, methods, image_pointer_size);
dex_cache->SetResolvedMethods(methods);
}
if (num_fields != 0u) {
mirror::FieldDexCacheType* const image_resolved_fields = dex_cache->GetResolvedFields();
mirror::FieldDexCacheType* const fields =
reinterpret_cast<mirror::FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
- for (size_t j = 0; j < num_fields; ++j) {
- DCHECK_EQ(mirror::DexCache::GetNativePairPtrSize(fields, j, image_pointer_size).index,
- 0u);
- DCHECK(mirror::DexCache::GetNativePairPtrSize(fields, j, image_pointer_size).object ==
- nullptr);
- mirror::DexCache::SetNativePairPtrSize(
- fields,
- j,
- mirror::DexCache::GetNativePairPtrSize(image_resolved_fields,
- j,
- image_pointer_size),
- image_pointer_size);
- }
+ CopyNativeDexCachePairs(image_resolved_fields, num_fields, fields, image_pointer_size);
dex_cache->SetResolvedFields(fields);
}
if (num_method_types != 0u) {
@@ -1662,13 +1666,13 @@
heap->VisitObjects(visitor);
}
- static void CheckPointerArray(gc::Heap* heap,
- ClassLinker* class_linker,
- ArtMethod** arr,
- size_t size)
+ static void CheckArtMethodDexCacheArray(gc::Heap* heap,
+ ClassLinker* class_linker,
+ mirror::MethodDexCacheType* arr,
+ size_t size)
REQUIRES_SHARED(Locks::mutator_lock_) {
ImageSanityChecks isc(heap, class_linker);
- isc.SanityCheckArtMethodPointerArray(arr, size);
+ isc.SanityCheckArtMethodDexCacheArray(arr, size);
}
private:
@@ -1723,7 +1727,7 @@
}
}
- void SanityCheckArtMethodPointerArray(ArtMethod** arr, size_t size)
+ void SanityCheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
REQUIRES_SHARED(Locks::mutator_lock_) {
CHECK_EQ(arr != nullptr, size != 0u);
if (arr != nullptr) {
@@ -1739,7 +1743,8 @@
CHECK(contains);
}
for (size_t j = 0; j < size; ++j) {
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(arr, j, pointer_size_);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
+ ArtMethod* method = pair.object;
// expected_class == null means we are a dex cache.
if (method != nullptr) {
SanityCheckArtMethod(method, nullptr);
@@ -1850,10 +1855,10 @@
}
} else {
if (kSanityCheckObjects) {
- ImageSanityChecks::CheckPointerArray(heap,
- this,
- dex_cache->GetResolvedMethods(),
- dex_cache->NumResolvedMethods());
+ ImageSanityChecks::CheckArtMethodDexCacheArray(heap,
+ this,
+ dex_cache->GetResolvedMethods(),
+ dex_cache->NumResolvedMethods());
}
// Register dex files, keep track of existing ones that are conflicts.
AppendToBootClassPath(*dex_file.get(), dex_cache);
@@ -3742,20 +3747,6 @@
return DexCacheData();
}
-void ClassLinker::FixupDexCaches(ArtMethod* resolution_method) {
- Thread* const self = Thread::Current();
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- for (const DexCacheData& data : dex_caches_) {
- if (!self->IsJWeakCleared(data.weak_root)) {
- ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
- self->DecodeJObject(data.weak_root));
- if (dex_cache != nullptr) {
- dex_cache->Fixup(resolution_method, image_pointer_size_);
- }
- }
- }
-}
-
mirror::Class* ClassLinker::CreatePrimitiveClass(Thread* self, Primitive::Type type) {
ObjPtr<mirror::Class> klass =
AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
@@ -6906,7 +6897,8 @@
// Check that there are no stale methods are in the dex cache array.
auto* resolved_methods = klass_->GetDexCache()->GetResolvedMethods();
for (size_t i = 0, count = klass_->GetDexCache()->NumResolvedMethods(); i < count; ++i) {
- auto* m = mirror::DexCache::GetElementPtrSize(resolved_methods, i, pointer_size);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_methods, i, pointer_size);
+ ArtMethod* m = pair.object;
CHECK(move_table_.find(m) == move_table_.end() ||
// The original versions of copied methods will still be present so allow those too.
// Note that if the first check passes this might fail to GetDeclaringClass().
@@ -7969,7 +7961,8 @@
PointerSize pointer_size = image_pointer_size_;
ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, pointer_size);
Thread::PoisonObjectPointersIfDebug();
- bool valid_dex_cache_method = resolved != nullptr && !resolved->IsRuntimeMethod();
+ DCHECK(resolved == nullptr || !resolved->IsRuntimeMethod());
+ bool valid_dex_cache_method = resolved != nullptr;
if (kResolveMode == ResolveMode::kNoChecks && valid_dex_cache_method) {
// We have a valid method from the DexCache and no checks to perform.
DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
@@ -8065,7 +8058,8 @@
Handle<mirror::ClassLoader> class_loader) {
ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx, image_pointer_size_);
Thread::PoisonObjectPointersIfDebug();
- if (resolved != nullptr && !resolved->IsRuntimeMethod()) {
+ if (resolved != nullptr) {
+ DCHECK(!resolved->IsRuntimeMethod());
DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
return resolved;
}
@@ -9085,6 +9079,53 @@
ifcount * mirror::IfTable::kMax));
}
+ArtMethod* ClassLinker::FindMethodForProxy(ArtMethod* proxy_method) {
+ DCHECK(proxy_method->IsProxyMethod());
+ {
+ uint32_t method_index = proxy_method->GetDexMethodIndex();
+ PointerSize pointer_size = image_pointer_size_;
+ Thread* const self = Thread::Current();
+ ReaderMutexLock mu(self, *Locks::dex_lock_);
+ // Locate the dex cache of the original interface/Object
+ for (const DexCacheData& data : dex_caches_) {
+ if (!self->IsJWeakCleared(data.weak_root) &&
+ proxy_method->HasSameDexCacheResolvedMethods(data.resolved_methods, pointer_size)) {
+ ObjPtr<mirror::DexCache> dex_cache =
+ ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
+ if (dex_cache != nullptr) {
+          // Look up the method. Instead of going through LookupResolvedMethod()
+ // and thus LookupResolvedType(), use the ClassTable from the DexCacheData.
+ ArtMethod* resolved_method = dex_cache->GetResolvedMethod(method_index, pointer_size);
+ if (resolved_method == nullptr) {
+ const DexFile::MethodId& method_id = data.dex_file->GetMethodId(method_index);
+ ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(method_id.class_idx_);
+ if (klass == nullptr) {
+ const char* descriptor = data.dex_file->StringByTypeIdx(method_id.class_idx_);
+ klass = data.class_table->Lookup(descriptor, ComputeModifiedUtf8Hash(descriptor));
+ DCHECK(klass != nullptr);
+ dex_cache->SetResolvedType(method_id.class_idx_, klass);
+ }
+ if (klass->IsInterface()) {
+ resolved_method = klass->FindInterfaceMethod(dex_cache, method_index, pointer_size);
+ } else {
+ DCHECK(
+ klass == WellKnownClasses::ToClass(WellKnownClasses::java_lang_reflect_Proxy) ||
+ klass == WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object));
+ resolved_method = klass->FindClassMethod(dex_cache, method_index, pointer_size);
+ }
+ CHECK(resolved_method != nullptr);
+ dex_cache->SetResolvedMethod(method_index, resolved_method, pointer_size);
+ }
+ return resolved_method;
+ }
+ }
+ }
+ }
+ // Note: Do not use proxy_method->PrettyMethod() as it can call back here.
+ LOG(FATAL) << "Didn't find dex cache for " << proxy_method->GetDeclaringClass()->PrettyClass();
+ UNREACHABLE();
+}
+
// Instantiate ResolveMethod.
template ArtMethod* ClassLinker::ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
const DexFile& dex_file,
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 3cf59f0..4a99c66 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -55,6 +55,9 @@
class MethodType;
template<class T> class ObjectArray;
class StackTraceElement;
+ template <typename T> struct NativeDexCachePair;
+ using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+ using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
class ClassTable;
@@ -287,6 +290,12 @@
kCheckICCEAndIAE
};
+ // Look up a previously resolved method with the given index.
+ ArtMethod* LookupResolvedMethod(uint32_t method_idx,
+ ObjPtr<mirror::DexCache> dex_cache,
+ ObjPtr<mirror::ClassLoader> class_loader)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
// Resolve a method with a given ID from the DexFile, storing the
// result in DexCache. The ClassLinker and ClassLoader are used as
// in ResolveType. What is unique is the method type argument which
@@ -423,9 +432,6 @@
ClassTable* FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- void FixupDexCaches(ArtMethod* resolution_method)
- REQUIRES(!Locks::dex_lock_)
- REQUIRES_SHARED(Locks::mutator_lock_);
LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self,
LinearAlloc* allocator,
@@ -475,8 +481,7 @@
REQUIRES_SHARED(Locks::mutator_lock_);
std::string GetDescriptorForProxy(ObjPtr<mirror::Class> proxy_class)
REQUIRES_SHARED(Locks::mutator_lock_);
- template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
- ArtMethod* FindMethodForProxy(ObjPtr<mirror::Class> proxy_class, ArtMethod* proxy_method)
+ ArtMethod* FindMethodForProxy(ArtMethod* proxy_method)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -692,7 +697,7 @@
// jweak decode that triggers read barriers (and mark them alive unnecessarily and mess with
// class unloading.)
const DexFile* dex_file;
- ArtMethod** resolved_methods;
+ mirror::MethodDexCacheType* resolved_methods;
// Identify the associated class loader's class table. This is used to make sure that
// the Java call to native DexCache.setResolvedType() inserts the resolved type in that
// class table. It is also used to make sure we don't register the same dex cache with
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 98d7c7c..39d77f0 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -440,14 +440,6 @@
}
TestRootVisitor visitor;
class_linker_->VisitRoots(&visitor, kVisitRootFlagAllRoots);
- // Verify the dex cache has resolution methods in all resolved method slots
- ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(Thread::Current(), dex);
- auto* resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t i = 0, num_methods = dex_cache->NumResolvedMethods(); i != num_methods; ++i) {
- EXPECT_TRUE(
- mirror::DexCache::GetElementPtrSize(resolved_methods, i, kRuntimePointerSize) != nullptr)
- << dex.GetLocation() << " i=" << i;
- }
}
class TestRootVisitor : public SingleRootVisitor {
diff --git a/runtime/common_runtime_test.cc b/runtime/common_runtime_test.cc
index 659c7e4..aa5259e 100644
--- a/runtime/common_runtime_test.cc
+++ b/runtime/common_runtime_test.cc
@@ -425,7 +425,6 @@
PostRuntimeCreate();
runtime_.reset(Runtime::Current());
class_linker_ = runtime_->GetClassLinker();
- class_linker_->FixupDexCaches(runtime_->GetResolutionMethod());
// Runtime::Create acquired the mutator_lock_ that is normally given away when we
// Runtime::Start, give it away now and then switch to a more managable ScopedObjectAccess.
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 828148a..a6c5d6c 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -84,7 +84,8 @@
const DexFile* dex_file = dex_cache->GetDexFile();
const DexFile::MethodId& method_id = dex_file->GetMethodId(method_index);
ArtMethod* inlined_method = caller->GetDexCacheResolvedMethod(method_index, kRuntimePointerSize);
- if (inlined_method != nullptr && !inlined_method->IsRuntimeMethod()) {
+ if (inlined_method != nullptr) {
+ DCHECK(!inlined_method->IsRuntimeMethod());
return inlined_method;
}
const char* descriptor = dex_file->StringByTypeIdx(method_id.class_idx_);
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 6abf7c5..3061365 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -2461,6 +2461,21 @@
return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}
+// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
+extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
+ DCHECK(!referrer->IsProxyMethod());
+ ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
+ method_index, referrer->GetDexCache(), referrer->GetClassLoader());
+ DCHECK(result == nullptr ||
+ result->GetDeclaringClass()->IsInterface() ||
+ result->GetDeclaringClass() ==
+ WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
+ << result->PrettyMethod();
+ return result;
+}
+
// Determine target of interface dispatch. The interface method and this object are known non-null.
// The interface method is the method returned by the dex cache in the conflict trampoline.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
@@ -2468,7 +2483,6 @@
Thread* self,
ArtMethod** sp)
REQUIRES_SHARED(Locks::mutator_lock_) {
- CHECK(interface_method != nullptr);
ObjPtr<mirror::Object> this_object(raw_this_object);
ScopedQuickEntrypointChecks sqec(self);
StackHandleScope<1> hs(self);
@@ -2478,7 +2492,8 @@
ArtMethod* method = nullptr;
ImTable* imt = cls->GetImt(kRuntimePointerSize);
- if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
+ if (LIKELY(interface_method != nullptr)) {
+ DCHECK_NE(interface_method->GetDexMethodIndex(), DexFile::kDexNoIndex);
// If the interface method is already resolved, look whether we have a match in the
// ImtConflictTable.
ArtMethod* conflict_method = imt->Get(ImTable::GetImtIndex(interface_method),
@@ -2505,9 +2520,7 @@
return GetTwoWordFailureValue(); // Failure.
}
} else {
- // The interface method is unresolved, so look it up in the dex file of the caller.
- DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
-
+ // The interface method is unresolved, so resolve it in the dex file of the caller.
// Fetch the dex_method_idx of the target interface method from the caller.
uint32_t dex_method_idx;
uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 3ae382e..fe0d35f 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -1268,17 +1268,19 @@
}
dex_cache->FixupResolvedTypes<kWithoutReadBarrier>(new_types, fixup_adapter);
}
- ArtMethod** methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
if (methods != nullptr) {
- ArtMethod** new_methods = fixup_adapter.ForwardObject(methods);
+ mirror::MethodDexCacheType* new_methods = fixup_adapter.ForwardObject(methods);
if (methods != new_methods) {
dex_cache->SetResolvedMethods(new_methods);
}
for (size_t j = 0, num = dex_cache->NumResolvedMethods(); j != num; ++j) {
- ArtMethod* orig = mirror::DexCache::GetElementPtrSize(new_methods, j, pointer_size);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(new_methods, j, pointer_size);
+ ArtMethod* orig = pair.object;
ArtMethod* copy = fixup_adapter.ForwardObject(orig);
if (orig != copy) {
- mirror::DexCache::SetElementPtrSize(new_methods, j, copy, pointer_size);
+ pair.object = copy;
+ mirror::DexCache::SetNativePairPtrSize(new_methods, j, pair, pointer_size);
}
}
}
diff --git a/runtime/generated/asm_support_gen.h b/runtime/generated/asm_support_gen.h
index 06e4704..acfd889 100644
--- a/runtime/generated/asm_support_gen.h
+++ b/runtime/generated/asm_support_gen.h
@@ -78,6 +78,10 @@
DEFINE_CHECK_EQ(static_cast<int32_t>(STRING_DEX_CACHE_HASH_BITS), (static_cast<int32_t>(art::LeastSignificantBit(art::mirror::DexCache::kDexCacheStringCacheSize))))
#define STRING_DEX_CACHE_ELEMENT_SIZE 8
DEFINE_CHECK_EQ(static_cast<int32_t>(STRING_DEX_CACHE_ELEMENT_SIZE), (static_cast<int32_t>(sizeof(art::mirror::StringDexCachePair))))
+#define METHOD_DEX_CACHE_SIZE_MINUS_ONE 1023
+DEFINE_CHECK_EQ(static_cast<int32_t>(METHOD_DEX_CACHE_SIZE_MINUS_ONE), (static_cast<int32_t>(art::mirror::DexCache::kDexCacheMethodCacheSize - 1)))
+#define METHOD_DEX_CACHE_HASH_BITS 10
+DEFINE_CHECK_EQ(static_cast<int32_t>(METHOD_DEX_CACHE_HASH_BITS), (static_cast<int32_t>(art::LeastSignificantBit(art::mirror::DexCache::kDexCacheMethodCacheSize))))
#define CARD_TABLE_CARD_SHIFT 0xa
DEFINE_CHECK_EQ(static_cast<size_t>(CARD_TABLE_CARD_SHIFT), (static_cast<size_t>(art::gc::accounting::CardTable::kCardShift)))
#define MIN_LARGE_OBJECT_THRESHOLD 0x3000
diff --git a/runtime/image.cc b/runtime/image.cc
index ac36d7c..7d0a709 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -26,7 +26,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '5', '\0' }; // Fix DexCache fields.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '6', '\0' }; // Hash-based methods array.
ImageHeader::ImageHeader(uint32_t image_begin,
uint32_t image_size,
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 18e22ef..fdb14f1 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -208,24 +208,38 @@
}
}
+inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
+ DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
+ const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
+ DCHECK_LT(slot_idx, NumResolvedMethods());
+ return slot_idx;
+}
+
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
- DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB.
- ArtMethod* method = GetElementPtrSize<ArtMethod*>(GetResolvedMethods(), method_idx, ptr_size);
- // Hide resolution trampoline methods from the caller
- if (method != nullptr && method->IsRuntimeMethod()) {
- DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod());
- return nullptr;
- }
- return method;
+ auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
+ return pair.GetObjectForIndex(method_idx);
}
inline void DexCache::SetResolvedMethod(uint32_t method_idx,
ArtMethod* method,
PointerSize ptr_size) {
DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
- DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB.
- SetElementPtrSize(GetResolvedMethods(), method_idx, method, ptr_size);
+ DCHECK(method != nullptr);
+ MethodDexCachePair pair(method, method_idx);
+ SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
+}
+
+inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
+ DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
+ uint32_t slot_idx = MethodSlotIndex(method_idx);
+ auto* resolved_methods = GetResolvedMethods();
+ // This is racy but should only be called from the single-threaded ImageWriter.
+ DCHECK(Runtime::Current()->IsAotCompiler());
+ if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
+ MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
+ SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
+ }
}
template <typename PtrType>
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index 96e3475..7b18a4c 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -61,14 +61,14 @@
: reinterpret_cast<uint8_t*>(linear_alloc->Alloc(self, layout.Size()));
}
- mirror::StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::StringDexCacheType*>(raw_arrays + layout.StringsOffset());
- mirror::TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
- ArtMethod** methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
- reinterpret_cast<ArtMethod**>(raw_arrays + layout.MethodsOffset());
- mirror::FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
- reinterpret_cast<mirror::FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
+ StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
+ reinterpret_cast<StringDexCacheType*>(raw_arrays + layout.StringsOffset());
+ TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
+ reinterpret_cast<TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
+ MethodDexCacheType* methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
+ reinterpret_cast<MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
+ FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
+ reinterpret_cast<FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
size_t num_strings = kDexCacheStringCacheSize;
if (dex_file->NumStringIds() < num_strings) {
@@ -82,6 +82,10 @@
if (dex_file->NumFieldIds() < num_fields) {
num_fields = dex_file->NumFieldIds();
}
+ size_t num_methods = kDexCacheMethodCacheSize;
+ if (dex_file->NumMethodIds() < num_methods) {
+ num_methods = dex_file->NumMethodIds();
+ }
// Note that we allocate the method type dex caches regardless of this flag,
// and we make sure here that they're not used by the runtime. This is in the
@@ -105,7 +109,7 @@
GcRoot<mirror::CallSite>* call_sites = (dex_file->NumCallSiteIds() == 0)
? nullptr
- : reinterpret_cast<GcRoot<mirror::CallSite>*>(raw_arrays + layout.CallSitesOffset());
+ : reinterpret_cast<GcRoot<CallSite>*>(raw_arrays + layout.CallSitesOffset());
DCHECK_ALIGNED(raw_arrays, alignof(StringDexCacheType)) <<
"Expected raw_arrays to align to StringDexCacheType.";
@@ -125,8 +129,9 @@
CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u);
CHECK(types[i].load(std::memory_order_relaxed).object.IsNull());
}
- for (size_t i = 0; i < dex_file->NumMethodIds(); ++i) {
- CHECK(GetElementPtrSize(methods, i, image_pointer_size) == nullptr);
+ for (size_t i = 0; i < num_methods; ++i) {
+ CHECK_EQ(GetNativePairPtrSize(methods, i, image_pointer_size).index, 0u);
+ CHECK(GetNativePairPtrSize(methods, i, image_pointer_size).object == nullptr);
}
for (size_t i = 0; i < num_fields; ++i) {
CHECK_EQ(GetNativePairPtrSize(fields, i, image_pointer_size).index, 0u);
@@ -149,6 +154,9 @@
if (fields != nullptr) {
mirror::FieldDexCachePair::Initialize(fields, image_pointer_size);
}
+ if (methods != nullptr) {
+ mirror::MethodDexCachePair::Initialize(methods, image_pointer_size);
+ }
if (method_types != nullptr) {
mirror::MethodTypeDexCachePair::Initialize(method_types);
}
@@ -159,14 +167,13 @@
types,
num_types,
methods,
- dex_file->NumMethodIds(),
+ num_methods,
fields,
num_fields,
method_types,
num_method_types,
call_sites,
- dex_file->NumCallSiteIds(),
- image_pointer_size);
+ dex_file->NumCallSiteIds());
}
void DexCache::Init(const DexFile* dex_file,
@@ -175,15 +182,14 @@
uint32_t num_strings,
TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
- ArtMethod** resolved_methods,
+ MethodDexCacheType* resolved_methods,
uint32_t num_resolved_methods,
FieldDexCacheType* resolved_fields,
uint32_t num_resolved_fields,
MethodTypeDexCacheType* resolved_method_types,
uint32_t num_resolved_method_types,
GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites,
- PointerSize pointer_size) {
+ uint32_t num_resolved_call_sites) {
CHECK(dex_file != nullptr);
CHECK(location != nullptr);
CHECK_EQ(num_strings != 0u, strings != nullptr);
@@ -207,24 +213,6 @@
SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields);
SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types);
SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites);
-
- Runtime* const runtime = Runtime::Current();
- if (runtime->HasResolutionMethod()) {
- // Initialize the resolve methods array to contain trampolines for resolution.
- Fixup(runtime->GetResolutionMethod(), pointer_size);
- }
-}
-
-void DexCache::Fixup(ArtMethod* trampoline, PointerSize pointer_size) {
- // Fixup the resolve methods array to contain trampoline for resolution.
- CHECK(trampoline != nullptr);
- CHECK(trampoline->IsRuntimeMethod());
- auto* resolved_methods = GetResolvedMethods();
- for (size_t i = 0, length = NumResolvedMethods(); i < length; i++) {
- if (GetElementPtrSize<ArtMethod*>(resolved_methods, i, pointer_size) == nullptr) {
- SetElementPtrSize(resolved_methods, i, trampoline, pointer_size);
- }
- }
}
void DexCache::SetLocation(ObjPtr<mirror::String> location) {
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index cf570b8..7fd5dd1 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -129,6 +129,9 @@
using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
+
using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
@@ -153,6 +156,11 @@
static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
"Field dex cache size is not a power of 2.");
+ // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
+ static constexpr size_t kDexCacheMethodCacheSize = 1024;
+ static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize),
+ "Method dex cache size is not a power of 2.");
+
// Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
// to hold.
static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
@@ -171,6 +179,10 @@
return kDexCacheFieldCacheSize;
}
+ static constexpr size_t StaticMethodSize() {
+ return kDexCacheMethodCacheSize;
+ }
+
static constexpr size_t StaticMethodTypeSize() {
return kDexCacheMethodTypeCacheSize;
}
@@ -189,9 +201,6 @@
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::dex_lock_);
- void Fixup(ArtMethod* trampoline, PointerSize pointer_size)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -284,6 +293,8 @@
ArtMethod* resolved,
PointerSize ptr_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
+ REQUIRES_SHARED(Locks::mutator_lock_);
// Pointer sized variant, used for patching.
ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
@@ -328,11 +339,11 @@
SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
}
- ArtMethod** GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
- return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
+ MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
}
- void SetResolvedMethods(ArtMethod** resolved_methods)
+ void SetResolvedMethods(MethodDexCacheType* resolved_methods)
ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_) {
SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
@@ -429,6 +440,7 @@
uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+ uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
private:
@@ -438,15 +450,14 @@
uint32_t num_strings,
TypeDexCacheType* resolved_types,
uint32_t num_resolved_types,
- ArtMethod** resolved_methods,
+ MethodDexCacheType* resolved_methods,
uint32_t num_resolved_methods,
FieldDexCacheType* resolved_fields,
uint32_t num_resolved_fields,
MethodTypeDexCacheType* resolved_method_types,
uint32_t num_resolved_method_types,
GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites,
- PointerSize pointer_size)
+ uint32_t num_resolved_call_sites)
REQUIRES_SHARED(Locks::mutator_lock_);
// std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
@@ -471,7 +482,7 @@
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
// Due to lack of 16-byte atomics support, we use hand-crafted routines.
-#if defined(__aarch64__)
+#if defined(__aarch64__) || defined(__mips__)
// 16-byte atomics are supported on aarch64.
ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
std::atomic<ConversionPair64>* target) {
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index 194d9bc..d2b9240 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -54,7 +54,8 @@
|| java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings());
EXPECT_TRUE(dex_cache->StaticTypeSize() == dex_cache->NumResolvedTypes()
|| java_lang_dex_file_->NumTypeIds() == dex_cache->NumResolvedTypes());
- EXPECT_EQ(java_lang_dex_file_->NumMethodIds(), dex_cache->NumResolvedMethods());
+ EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
+ || java_lang_dex_file_->NumMethodIds() == dex_cache->NumResolvedMethods());
EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
|| java_lang_dex_file_->NumFieldIds() == dex_cache->NumResolvedFields());
EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes()
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
index 72f63c6..9d4e9fb 100644
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ b/runtime/utils/dex_cache_arrays_layout-inl.h
@@ -64,7 +64,7 @@
"Expecting alignof(StringDexCacheType) == 8");
static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
"Expecting alignof(MethodTypeDexCacheType) == 8");
- // This is the same as alignof(FieldDexCacheType) for the given pointer size.
+ // This is the same as alignof({Field,Method}DexCacheType) for the given pointer size.
return 2u * static_cast<size_t>(pointer_size);
}
@@ -84,7 +84,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return ArraySize(PointerSize::k64, cache_size);
+ return PairArraySize(GcRootAsPointerSize<mirror::Class>(), cache_size);
}
inline size_t DexCacheArraysLayout::TypesAlignment() const {
@@ -96,11 +96,15 @@
}
inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const {
- return ArraySize(pointer_size_, num_elements);
+ size_t cache_size = mirror::DexCache::kDexCacheMethodCacheSize;
+ if (num_elements < cache_size) {
+ cache_size = num_elements;
+ }
+ return PairArraySize(pointer_size_, cache_size);
}
inline size_t DexCacheArraysLayout::MethodsAlignment() const {
- return static_cast<size_t>(pointer_size_);
+ return 2u * static_cast<size_t>(pointer_size_);
}
inline size_t DexCacheArraysLayout::StringOffset(uint32_t string_idx) const {
@@ -113,7 +117,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return ArraySize(PointerSize::k64, cache_size);
+ return PairArraySize(GcRootAsPointerSize<mirror::String>(), cache_size);
}
inline size_t DexCacheArraysLayout::StringsAlignment() const {
@@ -132,7 +136,7 @@
if (num_elements < cache_size) {
cache_size = num_elements;
}
- return 2u * static_cast<size_t>(pointer_size_) * cache_size;
+ return PairArraySize(pointer_size_, cache_size);
}
inline size_t DexCacheArraysLayout::FieldsAlignment() const {
@@ -170,6 +174,10 @@
return static_cast<size_t>(element_size) * num_elements;
}
+inline size_t DexCacheArraysLayout::PairArraySize(PointerSize element_size, uint32_t num_elements) {
+ return 2u * static_cast<size_t>(element_size) * num_elements;
+}
+
} // namespace art
#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
diff --git a/runtime/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h
index 377a374..fc04159 100644
--- a/runtime/utils/dex_cache_arrays_layout.h
+++ b/runtime/utils/dex_cache_arrays_layout.h
@@ -130,6 +130,7 @@
static size_t ElementOffset(PointerSize element_size, uint32_t idx);
static size_t ArraySize(PointerSize element_size, uint32_t num_elements);
+ static size_t PairArraySize(PointerSize element_size, uint32_t num_elements);
};
} // namespace art
diff --git a/test/497-inlining-and-class-loader/clear_dex_cache.cc b/test/497-inlining-and-class-loader/clear_dex_cache.cc
index 9ba05bc..c113042 100644
--- a/test/497-inlining-and-class-loader/clear_dex_cache.cc
+++ b/test/497-inlining-and-class-loader/clear_dex_cache.cc
@@ -34,22 +34,32 @@
ScopedObjectAccess soa(Thread::Current());
mirror::DexCache* dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
size_t num_methods = dex_cache->NumResolvedMethods();
- ArtMethod** methods = dex_cache->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods = dex_cache->GetResolvedMethods();
CHECK_EQ(num_methods != 0u, methods != nullptr);
if (num_methods == 0u) {
return nullptr;
}
jarray array;
if (sizeof(void*) == 4) {
- array = env->NewIntArray(num_methods);
+ array = env->NewIntArray(2u * num_methods);
} else {
- array = env->NewLongArray(num_methods);
+ array = env->NewLongArray(2u * num_methods);
}
CHECK(array != nullptr);
- mirror::PointerArray* pointer_array = soa.Decode<mirror::PointerArray>(array).Ptr();
+ ObjPtr<mirror::Array> decoded_array = soa.Decode<mirror::Array>(array);
for (size_t i = 0; i != num_methods; ++i) {
- ArtMethod* method = mirror::DexCache::GetElementPtrSize(methods, i, kRuntimePointerSize);
- pointer_array->SetElementPtrSize(i, method, kRuntimePointerSize);
+ auto pair = mirror::DexCache::GetNativePairPtrSize(methods, i, kRuntimePointerSize);
+ uint32_t index = pair.index;
+ ArtMethod* method = pair.object;
+ if (sizeof(void*) == 4) {
+ ObjPtr<mirror::IntArray> int_array = down_cast<mirror::IntArray*>(decoded_array.Ptr());
+ int_array->Set(2u * i, index);
+ int_array->Set(2u * i + 1u, static_cast<jint>(reinterpret_cast<uintptr_t>(method)));
+ } else {
+ ObjPtr<mirror::LongArray> long_array = down_cast<mirror::LongArray*>(decoded_array.Ptr());
+ long_array->Set(2u * i, index);
+ long_array->Set(2u * i + 1u, reinterpret_cast64<jlong>(method));
+ }
}
return array;
}
@@ -59,14 +69,26 @@
ScopedObjectAccess soa(Thread::Current());
mirror::DexCache* dex_cache = soa.Decode<mirror::Class>(cls)->GetDexCache();
size_t num_methods = dex_cache->NumResolvedMethods();
- ArtMethod** methods = soa.Decode<mirror::Class>(cls)->GetDexCache()->GetResolvedMethods();
+ mirror::MethodDexCacheType* methods =
+ soa.Decode<mirror::Class>(cls)->GetDexCache()->GetResolvedMethods();
CHECK_EQ(num_methods != 0u, methods != nullptr);
- ObjPtr<mirror::PointerArray> old = soa.Decode<mirror::PointerArray>(old_cache);
+ ObjPtr<mirror::Array> old = soa.Decode<mirror::Array>(old_cache);
CHECK_EQ(methods != nullptr, old != nullptr);
-  CHECK_EQ(num_methods, static_cast<size_t>(old->GetLength()));
+  CHECK_EQ(2u * num_methods, static_cast<size_t>(old->GetLength()));
for (size_t i = 0; i != num_methods; ++i) {
- ArtMethod* method = old->GetElementPtrSize<ArtMethod*>(i, kRuntimePointerSize);
- mirror::DexCache::SetElementPtrSize(methods, i, method, kRuntimePointerSize);
+ uint32_t index;
+ ArtMethod* method;
+ if (sizeof(void*) == 4) {
+ ObjPtr<mirror::IntArray> int_array = down_cast<mirror::IntArray*>(old.Ptr());
+ index = static_cast<uint32_t>(int_array->Get(2u * i));
+ method = reinterpret_cast<ArtMethod*>(static_cast<uint32_t>(int_array->Get(2u * i + 1u)));
+ } else {
+ ObjPtr<mirror::LongArray> long_array = down_cast<mirror::LongArray*>(old.Ptr());
+ index = dchecked_integral_cast<uint32_t>(long_array->Get(2u * i));
+ method = reinterpret_cast64<ArtMethod*>(long_array->Get(2u * i + 1u));
+ }
+ mirror::MethodDexCachePair pair(method, index);
+ mirror::DexCache::SetNativePairPtrSize(methods, i, pair, kRuntimePointerSize);
}
}
diff --git a/tools/cpp-define-generator/constant_dexcache.def b/tools/cpp-define-generator/constant_dexcache.def
index ede16d2..743ebb7 100644
--- a/tools/cpp-define-generator/constant_dexcache.def
+++ b/tools/cpp-define-generator/constant_dexcache.def
@@ -25,4 +25,8 @@
DEFINE_EXPR(STRING_DEX_CACHE_HASH_BITS, int32_t,
art::LeastSignificantBit(art::mirror::DexCache::kDexCacheStringCacheSize))
DEFINE_EXPR(STRING_DEX_CACHE_ELEMENT_SIZE, int32_t,
- sizeof(art::mirror::StringDexCachePair))
\ No newline at end of file
+ sizeof(art::mirror::StringDexCachePair))
+DEFINE_EXPR(METHOD_DEX_CACHE_SIZE_MINUS_ONE, int32_t,
+ art::mirror::DexCache::kDexCacheMethodCacheSize - 1)
+DEFINE_EXPR(METHOD_DEX_CACHE_HASH_BITS, int32_t,
+ art::LeastSignificantBit(art::mirror::DexCache::kDexCacheMethodCacheSize))