author     2015-08-03 11:56:49 +0100
committer  2015-09-03 17:30:57 +0100
commit     05792b98980741111b4d0a24d68cff2a8e070a3a (patch)
tree       bad79a387bcbdaefc87c07b388099960ca9caff3
parent     c26b4512a01d46756683a4f5e186a0b7f397f251 (diff)
ART: Move DexCache arrays to native.
This CL has a companion CL in libcore/
https://android-review.googlesource.com/162985
Change-Id: Icbc9e20ad1b565e603195b12714762bb446515fa
60 files changed, 1087 insertions, 601 deletions
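The core of the CL shows up in the `GetCacheOffset`/`GetCachePointerOffset` hunks below: the DexCache string and resolved-type caches become native arrays of `GcRoot<>` (compressed references) and the resolved-method/field caches become native `ArtMethod**`/`ArtField**` arrays, so an element's offset is simply `element_size * index` with no managed-array data header, and the cache pointers themselves are loaded as pointer-width fields (`k32`/`k64`, `movq`, `kLoadDoubleword`, ...) rather than heap references. Below is a minimal standalone sketch of the before/after offset arithmetic; the 4-byte `kGcRootSize` and the 16-byte `kManagedDataOffset` are assumed stand-ins for illustration, not ART constants.

```cpp
// Standalone sketch (not ART code) of the offset change in this CL.
// kGcRootSize stands in for sizeof(GcRoot<mirror::Object>) and kManagedDataOffset for
// mirror::Array::DataOffset(...); both values are assumptions for illustration only.
#include <cstddef>
#include <cstdint>
#include <iostream>

namespace {

constexpr size_t kGcRootSize = sizeof(uint32_t);  // assumed: GcRoot<> is a 32-bit compressed ref
constexpr size_t kManagedDataOffset = 16u;        // assumed managed-array header + data offset

// Old scheme: entries lived in managed ObjectArray/PointerArray objects, so every element
// offset carried the array's data offset.
constexpr size_t OldElementOffset(uint32_t index) {
  return kManagedDataOffset + kGcRootSize * index;
}

// New scheme: native arrays have no object header.
constexpr size_t CacheOffset(uint32_t index) {  // strings, resolved types (GcRoot<> elements)
  return kGcRootSize * index;
}
constexpr size_t CachePointerOffset(uint32_t index, size_t ptr_size) {  // resolved methods, fields
  return ptr_size * index;
}

}  // namespace

int main() {
  constexpr uint32_t kIndex = 42;
  std::cout << "old managed offset:          " << OldElementOffset(kIndex) << "\n"
            << "new GcRoot offset:           " << CacheOffset(kIndex) << "\n"
            << "new pointer offset (64-bit): " << CachePointerOffset(kIndex, 8u) << "\n";
  return 0;
}
```

Because the method/field caches now hold raw pointers, the first-level load of `ArtMethod::DexCacheResolvedMethodsOffset()` and friends also becomes pointer-sized, which is why the ARM64, MIPS64 and x86-64 hunks switch to `Ldr` into an X register, `kLoadDoubleword` and `movq`.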
diff --git a/compiler/dex/quick/arm/call_arm.cc b/compiler/dex/quick/arm/call_arm.cc index 981ab2c1ee..eb8730cf4b 100644 --- a/compiler/dex/quick/arm/call_arm.cc +++ b/compiler/dex/quick/arm/call_arm.cc @@ -677,10 +677,11 @@ int ArmMir2Lir::ArmNextSDCallInsn(CompilationUnit* cu, CallInfo* info, FALLTHROUGH_INTENDED; case 1: // Get method->dex_cache_resolved_methods_ if (!use_pc_rel) { - cg->LoadRefDisp(arg0_ref, - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(), - arg0_ref, - kNotVolatile); + cg->LoadBaseDisp(arg0_ref, + ArtMethod::DexCacheResolvedMethodsOffset(kArmPointerSize).Int32Value(), + arg0_ref, + k32, + kNotVolatile); } // Set up direct code if known. if (direct_code != 0) { @@ -702,8 +703,8 @@ int ArmMir2Lir::ArmNextSDCallInsn(CompilationUnit* cu, CallInfo* info, CHECK_EQ(cu->dex_file, target_method.dex_file); if (!use_pc_rel) { cg->LoadRefDisp(arg0_ref, - mirror::ObjectArray<mirror::Object>::OffsetOfElement( - target_method.dex_method_index).Int32Value(), + cg->GetCachePointerOffset(target_method.dex_method_index, + kArmPointerSize), arg0_ref, kNotVolatile); } else { diff --git a/compiler/dex/quick/arm64/call_arm64.cc b/compiler/dex/quick/arm64/call_arm64.cc index 83a6affe81..036da2e2b2 100644 --- a/compiler/dex/quick/arm64/call_arm64.cc +++ b/compiler/dex/quick/arm64/call_arm64.cc @@ -511,10 +511,11 @@ int Arm64Mir2Lir::Arm64NextSDCallInsn(CompilationUnit* cu, CallInfo* info, FALLTHROUGH_INTENDED; case 1: // Get method->dex_cache_resolved_methods_ if (!use_pc_rel) { - cg->LoadRefDisp(arg0_ref, - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(), - arg0_ref, - kNotVolatile); + cg->LoadBaseDisp(arg0_ref, + ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value(), + arg0_ref, + k64, + kNotVolatile); } // Set up direct code if known. if (direct_code != 0) { @@ -536,8 +537,9 @@ int Arm64Mir2Lir::Arm64NextSDCallInsn(CompilationUnit* cu, CallInfo* info, CHECK_EQ(cu->dex_file, target_method.dex_file); if (!use_pc_rel) { cg->LoadWordDisp(arg0_ref, - mirror::Array::DataOffset(kArm64PointerSize).Uint32Value() + - target_method.dex_method_index * kArm64PointerSize, arg0_ref); + cg->GetCachePointerOffset(target_method.dex_method_index, + kArm64PointerSize), + arg0_ref); } else { size_t offset = cg->dex_cache_arrays_layout_.MethodOffset(target_method.dex_method_index); cg->OpPcRelDexCacheArrayLoad(cu->dex_file, offset, arg0_ref, true); diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc index af108170e6..2a1d64425b 100644 --- a/compiler/dex/quick/gen_common.cc +++ b/compiler/dex/quick/gen_common.cc @@ -88,24 +88,30 @@ void Mir2Lir::GenIfNullUseHelperImm(RegStorage r_result, QuickEntrypointEnum tra r_result)); } +void Mir2Lir::LoadTypeFromCache(uint32_t type_index, RegStorage class_reg) { + if (CanUseOpPcRelDexCacheArrayLoad()) { + uint32_t offset = dex_cache_arrays_layout_.TypeOffset(type_index); + OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, class_reg, false); + } else { + RegStorage r_method = LoadCurrMethodWithHint(class_reg); + MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset( + GetInstructionSetPointerSize(cu_->instruction_set)); + LoadBaseDisp(r_method, resolved_types_offset.Int32Value(), class_reg, + cu_->target64 ? 
k64 : k32, kNotVolatile); + int32_t offset_of_type = GetCacheOffset(type_index); + LoadRefDisp(class_reg, offset_of_type, class_reg, kNotVolatile); + } +} + RegStorage Mir2Lir::GenGetOtherTypeForSgetSput(const MirSFieldLoweringInfo& field_info, int opt_flags) { DCHECK_NE(field_info.StorageIndex(), DexFile::kDexNoIndex); // May do runtime call so everything to home locations. FlushAllRegs(); + // Using fixed register to sync with possible call to runtime support. RegStorage r_base = TargetReg(kArg0, kRef); LockTemp(r_base); - if (CanUseOpPcRelDexCacheArrayLoad()) { - uint32_t offset = dex_cache_arrays_layout_.TypeOffset(field_info.StorageIndex()); - OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, r_base, false); - } else { - // Using fixed register to sync with possible call to runtime support. - RegStorage r_method = LoadCurrMethodWithHint(r_base); - LoadRefDisp(r_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), r_base, - kNotVolatile); - int32_t offset_of_field = ObjArray::OffsetOfElement(field_info.StorageIndex()).Int32Value(); - LoadRefDisp(r_base, offset_of_field, r_base, kNotVolatile); - } + LoadTypeFromCache(field_info.StorageIndex(), r_base); // r_base now points at static storage (Class*) or null if the type is not yet resolved. LIR* unresolved_branch = nullptr; if (!field_info.IsClassInDexCache() && (opt_flags & MIR_CLASS_IS_IN_DEX_CACHE) == 0) { @@ -1029,19 +1035,7 @@ void Mir2Lir::GenConstClass(uint32_t type_idx, RegLocation rl_dest) { } else { rl_result = EvalLoc(rl_dest, kRefReg, true); // We don't need access checks, load type from dex cache - if (CanUseOpPcRelDexCacheArrayLoad()) { - size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx); - OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, rl_result.reg, false); - } else { - int32_t dex_cache_offset = - ArtMethod::DexCacheResolvedTypesOffset().Int32Value(); - RegStorage res_reg = AllocTempRef(); - RegStorage r_method = LoadCurrMethodWithHint(res_reg); - LoadRefDisp(r_method, dex_cache_offset, res_reg, kNotVolatile); - int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value(); - LoadRefDisp(res_reg, offset_of_type, rl_result.reg, kNotVolatile); - FreeTemp(res_reg); - } + LoadTypeFromCache(type_idx, rl_result.reg); if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx) || ForceSlowTypePath(cu_)) { // Slow path, at runtime test if type is null and if so initialize @@ -1054,8 +1048,7 @@ void Mir2Lir::GenConstClass(uint32_t type_idx, RegLocation rl_dest) { void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) { /* NOTE: Most strings should be available at compile time */ - int32_t offset_of_string = mirror::ObjectArray<mirror::String>::OffsetOfElement(string_idx). - Int32Value(); + int32_t offset_of_string = GetCacheOffset(string_idx); if (!cu_->compiler_driver->CanAssumeStringIsPresentInDexCache( *cu_->dex_file, string_idx) || ForceSlowStringPath(cu_)) { // slow path, resolve string if not in dex cache @@ -1073,7 +1066,8 @@ void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) { RegStorage r_method = LoadCurrMethodWithHint(arg0); LoadRefDisp(r_method, ArtMethod::DeclaringClassOffset().Int32Value(), arg0, kNotVolatile); // Declaring class to dex cache strings. - LoadRefDisp(arg0, mirror::Class::DexCacheStringsOffset().Int32Value(), arg0, kNotVolatile); + LoadBaseDisp(arg0, mirror::Class::DexCacheStringsOffset().Int32Value(), arg0, + cu_->target64 ? 
k64 : k32, kNotVolatile); LoadRefDisp(arg0, offset_of_string, ret0, kNotVolatile); } @@ -1091,8 +1085,8 @@ void Mir2Lir::GenConstString(uint32_t string_idx, RegLocation rl_dest) { RegStorage res_reg = AllocTempRef(); LoadRefDisp(rl_method.reg, ArtMethod::DeclaringClassOffset().Int32Value(), res_reg, kNotVolatile); - LoadRefDisp(res_reg, mirror::Class::DexCacheStringsOffset().Int32Value(), res_reg, - kNotVolatile); + LoadBaseDisp(res_reg, mirror::Class::DexCacheStringsOffset().Int32Value(), res_reg, + cu_->target64 ? k64 : k32, kNotVolatile); LoadRefDisp(res_reg, offset_of_string, rl_result.reg, kNotVolatile); FreeTemp(res_reg); } @@ -1176,19 +1170,10 @@ void Mir2Lir::GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, Re kNotVolatile); LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class, kNotVolatile); - } else if (CanUseOpPcRelDexCacheArrayLoad()) { - size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx); - OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, check_class, false); - LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class, - kNotVolatile); } else { - RegStorage r_method = LoadCurrMethodWithHint(check_class); - LoadRefDisp(r_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), - check_class, kNotVolatile); + LoadTypeFromCache(type_idx, check_class); LoadRefDisp(object.reg, mirror::Object::ClassOffset().Int32Value(), object_class, kNotVolatile); - int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value(); - LoadRefDisp(check_class, offset_of_type, check_class, kNotVolatile); } // FIXME: what should we be comparing here? compressed or decompressed references? @@ -1239,17 +1224,8 @@ void Mir2Lir::GenInstanceofCallingHelper(bool needs_access_check, bool type_know LoadValueDirectFixed(rl_src, ref_reg); // kArg0 <= ref } - if (CanUseOpPcRelDexCacheArrayLoad()) { - size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx); - OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, class_reg, false); - } else { - RegStorage r_method = LoadCurrMethodWithHint(class_reg); - // Load dex cache entry into class_reg (kArg2) - LoadRefDisp(r_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), - class_reg, kNotVolatile); - int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value(); - LoadRefDisp(class_reg, offset_of_type, class_reg, kNotVolatile); - } + // Load dex cache entry into class_reg (kArg2) + LoadTypeFromCache(type_idx, class_reg); if (!can_assume_type_is_in_dex_cache) { GenIfNullUseHelperImm(class_reg, kQuickInitializeType, type_idx); @@ -1370,17 +1346,7 @@ void Mir2Lir::GenCheckCast(int opt_flags, uint32_t insn_idx, uint32_t type_idx, class_reg, kNotVolatile); } else { // Load dex cache entry into class_reg (kArg2) - if (CanUseOpPcRelDexCacheArrayLoad()) { - size_t offset = dex_cache_arrays_layout_.TypeOffset(type_idx); - OpPcRelDexCacheArrayLoad(cu_->dex_file, offset, class_reg, false); - } else { - RegStorage r_method = LoadCurrMethodWithHint(class_reg); - - LoadRefDisp(r_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), - class_reg, kNotVolatile); - int32_t offset_of_type = ClassArray::OffsetOfElement(type_idx).Int32Value(); - LoadRefDisp(class_reg, offset_of_type, class_reg, kNotVolatile); - } + LoadTypeFromCache(type_idx, class_reg); if (!cu_->compiler_driver->CanAssumeTypeIsPresentInDexCache(*cu_->dex_file, type_idx)) { // Need to test presence of type in dex cache at runtime GenIfNullUseHelperImm(class_reg, kQuickInitializeType, type_idx); diff 
--git a/compiler/dex/quick/mips/call_mips.cc b/compiler/dex/quick/mips/call_mips.cc index 853980d10a..8863c058a1 100644 --- a/compiler/dex/quick/mips/call_mips.cc +++ b/compiler/dex/quick/mips/call_mips.cc @@ -415,10 +415,11 @@ void MipsMir2Lir::GenSpecialExitForSuspend() { * Bit of a hack here - in the absence of a real scheduling pass, * emit the next instruction in static & direct invoke sequences. */ -static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, - const MethodReference& target_method, uint32_t, uintptr_t direct_code, - uintptr_t direct_method, InvokeType type) { - Mir2Lir* cg = static_cast<Mir2Lir*>(cu->cg.get()); +int MipsMir2Lir::MipsNextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, + const MethodReference& target_method, uint32_t, + uintptr_t direct_code, uintptr_t direct_method, + InvokeType type) { + MipsMir2Lir* cg = static_cast<MipsMir2Lir*>(cu->cg.get()); if (info->string_init_offset != 0) { RegStorage arg0_ref = cg->TargetReg(kArg0, kRef); switch (state) { @@ -469,10 +470,12 @@ static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, cg->LoadCurrMethodDirect(arg0_ref); break; case 1: // Get method->dex_cache_resolved_methods_ - cg->LoadRefDisp(arg0_ref, - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(), - arg0_ref, - kNotVolatile); + cg->LoadBaseDisp(arg0_ref, + ArtMethod::DexCacheResolvedMethodsOffset( + cu->target64 ? kMips64PointerSize : kMipsPointerSize).Int32Value(), + arg0_ref, + cu->target64 ? k64 : k32, + kNotVolatile); // Set up direct code if known. if (direct_code != 0) { if (direct_code != static_cast<uintptr_t>(-1)) { @@ -492,8 +495,9 @@ static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, CHECK_EQ(cu->dex_file, target_method.dex_file); const size_t pointer_size = GetInstructionSetPointerSize(cu->instruction_set); cg->LoadWordDisp(arg0_ref, - mirror::Array::DataOffset(pointer_size).Uint32Value() + - target_method.dex_method_index * pointer_size, arg0_ref); + cg->GetCachePointerOffset(target_method.dex_method_index, + pointer_size), + arg0_ref); break; } case 3: // Grab the code from the method* @@ -512,7 +516,7 @@ static int NextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, } NextCallInsn MipsMir2Lir::GetNextSDCallInsn() { - return NextSDCallInsn; + return MipsNextSDCallInsn; } LIR* MipsMir2Lir::GenCallInsn(const MirMethodLoweringInfo& method_info ATTRIBUTE_UNUSED) { diff --git a/compiler/dex/quick/mips/codegen_mips.h b/compiler/dex/quick/mips/codegen_mips.h index 2173253363..378b9a0e05 100644 --- a/compiler/dex/quick/mips/codegen_mips.h +++ b/compiler/dex/quick/mips/codegen_mips.h @@ -269,6 +269,11 @@ class MipsMir2Lir FINAL : public Mir2Lir { const bool fpuIs32Bit_; private: + static int MipsNextSDCallInsn(CompilationUnit* cu, CallInfo* info, int state, + const MethodReference& target_method, uint32_t, + uintptr_t direct_code, uintptr_t direct_method, + InvokeType type); + void GenNegLong(RegLocation rl_dest, RegLocation rl_src); void GenAddLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2); void GenSubLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2); diff --git a/compiler/dex/quick/mir_to_lir-inl.h b/compiler/dex/quick/mir_to_lir-inl.h index 767fe250d8..f96816c3d9 100644 --- a/compiler/dex/quick/mir_to_lir-inl.h +++ b/compiler/dex/quick/mir_to_lir-inl.h @@ -21,6 +21,7 @@ #include "base/logging.h" #include "dex/compiler_ir.h" +#include "gc_root.h" #include "utils.h" namespace art { @@ -278,6 +279,14 @@ inline void 
Mir2Lir::CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck } } +inline size_t Mir2Lir::GetCacheOffset(uint32_t index) { + return sizeof(GcRoot<mirror::Object>) * index; +} + +inline size_t Mir2Lir::GetCachePointerOffset(uint32_t index, size_t pointer_size) { + return pointer_size * index; +} + inline Mir2Lir::ShortyIterator::ShortyIterator(const char* shorty, bool is_static) : cur_(shorty + 1), pending_this_(!is_static), initialized_(false) { DCHECK(shorty != nullptr); diff --git a/compiler/dex/quick/mir_to_lir.h b/compiler/dex/quick/mir_to_lir.h index 73787e958e..4e3aab2f0b 100644 --- a/compiler/dex/quick/mir_to_lir.h +++ b/compiler/dex/quick/mir_to_lir.h @@ -1771,6 +1771,11 @@ class Mir2Lir { return (core_spill_mask_ & (1u << reg)) != 0; } + size_t GetCacheOffset(uint32_t index); + size_t GetCachePointerOffset(uint32_t index, size_t pointer_size); + + void LoadTypeFromCache(uint32_t type_index, RegStorage class_reg); + public: // TODO: add accessors for these. LIR* literal_list_; // Constants. diff --git a/compiler/dex/quick/x86/call_x86.cc b/compiler/dex/quick/x86/call_x86.cc index 43167a187f..9cb45a4528 100644 --- a/compiler/dex/quick/x86/call_x86.cc +++ b/compiler/dex/quick/x86/call_x86.cc @@ -394,18 +394,19 @@ int X86Mir2Lir::X86NextSDCallInsn(CompilationUnit* cu, CallInfo* info, cg->LoadCurrMethodDirect(arg0_ref); break; case 1: // Get method->dex_cache_resolved_methods_ - cg->LoadRefDisp(arg0_ref, - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value(), - arg0_ref, - kNotVolatile); + cg->LoadBaseDisp(arg0_ref, + ArtMethod::DexCacheResolvedMethodsOffset( + cu->target64 ? kX86_64PointerSize : kX86PointerSize).Int32Value(), + arg0_ref, + cu->target64 ? k64 : k32, + kNotVolatile); break; case 2: { // Grab target method* CHECK_EQ(cu->dex_file, target_method.dex_file); const size_t pointer_size = GetInstructionSetPointerSize(cu->instruction_set); cg->LoadWordDisp(arg0_ref, - mirror::Array::DataOffset(pointer_size).Uint32Value() + - target_method.dex_method_index * pointer_size, + cg->GetCachePointerOffset(target_method.dex_method_index, pointer_size), arg0_ref); break; } diff --git a/compiler/dex/quick/x86/int_x86.cc b/compiler/dex/quick/x86/int_x86.cc index d1fe167bb4..ecd23e9ef0 100755 --- a/compiler/dex/quick/x86/int_x86.cc +++ b/compiler/dex/quick/x86/int_x86.cc @@ -3031,31 +3031,12 @@ void X86Mir2Lir::GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, // The LoadRefDisp(s) below will work normally, even in 64 bit mode. RegStorage check_class = AllocTemp(); - // If Method* is already in a register, we can save a copy. 
- RegLocation rl_method = mir_graph_->GetMethodLoc(); - int32_t offset_of_type = mirror::Array::DataOffset( - sizeof(mirror::HeapReference<mirror::Class*>)).Int32Value() + - (sizeof(mirror::HeapReference<mirror::Class*>) * type_idx); - - if (rl_method.location == kLocPhysReg) { - if (use_declaring_class) { - LoadRefDisp(rl_method.reg, ArtMethod::DeclaringClassOffset().Int32Value(), - check_class, kNotVolatile); - } else { - LoadRefDisp(rl_method.reg, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), - check_class, kNotVolatile); - LoadRefDisp(check_class, offset_of_type, check_class, kNotVolatile); - } + if (use_declaring_class) { + RegStorage r_method = LoadCurrMethodWithHint(check_class); + LoadRefDisp(r_method, ArtMethod::DeclaringClassOffset().Int32Value(), + check_class, kNotVolatile); } else { - LoadCurrMethodDirect(check_class); - if (use_declaring_class) { - LoadRefDisp(check_class, ArtMethod::DeclaringClassOffset().Int32Value(), - check_class, kNotVolatile); - } else { - LoadRefDisp(check_class, ArtMethod::DexCacheResolvedTypesOffset().Int32Value(), - check_class, kNotVolatile); - LoadRefDisp(check_class, offset_of_type, check_class, kNotVolatile); - } + LoadTypeFromCache(type_idx, check_class); } // Compare the computed class to the class in the object. diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc index 6d3a960048..950f8249cd 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -836,16 +836,17 @@ class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor { virtual bool Visit(mirror::Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) { const auto pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); for (auto& m : c->GetVirtualMethods(pointer_size)) { - ResolveExceptionsForMethod(&m); + ResolveExceptionsForMethod(&m, pointer_size); } for (auto& m : c->GetDirectMethods(pointer_size)) { - ResolveExceptionsForMethod(&m); + ResolveExceptionsForMethod(&m, pointer_size); } return true; } private: - void ResolveExceptionsForMethod(ArtMethod* method_handle) SHARED_REQUIRES(Locks::mutator_lock_) { + void ResolveExceptionsForMethod(ArtMethod* method_handle, size_t pointer_size) + SHARED_REQUIRES(Locks::mutator_lock_) { const DexFile::CodeItem* code_item = method_handle->GetCodeItem(); if (code_item == nullptr) { return; // native or abstract method @@ -866,7 +867,8 @@ class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor { uint16_t encoded_catch_handler_handlers_type_idx = DecodeUnsignedLeb128(&encoded_catch_handler_list); // Add to set of types to resolve if not already in the dex cache resolved types - if (!method_handle->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx)) { + if (!method_handle->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx, + pointer_size)) { exceptions_to_resolve_.emplace(encoded_catch_handler_handlers_type_idx, method_handle->GetDexFile()); } diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc index dbd3366c1b..9172c8347b 100644 --- a/compiler/image_writer.cc +++ b/compiler/image_writer.cc @@ -309,32 +309,29 @@ void ImageWriter::PrepareDexCacheArraySlots() { dex_cache_array_starts_.Put(dex_file, size); DexCacheArraysLayout layout(target_ptr_size_, dex_file); DCHECK(layout.Valid()); - auto types_size = layout.TypesSize(dex_file->NumTypeIds()); - auto methods_size = layout.MethodsSize(dex_file->NumMethodIds()); - auto fields_size = layout.FieldsSize(dex_file->NumFieldIds()); - auto strings_size = 
layout.StringsSize(dex_file->NumStringIds()); - dex_cache_array_indexes_.Put( - dex_cache->GetResolvedTypes(), - DexCacheArrayLocation {size + layout.TypesOffset(), types_size, kBinRegular}); - dex_cache_array_indexes_.Put( - dex_cache->GetResolvedMethods(), - DexCacheArrayLocation {size + layout.MethodsOffset(), methods_size, kBinArtMethodClean}); - AddMethodPointerArray(dex_cache->GetResolvedMethods()); - dex_cache_array_indexes_.Put( - dex_cache->GetResolvedFields(), - DexCacheArrayLocation {size + layout.FieldsOffset(), fields_size, kBinArtField}); - pointer_arrays_.emplace(dex_cache->GetResolvedFields(), kBinArtField); - dex_cache_array_indexes_.Put( - dex_cache->GetStrings(), - DexCacheArrayLocation {size + layout.StringsOffset(), strings_size, kBinRegular}); + DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr); + AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(), size + layout.TypesOffset()); + DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr); + AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(), size + layout.MethodsOffset()); + DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr); + AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(), size + layout.FieldsOffset()); + DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr); + AddDexCacheArrayRelocation(dex_cache->GetStrings(), size + layout.StringsOffset()); size += layout.Size(); - CHECK_EQ(layout.Size(), types_size + methods_size + fields_size + strings_size); } // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned() // when AssignImageBinSlot() assigns their indexes out or order. bin_slot_sizes_[kBinDexCacheArray] = size; } +void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset) { + if (array != nullptr) { + native_object_relocations_.emplace( + array, + NativeObjectRelocation { offset, kNativeObjectRelocationTypeDexCacheArray }); + } +} + void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) { DCHECK(arr != nullptr); if (kIsDebugBuild) { @@ -381,7 +378,7 @@ void ImageWriter::AssignImageBinSlot(mirror::Object* object) { // so we pre-calculate their offsets separately in PrepareDexCacheArraySlots(). // Since these arrays are huge, most pages do not overlap other objects and it's not // really important where they are for the clean/dirty separation. Due to their - // special PC-relative addressing, we arbitrarily keep them at the beginning. + // special PC-relative addressing, we arbitrarily keep them at the end. // * Class'es which are verified [their clinit runs only at runtime] // - classes in general [because their static fields get overwritten] // - initialized classes with all-final statics are unlikely to be ever dirty, @@ -443,28 +440,13 @@ void ImageWriter::AssignImageBinSlot(mirror::Object* object) { } } else if (object->GetClass<kVerifyNone>()->IsStringClass()) { bin = kBinString; // Strings are almost always immutable (except for object header). - } else if (object->IsArrayInstance()) { - mirror::Class* klass = object->GetClass<kVerifyNone>(); - if (klass->IsObjectArrayClass() || klass->IsIntArrayClass() || klass->IsLongArrayClass()) { - auto it = dex_cache_array_indexes_.find(object); - if (it != dex_cache_array_indexes_.end()) { - bin = kBinDexCacheArray; - // Use prepared offset defined by the DexCacheLayout. - current_offset = it->second.offset_; - // Override incase of cross compilation. 
- object_size = it->second.length_; - } // else bin = kBinRegular - } } // else bin = kBinRegular } size_t offset_delta = RoundUp(object_size, kObjectAlignment); // 64-bit alignment - if (bin != kBinDexCacheArray) { - DCHECK(dex_cache_array_indexes_.find(object) == dex_cache_array_indexes_.end()) << object; - current_offset = bin_slot_sizes_[bin]; // How many bytes the current bin is at (aligned). - // Move the current bin size up to accomodate the object we just assigned a bin slot. - bin_slot_sizes_[bin] += offset_delta; - } + current_offset = bin_slot_sizes_[bin]; // How many bytes the current bin is at (aligned). + // Move the current bin size up to accomodate the object we just assigned a bin slot. + bin_slot_sizes_[bin] += offset_delta; BinSlot new_bin_slot(bin, current_offset); SetImageBinSlot(object, new_bin_slot); @@ -595,7 +577,7 @@ void ImageWriter::PruneNonImageClasses() { } // Clear references to removed classes from the DexCaches. - const ArtMethod* resolution_method = runtime->GetResolutionMethod(); + ArtMethod* resolution_method = runtime->GetResolutionMethod(); ScopedAssertNoThreadSuspension sa(self, __FUNCTION__); ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_); // For ClassInClassTable @@ -611,16 +593,20 @@ void ImageWriter::PruneNonImageClasses() { dex_cache->SetResolvedType(i, nullptr); } } - auto* resolved_methods = down_cast<mirror::PointerArray*>(dex_cache->GetResolvedMethods()); - for (size_t i = 0, len = resolved_methods->GetLength(); i < len; i++) { - auto* method = resolved_methods->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_); + ArtMethod** resolved_methods = dex_cache->GetResolvedMethods(); + for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) { + ArtMethod* method = + mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_); if (method != nullptr) { auto* declaring_class = method->GetDeclaringClass(); // Miranda methods may be held live by a class which was not an image class but have a // declaring class which is an image class. Set it to the resolution method to be safe and // prevent dangling pointers. if (method->IsMiranda() || !IsImageClass(declaring_class)) { - resolved_methods->SetElementPtrSize(i, resolution_method, target_ptr_size_); + mirror::DexCache::SetElementPtrSize(resolved_methods, + i, + resolution_method, + target_ptr_size_); } else { // Check that the class is still in the classes table. DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class " @@ -922,8 +908,6 @@ void ImageWriter::CalculateNewObjectOffsets() { image_end_ += RoundUp(sizeof(ImageHeader), kObjectAlignment); // 64-bit-alignment image_objects_offset_begin_ = image_end_; - // Prepare bin slots for dex cache arrays. - PrepareDexCacheArraySlots(); // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots. heap->VisitObjects(WalkFieldsCallback, this); // Write the image runtime methods. @@ -953,6 +937,8 @@ void ImageWriter::CalculateNewObjectOffsets() { CHECK(m->IsRuntimeMethod()); AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean); } + // Calculate size of the dex cache arrays slot and prepare offsets. + PrepareDexCacheArraySlots(); // Calculate bin slot offsets. 
size_t bin_offset = image_objects_offset_begin_; @@ -1019,6 +1005,11 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) { bin_slot_sizes_[kBinArtMethodDirty]); CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset()); cur_pos = methods_section->End(); + // Add dex cache arrays section. + auto* dex_cache_arrays_section = §ions[ImageHeader::kSectionDexCacheArrays]; + *dex_cache_arrays_section = ImageSection(cur_pos, bin_slot_sizes_[kBinDexCacheArray]); + CHECK_EQ(bin_slot_offsets_[kBinDexCacheArray], dex_cache_arrays_section->Offset()); + cur_pos = dex_cache_arrays_section->End(); // Round up to the alignment the string table expects. See HashSet::WriteToMemory. cur_pos = RoundUp(cur_pos, sizeof(uint64_t)); // Calculate the size of the interned strings. @@ -1120,6 +1111,9 @@ void ImageWriter::CopyAndFixupNativeData() { ArtMethod::Size(target_ptr_size_), ArtMethod::Alignment(target_ptr_size_))); break; + case kNativeObjectRelocationTypeDexCacheArray: + // Nothing to copy here, everything is done in FixupDexCache(). + break; } } } @@ -1187,7 +1181,7 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* a auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_); if (elem != nullptr) { auto it = native_object_relocations_.find(elem); - if (it == native_object_relocations_.end()) { + if (UNLIKELY(it == native_object_relocations_.end())) { if (it->second.IsArtMethodRelocation()) { auto* method = reinterpret_cast<ArtMethod*>(elem); LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ " @@ -1200,6 +1194,7 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* a << field << " idx=" << i << "/" << num_elements << " with declaring class " << PrettyClass(field->GetDeclaringClass()); } + UNREACHABLE(); } else { elem = image_begin_ + it->second.offset; } @@ -1280,27 +1275,31 @@ class FixupClassVisitor FINAL : public FixupVisitor { } }; -void* ImageWriter::NativeLocationInImage(void* obj) { - if (obj == nullptr) { - return nullptr; - } +uintptr_t ImageWriter::NativeOffsetInImage(void* obj) { + DCHECK(obj != nullptr); auto it = native_object_relocations_.find(obj); CHECK(it != native_object_relocations_.end()) << obj; const NativeObjectRelocation& relocation = it->second; - return reinterpret_cast<void*>(image_begin_ + relocation.offset); + return relocation.offset; +} + +template <typename T> +T* ImageWriter::NativeLocationInImage(T* obj) { + if (obj == nullptr) { + return nullptr; + } + return reinterpret_cast<T*>(image_begin_ + NativeOffsetInImage(obj)); } void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) { // Update the field arrays. - copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>( - NativeLocationInImage(orig->GetSFieldsPtr()))); - copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>( - NativeLocationInImage(orig->GetIFieldsPtr()))); + copy->SetSFieldsPtrUnchecked(NativeLocationInImage(orig->GetSFieldsPtr())); + copy->SetIFieldsPtrUnchecked(NativeLocationInImage(orig->GetIFieldsPtr())); // Update direct and virtual method arrays. 
- copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>( - NativeLocationInImage(orig->GetDirectMethodsPtr()))); - copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>( - NativeLocationInImage(orig->GetVirtualMethodsPtr()))); + copy->SetDirectMethodsPtrUnchecked(NativeLocationInImage(orig->GetDirectMethodsPtr())); + copy->SetVirtualMethodsPtr(NativeLocationInImage(orig->GetVirtualMethodsPtr())); + // Update dex cache strings. + copy->SetDexCacheStrings(NativeLocationInImage(orig->GetDexCacheStrings())); // Fix up embedded tables. if (orig->ShouldHaveEmbeddedImtAndVTable()) { for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) { @@ -1333,7 +1332,7 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { } auto* klass = orig->GetClass(); if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) { - // Is this a native dex cache array? + // Is this a native pointer array? auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig)); if (it != pointer_arrays_.end()) { // Should only need to fixup every pointer array exactly once. @@ -1341,8 +1340,6 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { pointer_arrays_.erase(it); return; } - CHECK(dex_cache_array_indexes_.find(orig) == dex_cache_array_indexes_.end()) - << "Should have been pointer array."; } if (orig->IsClass()) { FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy)); @@ -1357,17 +1354,81 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method); dest->SetArtMethod( reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset)); - } else if (!klass->IsArrayClass() && klass->IsSubClass(down_cast<mirror::Class*>( - Thread::Current()->DecodeJObject(WellKnownClasses::java_lang_ClassLoader)))) { - // If src is a ClassLoader, set the class table to null so that it gets recreated by the - // ClassLoader. - down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr); + } else if (!klass->IsArrayClass()) { + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) { + FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy)); + } else if (klass->IsSubClass(down_cast<mirror::Class*>( + class_linker->GetClassRoot(ClassLinker::kJavaLangClassLoader)))) { + // If src is a ClassLoader, set the class table to null so that it gets recreated by the + // ClassLoader. + down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr); + } } FixupVisitor visitor(this, copy); orig->VisitReferences(visitor, visitor); } } +void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache, + mirror::DexCache* copy_dex_cache) { + // Though the DexCache array fields are usually treated as native pointers, we set the full + // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is + // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e. + // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset))). 
+ GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings(); + if (orig_strings != nullptr) { + uintptr_t copy_strings_offset = NativeOffsetInImage(orig_strings); + copy_dex_cache->SetField64<false>( + mirror::DexCache::StringsOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_strings_offset))); + GcRoot<mirror::String>* copy_strings = + reinterpret_cast<GcRoot<mirror::String>*>(image_->Begin() + copy_strings_offset); + for (size_t i = 0, num = orig_dex_cache->NumStrings(); i != num; ++i) { + copy_strings[i] = GcRoot<mirror::String>(GetImageAddress(orig_strings[i].Read())); + } + } + GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes(); + if (orig_types != nullptr) { + uintptr_t copy_types_offset = NativeOffsetInImage(orig_types); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedTypesOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_types_offset))); + GcRoot<mirror::Class>* copy_types = + reinterpret_cast<GcRoot<mirror::Class>*>(image_->Begin() + copy_types_offset); + for (size_t i = 0, num = orig_dex_cache->NumResolvedTypes(); i != num; ++i) { + copy_types[i] = GcRoot<mirror::Class>(GetImageAddress(orig_types[i].Read())); + } + } + ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods(); + if (orig_methods != nullptr) { + uintptr_t copy_methods_offset = NativeOffsetInImage(orig_methods); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedMethodsOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_methods_offset))); + ArtMethod** copy_methods = + reinterpret_cast<ArtMethod**>(image_->Begin() + copy_methods_offset); + for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) { + ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_); + ArtMethod* copy = NativeLocationInImage(orig); + mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_); + } + } + ArtField** orig_fields = orig_dex_cache->GetResolvedFields(); + if (orig_fields != nullptr) { + uintptr_t copy_fields_offset = NativeOffsetInImage(orig_fields); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedFieldsOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + copy_fields_offset))); + ArtField** copy_fields = reinterpret_cast<ArtField**>(image_->Begin() + copy_fields_offset); + for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) { + ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_); + ArtField* copy = NativeLocationInImage(orig); + mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_); + } + } +} + const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method, bool* quick_is_interpreted) { DCHECK(!method->IsResolutionMethod() && !method->IsImtConflictMethod() && !method->IsImtUnimplementedMethod() && !method->IsAbstract()) << PrettyMethod(method); @@ -1430,8 +1491,11 @@ void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) { memcpy(copy, orig, ArtMethod::Size(target_ptr_size_)); copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked())); - copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods())); - copy->SetDexCacheResolvedTypes(GetImageAddress(orig->GetDexCacheResolvedTypes())); + + ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_); + 
copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_); + GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_); + copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_); // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to // oat_begin_ @@ -1534,9 +1598,11 @@ uint32_t ImageWriter::BinSlot::GetIndex() const { uint8_t* ImageWriter::GetOatFileBegin() const { DCHECK_GT(intern_table_bytes_, 0u); - return image_begin_ + RoundUp( - image_end_ + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] + - bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize); + size_t native_sections_size = + bin_slot_sizes_[kBinArtField] + bin_slot_sizes_[kBinArtMethodDirty] + + bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinDexCacheArray] + + intern_table_bytes_; + return image_begin_ + RoundUp(image_end_ + native_sections_size, kPageSize); } ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) { @@ -1550,6 +1616,8 @@ ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocat case kNativeObjectRelocationTypeArtMethodDirty: case kNativeObjectRelocationTypeArtMethodArrayDirty: return kBinArtMethodDirty; + case kNativeObjectRelocationTypeDexCacheArray: + return kBinDexCacheArray; } UNREACHABLE(); } diff --git a/compiler/image_writer.h b/compiler/image_writer.h index 778521c606..e235bc4553 100644 --- a/compiler/image_writer.h +++ b/compiler/image_writer.h @@ -78,12 +78,13 @@ class ImageWriter FINAL { ArtMethod* GetImageMethodAddress(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_); - mirror::HeapReference<mirror::Object>* GetDexCacheArrayElementImageAddress( - const DexFile* dex_file, uint32_t offset) const SHARED_REQUIRES(Locks::mutator_lock_) { + template <typename PtrType> + PtrType GetDexCacheArrayElementImageAddress(const DexFile* dex_file, uint32_t offset) + const SHARED_REQUIRES(Locks::mutator_lock_) { auto it = dex_cache_array_starts_.find(dex_file); DCHECK(it != dex_cache_array_starts_.end()); - return reinterpret_cast<mirror::HeapReference<mirror::Object>*>( - image_begin_ + RoundUp(sizeof(ImageHeader), kObjectAlignment) + it->second + offset); + return reinterpret_cast<PtrType>( + image_begin_ + bin_slot_offsets_[kBinDexCacheArray] + it->second + offset); } uint8_t* GetOatFileBegin() const; @@ -104,13 +105,8 @@ class ImageWriter FINAL { // Classify different kinds of bins that objects end up getting packed into during image writing. enum Bin { - // Dex cache arrays have a special slot for PC-relative addressing. Since they are - // huge, and as such their dirtiness is not important for the clean/dirty separation, - // we arbitrarily keep them at the beginning. - kBinDexCacheArray, // Object arrays belonging to dex cache. // Likely-clean: kBinString, // [String] Almost always immutable (except for obj header). - kBinArtMethodsManagedInitialized, // [ArtMethod] Not-native, and initialized. Unlikely to dirty // Unknown mix of clean/dirty: kBinRegular, // Likely-dirty: @@ -127,6 +123,10 @@ class ImageWriter FINAL { // ArtMethods may be dirty if the class has native methods or a declaring class that isn't // initialized. kBinArtMethodDirty, + // Dex cache arrays have a special slot for PC-relative addressing. 
Since they are + // huge, and as such their dirtiness is not important for the clean/dirty separation, + // we arbitrarily keep them at the end of the native data. + kBinDexCacheArray, // Arrays belonging to dex cache. kBinSize, // Number of bins which are for mirror objects. kBinMirrorCount = kBinArtField, @@ -140,6 +140,7 @@ class ImageWriter FINAL { kNativeObjectRelocationTypeArtMethodArrayClean, kNativeObjectRelocationTypeArtMethodDirty, kNativeObjectRelocationTypeArtMethodArrayDirty, + kNativeObjectRelocationTypeDexCacheArray, }; friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type); @@ -193,6 +194,7 @@ class ImageWriter FINAL { SHARED_REQUIRES(Locks::mutator_lock_); BinSlot GetImageBinSlot(mirror::Object* object) const SHARED_REQUIRES(Locks::mutator_lock_); + void AddDexCacheArrayRelocation(void* array, size_t offset) SHARED_REQUIRES(Locks::mutator_lock_); void AddMethodPointerArray(mirror::PointerArray* arr) SHARED_REQUIRES(Locks::mutator_lock_); static void* GetImageAddressCallback(void* writer, mirror::Object* obj) @@ -266,6 +268,8 @@ class ImageWriter FINAL { SHARED_REQUIRES(Locks::mutator_lock_); void FixupObject(mirror::Object* orig, mirror::Object* copy) SHARED_REQUIRES(Locks::mutator_lock_); + void FixupDexCache(mirror::DexCache* orig_dex_cache, mirror::DexCache* copy_dex_cache) + SHARED_REQUIRES(Locks::mutator_lock_); void FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr, mirror::Class* klass, Bin array_type) SHARED_REQUIRES(Locks::mutator_lock_); @@ -291,7 +295,10 @@ class ImageWriter FINAL { static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type); - void* NativeLocationInImage(void* obj); + uintptr_t NativeOffsetInImage(void* obj); + + template <typename T> + T* NativeLocationInImage(T* obj); const CompilerDriver& compiler_driver_; @@ -313,15 +320,6 @@ class ImageWriter FINAL { // Memory mapped for generating the image. std::unique_ptr<MemMap> image_; - // Indexes, lengths for dex cache arrays (objects are inside of the image so that they don't - // move). - struct DexCacheArrayLocation { - size_t offset_; - size_t length_; - Bin bin_type_; - }; - SafeMap<mirror::Object*, DexCacheArrayLocation> dex_cache_array_indexes_; - // Pointer arrays that need to be updated. Since these are only some int and long arrays, we need // to keep track. These include vtable arrays, iftable arrays, and dex caches. 
std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_; diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc index fdf904d1f0..4ddd457ac5 100644 --- a/compiler/oat_writer.cc +++ b/compiler/oat_writer.cc @@ -842,10 +842,10 @@ class OatWriter::WriteCodeMethodVisitor : public OatDexMethodVisitor { uint32_t GetDexCacheOffset(const LinkerPatch& patch) SHARED_REQUIRES(Locks::mutator_lock_) { if (writer_->image_writer_ != nullptr) { - auto* element = writer_->image_writer_->GetDexCacheArrayElementImageAddress( + auto* element = writer_->image_writer_->GetDexCacheArrayElementImageAddress<const uint8_t*>( patch.TargetDexCacheDexFile(), patch.TargetDexCacheElementOffset()); const uint8_t* oat_data = writer_->image_writer_->GetOatFileBegin() + file_offset_; - return reinterpret_cast<const uint8_t*>(element) - oat_data; + return element - oat_data; } else { LOG(FATAL) << "Unimplemented."; UNREACHABLE(); diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index 503187bd3d..f4cf9b5130 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -129,12 +129,12 @@ static bool CheckTypeConsistency(HInstruction* instruction) { } size_t CodeGenerator::GetCacheOffset(uint32_t index) { - return mirror::ObjectArray<mirror::Object>::OffsetOfElement(index).SizeValue(); + return sizeof(GcRoot<mirror::Object>) * index; } size_t CodeGenerator::GetCachePointerOffset(uint32_t index) { auto pointer_size = InstructionSetPointerSize(GetInstructionSet()); - return mirror::Array::DataOffset(pointer_size).Uint32Value() + pointer_size * index; + return pointer_size * index; } void CodeGenerator::CompileBaseline(CodeAllocator* allocator, bool is_leaf) { diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 9de9abffd8..5b7eea6713 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -4231,9 +4231,9 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { __ LoadFromOffset(kLoadWord, out, current_method, - ArtMethod::DexCacheResolvedTypesOffset().Int32Value()); + ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value()); __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM( cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); @@ -4293,9 +4293,8 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) { __ LoadFromOffset( kLoadWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value()); __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value()); - __ MaybeUnpoisonHeapReference(out); __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex())); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. 
__ CompareAndBranchIfZero(out, slow_path->GetEntryLabel()); __ Bind(slow_path->GetExitLabel()); } @@ -4570,7 +4569,8 @@ void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, } // temp = current_method->dex_cache_resolved_methods_; __ LoadFromOffset( - kLoadWord, reg, method_reg, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); + kLoadWord, reg, method_reg, ArtMethod::DexCacheResolvedMethodsOffset( + kArmPointerSize).Int32Value()); // temp = temp[index_in_cache] uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index; __ LoadFromOffset(kLoadWord, reg, reg, CodeGenerator::GetCachePointerOffset(index_in_cache)); diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index 25b3ea2f5f..b18fb6e900 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -2446,8 +2446,9 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invok } // temp = current_method->dex_cache_resolved_methods_; - __ Ldr(reg.W(), MemOperand(method_reg.X(), - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); + __ Ldr(reg.X(), + MemOperand(method_reg.X(), + ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value())); // temp = temp[index_in_cache]; uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index; __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache))); @@ -2620,9 +2621,10 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { __ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); } else { DCHECK(cls->CanCallRuntime()); - __ Ldr(out, MemOperand(current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value())); - __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); - GetAssembler()->MaybeUnpoisonHeapReference(out.W()); + MemberOffset resolved_types_offset = ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize); + __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value())); + __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); + // TODO: We will need a read barrier here. SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64( cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); @@ -2681,10 +2683,9 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { Register out = OutputRegister(load); Register current_method = InputRegisterAt(load, 0); __ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); - __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset())); - GetAssembler()->MaybeUnpoisonHeapReference(out.W()); - __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex()))); - GetAssembler()->MaybeUnpoisonHeapReference(out.W()); + __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset())); + __ Ldr(out, MemOperand(out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()))); + // TODO: We will need a read barrier here. 
__ Cbz(out, slow_path->GetEntryLabel()); __ Bind(slow_path->GetExitLabel()); } diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc index 093d786dfe..1528d09da1 100644 --- a/compiler/optimizing/code_generator_mips64.cc +++ b/compiler/optimizing/code_generator_mips64.cc @@ -2445,10 +2445,10 @@ void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invo } // temp = temp->dex_cache_resolved_methods_; - __ LoadFromOffset(kLoadUnsignedWord, + __ LoadFromOffset(kLoadDoubleword, reg, method_reg, - ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); + ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value()); // temp = temp[index_in_cache] uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index; __ LoadFromOffset(kLoadDoubleword, @@ -2549,9 +2549,10 @@ void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) { ArtMethod::DeclaringClassOffset().Int32Value()); } else { DCHECK(cls->CanCallRuntime()); - __ LoadFromOffset(kLoadUnsignedWord, out, current_method, - ArtMethod::DexCacheResolvedTypesOffset().Int32Value()); + __ LoadFromOffset(kLoadDoubleword, out, current_method, + ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value()); __ LoadFromOffset(kLoadUnsignedWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())); + // TODO: We will need a read barrier here. SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64( cls, cls, @@ -2614,8 +2615,9 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) { GpuRegister current_method = locations->InAt(0).AsRegister<GpuRegister>(); __ LoadFromOffset(kLoadUnsignedWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value()); - __ LoadFromOffset(kLoadUnsignedWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value()); + __ LoadFromOffset(kLoadDoubleword, out, out, mirror::Class::DexCacheStringsOffset().Int32Value()); __ LoadFromOffset(kLoadUnsignedWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex())); + // TODO: We will need a read barrier here. __ Beqzc(out, slow_path->GetEntryLabel()); __ Bind(slow_path->GetExitLabel()); } diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 72c690de9a..4fa7b28685 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -3545,7 +3545,8 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, __ movl(reg, Address(ESP, kCurrentMethodStackOffset)); } // temp = temp->dex_cache_resolved_methods_; - __ movl(reg, Address(method_reg, ArtMethod::DexCacheResolvedMethodsOffset().Int32Value())); + __ movl(reg, Address(method_reg, + ArtMethod::DexCacheResolvedMethodsOffset(kX86PointerSize).Int32Value())); // temp = temp[index_in_cache] uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index; __ movl(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache))); @@ -4719,9 +4720,9 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { } else { DCHECK(cls->CanCallRuntime()); __ movl(out, Address( - current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value())); + current_method, ArtMethod::DexCacheResolvedTypesOffset(kX86PointerSize).Int32Value())); __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. 
SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86( cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); @@ -4779,9 +4780,8 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) { Register current_method = locations->InAt(0).AsRegister<Register>(); __ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); __ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value())); - __ MaybeUnpoisonHeapReference(out); __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex()))); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. __ testl(out, out); __ j(kEqual, slow_path->GetEntryLabel()); __ Bind(slow_path->GetExitLabel()); diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 820ec781bb..29bad125f9 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -450,8 +450,9 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invo __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset)); } // temp = temp->dex_cache_resolved_methods_; - __ movl(reg, Address(CpuRegister(method_reg), - ArtMethod::DexCacheResolvedMethodsOffset().SizeValue())); + __ movq(reg, + Address(CpuRegister(method_reg), + ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue())); // temp = temp[index_in_cache] uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index; __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache))); @@ -4550,10 +4551,10 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { __ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); } else { DCHECK(cls->CanCallRuntime()); - __ movl(out, Address( - current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value())); + __ movq(out, Address( + current_method, ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value())); __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64( cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); @@ -4601,10 +4602,9 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) { CpuRegister out = locations->Out().AsRegister<CpuRegister>(); CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>(); __ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); - __ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value())); - __ MaybeUnpoisonHeapReference(out); + __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value())); __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex()))); - __ MaybeUnpoisonHeapReference(out); + // TODO: We will need a read barrier here. 
__ testl(out, out); __ j(kEqual, slow_path->GetEntryLabel()); __ Bind(slow_path->GetExitLabel()); diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 112d42e904..3f90676583 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -496,8 +496,9 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method, // TODO: we could be more precise by merging the phi inputs but that requires // some functionality from the reference type propagation. DCHECK(return_replacement->IsPhi()); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); ReferenceTypeInfo::TypeHandle return_handle = - handles_->NewHandle(resolved_method->GetReturnType()); + handles_->NewHandle(resolved_method->GetReturnType(true /* resolve */, pointer_size)); return_replacement->SetReferenceTypeInfo(ReferenceTypeInfo::Create( return_handle, return_handle->IsFinal() /* is_exact */)); } diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc index 516638b33c..ef753ed2b6 100644 --- a/compiler/optimizing/reference_type_propagation.cc +++ b/compiler/optimizing/reference_type_propagation.cc @@ -637,9 +637,9 @@ void RTPVisitor::VisitInvoke(HInvoke* instr) { ScopedObjectAccess soa(Thread::Current()); ClassLinker* cl = Runtime::Current()->GetClassLinker(); mirror::DexCache* dex_cache = cl->FindDexCache(soa.Self(), instr->GetDexFile()); - ArtMethod* method = dex_cache->GetResolvedMethod( - instr->GetDexMethodIndex(), cl->GetImagePointerSize()); - mirror::Class* klass = (method == nullptr) ? nullptr : method->GetReturnType(false); + size_t pointer_size = cl->GetImagePointerSize(); + ArtMethod* method = dex_cache->GetResolvedMethod(instr->GetDexMethodIndex(), pointer_size); + mirror::Class* klass = (method == nullptr) ? nullptr : method->GetReturnType(false, pointer_size); SetClassAsTypeInfo(instr, klass, /* is_exact */ false); } diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc index c553a18561..2353dcfdaf 100644 --- a/oatdump/oatdump.cc +++ b/oatdump/oatdump.cc @@ -44,6 +44,7 @@ #include "mapping_table.h" #include "mirror/array-inl.h" #include "mirror/class-inl.h" +#include "mirror/dex_cache-inl.h" #include "mirror/object-inl.h" #include "mirror/object_array-inl.h" #include "oat.h" @@ -1615,15 +1616,14 @@ class ImageDumper { } { // Mark dex caches. - dex_cache_arrays_.clear(); + dex_caches_.clear(); { ReaderMutexLock mu(self, *class_linker->DexLock()); for (jobject weak_root : class_linker->GetDexCaches()) { mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root)); if (dex_cache != nullptr) { - dex_cache_arrays_.insert(dex_cache->GetResolvedFields()); - dex_cache_arrays_.insert(dex_cache->GetResolvedMethods()); + dex_caches_.insert(dex_cache); } } } @@ -1659,22 +1659,25 @@ class ImageDumper { const auto& bitmap_section = image_header_.GetImageSection(ImageHeader::kSectionImageBitmap); const auto& field_section = image_header_.GetImageSection(ImageHeader::kSectionArtFields); const auto& method_section = image_header_.GetMethodsSection(); + const auto& dex_cache_arrays_section = image_header_.GetImageSection( + ImageHeader::kSectionDexCacheArrays); const auto& intern_section = image_header_.GetImageSection( ImageHeader::kSectionInternedStrings); stats_.header_bytes = header_bytes; stats_.alignment_bytes += RoundUp(header_bytes, kObjectAlignment) - header_bytes; // Add padding between the field and method section. 
// (Field section is 4-byte aligned, method section is 8-byte aligned on 64-bit targets.) - stats_.alignment_bytes += - method_section.Offset() - (field_section.Offset() + field_section.Size()); - // Add padding between the method section and the intern table. - // (Method section is 4-byte aligned on 32-bit targets, intern table is 8-byte aligned.) - stats_.alignment_bytes += - intern_section.Offset() - (method_section.Offset() + method_section.Size()); + stats_.alignment_bytes += method_section.Offset() - + (field_section.Offset() + field_section.Size()); + // Add padding between the dex cache arrays section and the intern table. (Dex cache + // arrays section is 4-byte aligned on 32-bit targets, intern table is 8-byte aligned.) + stats_.alignment_bytes += intern_section.Offset() - + (dex_cache_arrays_section.Offset() + dex_cache_arrays_section.Size()); stats_.alignment_bytes += bitmap_section.Offset() - image_header_.GetImageSize(); stats_.bitmap_bytes += bitmap_section.Size(); stats_.art_field_bytes += field_section.Size(); stats_.art_method_bytes += method_section.Size(); + stats_.dex_cache_arrays_bytes += dex_cache_arrays_section.Size(); stats_.interned_strings_bytes += intern_section.Size(); stats_.Dump(os, indent_os); os << "\n"; @@ -1881,33 +1884,73 @@ class ImageDumper { } } } else { - auto it = state->dex_cache_arrays_.find(obj); - if (it != state->dex_cache_arrays_.end()) { + auto it = state->dex_caches_.find(obj); + if (it != state->dex_caches_.end()) { + auto* dex_cache = down_cast<mirror::DexCache*>(obj); const auto& field_section = state->image_header_.GetImageSection( ImageHeader::kSectionArtFields); const auto& method_section = state->image_header_.GetMethodsSection(); - auto* arr = down_cast<mirror::PointerArray*>(obj); - for (int32_t i = 0, length = arr->GetLength(); i < length; i++) { - void* elem = arr->GetElementPtrSize<void*>(i, image_pointer_size); - size_t run = 0; - for (int32_t j = i + 1; j < length && - elem == arr->GetElementPtrSize<void*>(j, image_pointer_size); j++, run++) { } - if (run == 0) { - os << StringPrintf("%d: ", i); - } else { - os << StringPrintf("%d to %zd: ", i, i + run); - i = i + run; + size_t num_methods = dex_cache->NumResolvedMethods(); + if (num_methods != 0u) { + os << "Methods (size=" << num_methods << "):"; + ScopedIndentation indent2(&state->vios_); + auto* resolved_methods = dex_cache->GetResolvedMethods(); + for (size_t i = 0, length = dex_cache->NumResolvedMethods(); i < length; ++i) { + auto* elem = mirror::DexCache::GetElementPtrSize(resolved_methods, i, image_pointer_size); + size_t run = 0; + for (size_t j = i + 1; + j != length && elem == mirror::DexCache::GetElementPtrSize(resolved_methods, + j, + image_pointer_size); + ++j, ++run) {} + if (run == 0) { + os << StringPrintf("%zd: ", i); + } else { + os << StringPrintf("%zd to %zd: ", i, i + run); + i = i + run; + } + std::string msg; + if (elem == nullptr) { + msg = "null"; + } else if (method_section.Contains( + reinterpret_cast<uint8_t*>(elem) - state->image_space_.Begin())) { + msg = PrettyMethod(reinterpret_cast<ArtMethod*>(elem)); + } else { + msg = "<not in method section>"; + } + os << StringPrintf("%p %s\n", elem, msg.c_str()); } - auto offset = reinterpret_cast<uint8_t*>(elem) - state->image_space_.Begin(); - std::string msg; - if (field_section.Contains(offset)) { - msg = PrettyField(reinterpret_cast<ArtField*>(elem)); - } else if (method_section.Contains(offset)) { - msg = PrettyMethod(reinterpret_cast<ArtMethod*>(elem)); - } else { - msg = "Unknown type"; + } + 
size_t num_fields = dex_cache->NumResolvedFields(); + if (num_fields != 0u) { + os << "Fields (size=" << num_fields << "):"; + ScopedIndentation indent2(&state->vios_); + auto* resolved_fields = dex_cache->GetResolvedFields(); + for (size_t i = 0, length = dex_cache->NumResolvedFields(); i < length; ++i) { + auto* elem = mirror::DexCache::GetElementPtrSize(resolved_fields, i, image_pointer_size); + size_t run = 0; + for (size_t j = i + 1; + j != length && elem == mirror::DexCache::GetElementPtrSize(resolved_fields, + j, + image_pointer_size); + ++j, ++run) {} + if (run == 0) { + os << StringPrintf("%zd: ", i); + } else { + os << StringPrintf("%zd to %zd: ", i, i + run); + i = i + run; + } + std::string msg; + if (elem == nullptr) { + msg = "null"; + } else if (field_section.Contains( + reinterpret_cast<uint8_t*>(elem) - state->image_space_.Begin())) { + msg = PrettyField(reinterpret_cast<ArtField*>(elem)); + } else { + msg = "<not in field section>"; + } + os << StringPrintf("%p %s\n", elem, msg.c_str()); } - os << StringPrintf("%p %s\n", elem, msg.c_str()); } } } @@ -2022,6 +2065,7 @@ class ImageDumper { size_t object_bytes; size_t art_field_bytes; size_t art_method_bytes; + size_t dex_cache_arrays_bytes; size_t interned_strings_bytes; size_t bitmap_bytes; size_t alignment_bytes; @@ -2052,6 +2096,7 @@ class ImageDumper { object_bytes(0), art_field_bytes(0), art_method_bytes(0), + dex_cache_arrays_bytes(0), interned_strings_bytes(0), bitmap_bytes(0), alignment_bytes(0), @@ -2209,24 +2254,27 @@ class ImageDumper { { os << "art_file_bytes = " << PrettySize(file_bytes) << "\n\n" << "art_file_bytes = header_bytes + object_bytes + alignment_bytes\n"; - indent_os << StringPrintf("header_bytes = %8zd (%2.0f%% of art file bytes)\n" - "object_bytes = %8zd (%2.0f%% of art file bytes)\n" - "art_field_bytes = %8zd (%2.0f%% of art file bytes)\n" - "art_method_bytes = %8zd (%2.0f%% of art file bytes)\n" - "interned_string_bytes = %8zd (%2.0f%% of art file bytes)\n" - "bitmap_bytes = %8zd (%2.0f%% of art file bytes)\n" - "alignment_bytes = %8zd (%2.0f%% of art file bytes)\n\n", + indent_os << StringPrintf("header_bytes = %8zd (%2.0f%% of art file bytes)\n" + "object_bytes = %8zd (%2.0f%% of art file bytes)\n" + "art_field_bytes = %8zd (%2.0f%% of art file bytes)\n" + "art_method_bytes = %8zd (%2.0f%% of art file bytes)\n" + "dex_cache_arrays_bytes = %8zd (%2.0f%% of art file bytes)\n" + "interned_string_bytes = %8zd (%2.0f%% of art file bytes)\n" + "bitmap_bytes = %8zd (%2.0f%% of art file bytes)\n" + "alignment_bytes = %8zd (%2.0f%% of art file bytes)\n\n", header_bytes, PercentOfFileBytes(header_bytes), object_bytes, PercentOfFileBytes(object_bytes), art_field_bytes, PercentOfFileBytes(art_field_bytes), art_method_bytes, PercentOfFileBytes(art_method_bytes), + dex_cache_arrays_bytes, + PercentOfFileBytes(dex_cache_arrays_bytes), interned_strings_bytes, PercentOfFileBytes(interned_strings_bytes), bitmap_bytes, PercentOfFileBytes(bitmap_bytes), alignment_bytes, PercentOfFileBytes(alignment_bytes)) << std::flush; CHECK_EQ(file_bytes, header_bytes + object_bytes + art_field_bytes + art_method_bytes + - interned_strings_bytes + bitmap_bytes + alignment_bytes); + dex_cache_arrays_bytes + interned_strings_bytes + bitmap_bytes + alignment_bytes); } os << "object_bytes breakdown:\n"; @@ -2312,7 +2360,7 @@ class ImageDumper { const ImageHeader& image_header_; std::unique_ptr<OatDumper> oat_dumper_; OatDumperOptions* oat_dumper_options_; - std::set<mirror::Object*> dex_cache_arrays_; + std::set<mirror::Object*> 
dex_caches_; DISALLOW_COPY_AND_ASSIGN(ImageDumper); }; diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc index a71197a6ce..88622ccc9b 100644 --- a/patchoat/patchoat.cc +++ b/patchoat/patchoat.cc @@ -523,18 +523,60 @@ void PatchOat::PatchDexFileArrays(mirror::ObjectArray<mirror::Object>* img_roots auto* dex_caches = down_cast<mirror::ObjectArray<mirror::DexCache>*>( img_roots->Get(ImageHeader::kDexCaches)); for (size_t i = 0, count = dex_caches->GetLength(); i < count; ++i) { - auto* dex_cache = dex_caches->GetWithoutChecks(i); - auto* fields = dex_cache->GetResolvedFields(); - if (fields != nullptr) { - CHECK(!fields->IsObjectArray()); - CHECK(fields->IsArrayInstance()); - FixupNativePointerArray(fields); + auto* orig_dex_cache = dex_caches->GetWithoutChecks(i); + auto* copy_dex_cache = RelocatedCopyOf(orig_dex_cache); + const size_t pointer_size = InstructionSetPointerSize(isa_); + // Though the DexCache array fields are usually treated as native pointers, we set the full + // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is + // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e. + // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset))). + GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings(); + GcRoot<mirror::String>* relocated_strings = RelocatedAddressOfPointer(orig_strings); + copy_dex_cache->SetField64<false>( + mirror::DexCache::StringsOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_strings))); + if (orig_strings != nullptr) { + GcRoot<mirror::String>* copy_strings = RelocatedCopyOf(orig_strings); + for (size_t j = 0, num = orig_dex_cache->NumStrings(); j != num; ++j) { + copy_strings[j] = GcRoot<mirror::String>(RelocatedAddressOfPointer(orig_strings[j].Read())); + } + } + GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes(); + GcRoot<mirror::Class>* relocated_types = RelocatedAddressOfPointer(orig_types); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedTypesOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_types))); + if (orig_types != nullptr) { + GcRoot<mirror::Class>* copy_types = RelocatedCopyOf(orig_types); + for (size_t j = 0, num = orig_dex_cache->NumResolvedTypes(); j != num; ++j) { + copy_types[j] = GcRoot<mirror::Class>(RelocatedAddressOfPointer(orig_types[j].Read())); + } } - auto* methods = dex_cache->GetResolvedMethods(); - if (methods != nullptr) { - CHECK(!methods->IsObjectArray()); - CHECK(methods->IsArrayInstance()); - FixupNativePointerArray(methods); + ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods(); + ArtMethod** relocated_methods = RelocatedAddressOfPointer(orig_methods); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedMethodsOffset(), + static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_methods))); + if (orig_methods != nullptr) { + ArtMethod** copy_methods = RelocatedCopyOf(orig_methods); + for (size_t j = 0, num = orig_dex_cache->NumResolvedMethods(); j != num; ++j) { + ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, j, pointer_size); + ArtMethod* copy = RelocatedAddressOfPointer(orig); + mirror::DexCache::SetElementPtrSize(copy_methods, j, copy, pointer_size); + } + } + ArtField** orig_fields = orig_dex_cache->GetResolvedFields(); + ArtField** relocated_fields = RelocatedAddressOfPointer(orig_fields); + copy_dex_cache->SetField64<false>( + mirror::DexCache::ResolvedFieldsOffset(), + 
static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_fields))); + if (orig_fields != nullptr) { + ArtField** copy_fields = RelocatedCopyOf(orig_fields); + for (size_t j = 0, num = orig_dex_cache->NumResolvedFields(); j != num; ++j) { + ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, j, pointer_size); + ArtField* copy = RelocatedAddressOfPointer(orig); + mirror::DexCache::SetElementPtrSize(copy_fields, j, copy, pointer_size); + } } } } @@ -627,6 +669,7 @@ void PatchOat::VisitObject(mirror::Object* object) { if (object->IsClass<kVerifyNone>()) { auto* klass = object->AsClass(); auto* copy_klass = down_cast<mirror::Class*>(copy); + copy_klass->SetDexCacheStrings(RelocatedAddressOfPointer(klass->GetDexCacheStrings())); copy_klass->SetSFieldsPtrUnchecked(RelocatedAddressOfPointer(klass->GetSFieldsPtr())); copy_klass->SetIFieldsPtrUnchecked(RelocatedAddressOfPointer(klass->GetIFieldsPtr())); copy_klass->SetDirectMethodsPtrUnchecked( @@ -673,8 +716,10 @@ void PatchOat::FixupMethod(ArtMethod* object, ArtMethod* copy) { // Just update the entry points if it looks like we should. // TODO: sanity check all the pointers' values copy->SetDeclaringClass(RelocatedAddressOfPointer(object->GetDeclaringClass())); - copy->SetDexCacheResolvedMethods(RelocatedAddressOfPointer(object->GetDexCacheResolvedMethods())); - copy->SetDexCacheResolvedTypes(RelocatedAddressOfPointer(object->GetDexCacheResolvedTypes())); + copy->SetDexCacheResolvedMethods( + RelocatedAddressOfPointer(object->GetDexCacheResolvedMethods(pointer_size)), pointer_size); + copy->SetDexCacheResolvedTypes( + RelocatedAddressOfPointer(object->GetDexCacheResolvedTypes(pointer_size)), pointer_size); copy->SetEntryPointFromQuickCompiledCodePtrSize(RelocatedAddressOfPointer( object->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size)), pointer_size); copy->SetEntryPointFromJniPtrSize(RelocatedAddressOfPointer( diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S index 1498a4b7e3..9303b00cb9 100644 --- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S +++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S @@ -926,7 +926,7 @@ DEFINE_FUNCTION art_quick_alloc_object_tlab int3 #endif // Might need a special macro since rsi and edx is 32b/64b mismatched. - movl ART_METHOD_DEX_CACHE_TYPES_OFFSET(%rsi), %edx // Load dex cache resolved types array + movl ART_METHOD_DEX_CACHE_TYPES_OFFSET_64(%rsi), %edx // Load dex cache resolved types array UNPOISON_HEAP_REF edx // TODO: Add read barrier when this function is used. // Might need to break down into multiple instructions to get the base address in a register. 
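The patchoat hunk above writes the relocated native array pointers into the DexCache's 64-bit fields with `SetField64`, zero-extending 32-bit pointers exactly as its comment describes: cast through `uintptr_t` before widening to `int64_t`. A tiny standalone illustration of that encoding, assuming nothing beyond the cast chain quoted in the hunk:

```cpp
#include <cstdint>

// Zero-extend a (possibly 32-bit) native pointer into a 64-bit field value.
// Going through uintptr_t first guarantees the top 32 bits are zero on
// 32-bit targets instead of being sign-extended.
int64_t EncodePointerField(void* relocated_array) {
  return static_cast<int64_t>(reinterpret_cast<uintptr_t>(relocated_array));
}
```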
diff --git a/runtime/art_field-inl.h b/runtime/art_field-inl.h index 5138cc99bf..4166e22daa 100644 --- a/runtime/art_field-inl.h +++ b/runtime/art_field-inl.h @@ -24,7 +24,7 @@ #include "gc_root-inl.h" #include "gc/accounting/card_table-inl.h" #include "jvalue.h" -#include "mirror/dex_cache.h" +#include "mirror/dex_cache-inl.h" #include "mirror/object-inl.h" #include "primitive.h" #include "thread-inl.h" diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h index d38cc56067..cfd7fcd0d6 100644 --- a/runtime/art_method-inl.h +++ b/runtime/art_method-inl.h @@ -22,11 +22,12 @@ #include "art_field.h" #include "base/logging.h" #include "class_linker-inl.h" +#include "common_throws.h" #include "dex_file.h" #include "dex_file-inl.h" #include "gc_root-inl.h" #include "mirror/class-inl.h" -#include "mirror/dex_cache.h" +#include "mirror/dex_cache-inl.h" #include "mirror/object-inl.h" #include "mirror/object_array.h" #include "oat.h" @@ -95,14 +96,20 @@ inline uint32_t ArtMethod::GetDexMethodIndex() { return dex_method_index_; } -inline mirror::PointerArray* ArtMethod::GetDexCacheResolvedMethods() { - GcRootSource gc_root_source(this); - return dex_cache_resolved_methods_.Read(&gc_root_source); +inline ArtMethod** ArtMethod::GetDexCacheResolvedMethods(size_t pointer_size) { + return GetNativePointer<ArtMethod**>(DexCacheResolvedMethodsOffset(pointer_size), + pointer_size); } inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size) { - auto* method = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>( - method_index, ptr_size); + // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here + // without accessing the DexCache and we don't want to do that in release build. + DCHECK_LT(method_index, + GetInterfaceMethodIfProxy(ptr_size)->GetDeclaringClass() + ->GetDexCache()->NumResolvedMethods()); + ArtMethod* method = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(ptr_size), + method_index, + ptr_size); if (LIKELY(method != nullptr)) { auto* declaring_class = method->GetDeclaringClass(); if (LIKELY(declaring_class == nullptr || !declaring_class->IsErroneous())) { @@ -112,52 +119,70 @@ inline ArtMethod* ArtMethod::GetDexCacheResolvedMethod(uint16_t method_index, si return nullptr; } -inline void ArtMethod::SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method, +inline void ArtMethod::SetDexCacheResolvedMethod(uint16_t method_index, ArtMethod* new_method, size_t ptr_size) { + // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here + // without accessing the DexCache and we don't want to do that in release build. 
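The new ArtMethod accessors above read elements out of a raw native array whose slot width is the image pointer size, not the host's `sizeof(void*)`. A hedged standalone sketch of what such a pointer-size-aware read can look like; this is my own helper, not the real `mirror::DexCache::GetElementPtrSize`:

```cpp
#include <cstddef>
#include <cstdint>

// Read element `idx` from an array whose slots are `pointer_size` bytes wide.
// A 32-bit boot image stores 4-byte slots even when inspected by a 64-bit
// host tool, so the slot width cannot simply be sizeof(void*).
template <typename PtrType>
PtrType GetElementPtrSizeSketch(const void* array, size_t idx, size_t pointer_size) {
  if (pointer_size == 4u) {
    uint32_t value = static_cast<const uint32_t*>(array)[idx];
    return reinterpret_cast<PtrType>(static_cast<uintptr_t>(value));
  }
  uint64_t value = static_cast<const uint64_t*>(array)[idx];
  // May truncate when a 32-bit host reads 64-bit slots; kept simple for the sketch.
  return reinterpret_cast<PtrType>(static_cast<uintptr_t>(value));
}
```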
+ DCHECK_LT(method_index, + GetInterfaceMethodIfProxy(ptr_size)->GetDeclaringClass() + ->GetDexCache()->NumResolvedMethods()); DCHECK(new_method == nullptr || new_method->GetDeclaringClass() != nullptr); - GetDexCacheResolvedMethods()->SetElementPtrSize(method_idx, new_method, ptr_size); + mirror::DexCache::SetElementPtrSize(GetDexCacheResolvedMethods(ptr_size), + method_index, + new_method, + ptr_size); } -inline bool ArtMethod::HasDexCacheResolvedMethods() { - return GetDexCacheResolvedMethods() != nullptr; +inline bool ArtMethod::HasDexCacheResolvedMethods(size_t pointer_size) { + return GetDexCacheResolvedMethods(pointer_size) != nullptr; } -inline bool ArtMethod::HasSameDexCacheResolvedMethods(mirror::PointerArray* other_cache) { - return GetDexCacheResolvedMethods() == other_cache; +inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod** other_cache, + size_t pointer_size) { + return GetDexCacheResolvedMethods(pointer_size) == other_cache; } -inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other) { - return GetDexCacheResolvedMethods() == other->GetDexCacheResolvedMethods(); +inline bool ArtMethod::HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size) { + return GetDexCacheResolvedMethods(pointer_size) == + other->GetDexCacheResolvedMethods(pointer_size); } -inline mirror::ObjectArray<mirror::Class>* ArtMethod::GetDexCacheResolvedTypes() { - GcRootSource gc_root_source(this); - return dex_cache_resolved_types_.Read(&gc_root_source); +inline GcRoot<mirror::Class>* ArtMethod::GetDexCacheResolvedTypes(size_t pointer_size) { + return GetNativePointer<GcRoot<mirror::Class>*>(DexCacheResolvedTypesOffset(pointer_size), + pointer_size); } template <bool kWithCheck> -inline mirror::Class* ArtMethod::GetDexCacheResolvedType(uint32_t type_index) { - mirror::Class* klass = kWithCheck ? - GetDexCacheResolvedTypes()->Get(type_index) : - GetDexCacheResolvedTypes()->GetWithoutChecks(type_index); +inline mirror::Class* ArtMethod::GetDexCacheResolvedType(uint32_t type_index, size_t ptr_size) { + if (kWithCheck) { + mirror::DexCache* dex_cache = + GetInterfaceMethodIfProxy(ptr_size)->GetDeclaringClass()->GetDexCache(); + if (UNLIKELY(type_index >= dex_cache->NumResolvedTypes())) { + ThrowArrayIndexOutOfBoundsException(type_index, dex_cache->NumResolvedTypes()); + return nullptr; + } + } + mirror::Class* klass = GetDexCacheResolvedTypes(ptr_size)[type_index].Read(); return (klass != nullptr && !klass->IsErroneous()) ? 
klass : nullptr; } -inline bool ArtMethod::HasDexCacheResolvedTypes() { - return GetDexCacheResolvedTypes() != nullptr; +inline bool ArtMethod::HasDexCacheResolvedTypes(size_t pointer_size) { + return GetDexCacheResolvedTypes(pointer_size) != nullptr; } -inline bool ArtMethod::HasSameDexCacheResolvedTypes( - mirror::ObjectArray<mirror::Class>* other_cache) { - return GetDexCacheResolvedTypes() == other_cache; +inline bool ArtMethod::HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, + size_t pointer_size) { + return GetDexCacheResolvedTypes(pointer_size) == other_cache; } -inline bool ArtMethod::HasSameDexCacheResolvedTypes(ArtMethod* other) { - return GetDexCacheResolvedTypes() == other->GetDexCacheResolvedTypes(); +inline bool ArtMethod::HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size) { + return GetDexCacheResolvedTypes(pointer_size) == other->GetDexCacheResolvedTypes(pointer_size); } -inline mirror::Class* ArtMethod::GetClassFromTypeIndex(uint16_t type_idx, bool resolve) { - mirror::Class* type = GetDexCacheResolvedType(type_idx); +inline mirror::Class* ArtMethod::GetClassFromTypeIndex(uint16_t type_idx, + bool resolve, + size_t ptr_size) { + mirror::Class* type = GetDexCacheResolvedType(type_idx, ptr_size); if (type == nullptr && resolve) { type = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, this); CHECK(type != nullptr || Thread::Current()->IsExceptionPending()); @@ -391,9 +416,9 @@ inline const DexFile::CodeItem* ArtMethod::GetCodeItem() { return GetDeclaringClass()->GetDexFile().GetCodeItem(GetCodeItemOffset()); } -inline bool ArtMethod::IsResolvedTypeIdx(uint16_t type_idx) { +inline bool ArtMethod::IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) { DCHECK(!IsProxyMethod()); - return GetDexCacheResolvedType(type_idx) != nullptr; + return GetDexCacheResolvedType(type_idx, ptr_size) != nullptr; } inline int32_t ArtMethod::GetLineNumFromDexPC(uint32_t dex_pc) { @@ -467,30 +492,33 @@ inline ArtMethod* ArtMethod::GetInterfaceMethodIfProxy(size_t pointer_size) { return this; } mirror::Class* klass = GetDeclaringClass(); - auto interface_method = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>( - GetDexMethodIndex(), pointer_size); + ArtMethod* interface_method = mirror::DexCache::GetElementPtrSize( + GetDexCacheResolvedMethods(pointer_size), + GetDexMethodIndex(), + pointer_size); DCHECK(interface_method != nullptr); DCHECK_EQ(interface_method, Runtime::Current()->GetClassLinker()->FindMethodForProxy(klass, this)); return interface_method; } -inline void ArtMethod::SetDexCacheResolvedMethods(mirror::PointerArray* new_dex_cache_methods) { - dex_cache_resolved_methods_ = GcRoot<mirror::PointerArray>(new_dex_cache_methods); +inline void ArtMethod::SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, + size_t ptr_size) { + SetNativePointer(DexCacheResolvedMethodsOffset(ptr_size), new_dex_cache_methods, ptr_size); } -inline void ArtMethod::SetDexCacheResolvedTypes( - mirror::ObjectArray<mirror::Class>* new_dex_cache_types) { - dex_cache_resolved_types_ = GcRoot<mirror::ObjectArray<mirror::Class>>(new_dex_cache_types); +inline void ArtMethod::SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, + size_t ptr_size) { + SetNativePointer(DexCacheResolvedTypesOffset(ptr_size), new_dex_cache_types, ptr_size); } -inline mirror::Class* ArtMethod::GetReturnType(bool resolve) { +inline mirror::Class* ArtMethod::GetReturnType(bool resolve, size_t ptr_size) { DCHECK(!IsProxyMethod()); const DexFile* dex_file = 
GetDexFile(); const DexFile::MethodId& method_id = dex_file->GetMethodId(GetDexMethodIndex()); const DexFile::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id); uint16_t return_type_idx = proto_id.return_type_idx_; - mirror::Class* type = GetDexCacheResolvedType(return_type_idx); + mirror::Class* type = GetDexCacheResolvedType(return_type_idx, ptr_size); if (type == nullptr && resolve) { type = Runtime::Current()->GetClassLinker()->ResolveType(return_type_idx, this); CHECK(type != nullptr || Thread::Current()->IsExceptionPending()); @@ -500,19 +528,29 @@ inline mirror::Class* ArtMethod::GetReturnType(bool resolve) { template<typename RootVisitorType> void ArtMethod::VisitRoots(RootVisitorType& visitor) { + ArtMethod* interface_method = nullptr; + mirror::Class* klass = declaring_class_.Read(); + if (UNLIKELY(klass != nullptr && klass->IsProxyClass())) { + // For normal methods, dex cache shortcuts will be visited through the declaring class. + // However, for proxies we need to keep the interface method alive, so we visit its roots. + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + interface_method = mirror::DexCache::GetElementPtrSize( + GetDexCacheResolvedMethods(pointer_size), + GetDexMethodIndex(), + pointer_size); + DCHECK(interface_method != nullptr); + DCHECK_EQ(interface_method, + Runtime::Current()->GetClassLinker()->FindMethodForProxy(klass, this)); + interface_method->VisitRoots(visitor); + } + visitor.VisitRootIfNonNull(declaring_class_.AddressWithoutBarrier()); - visitor.VisitRootIfNonNull(dex_cache_resolved_methods_.AddressWithoutBarrier()); - visitor.VisitRootIfNonNull(dex_cache_resolved_types_.AddressWithoutBarrier()); } inline void ArtMethod::CopyFrom(const ArtMethod* src, size_t image_pointer_size) { memcpy(reinterpret_cast<void*>(this), reinterpret_cast<const void*>(src), Size(image_pointer_size)); declaring_class_ = GcRoot<mirror::Class>(const_cast<ArtMethod*>(src)->GetDeclaringClass()); - dex_cache_resolved_methods_ = GcRoot<mirror::PointerArray>( - const_cast<ArtMethod*>(src)->GetDexCacheResolvedMethods()); - dex_cache_resolved_types_ = GcRoot<mirror::ObjectArray<mirror::Class>>( - const_cast<ArtMethod*>(src)->GetDexCacheResolvedTypes()); } } // namespace art diff --git a/runtime/art_method.cc b/runtime/art_method.cc index e46402d882..64416d2137 100644 --- a/runtime/art_method.cc +++ b/runtime/art_method.cc @@ -125,8 +125,9 @@ ArtMethod* ArtMethod::FindOverriddenMethod(size_t pointer_size) { } else { // Method didn't override superclass method so search interfaces if (IsProxyMethod()) { - result = GetDexCacheResolvedMethods()->GetElementPtrSize<ArtMethod*>( - GetDexMethodIndex(), pointer_size); + result = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size), + GetDexMethodIndex(), + pointer_size); CHECK_EQ(result, Runtime::Current()->GetClassLinker()->FindMethodForProxy(GetDeclaringClass(), this)); } else { @@ -261,6 +262,7 @@ uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type, // Default to handler not found. uint32_t found_dex_pc = DexFile::kDexNoIndex; // Iterate over the catch handlers associated with dex_pc. 
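With the resolved-types shortcut now a bare `GcRoot<mirror::Class>*` array, the checked variant of `GetDexCacheResolvedType` above has to compare the index against `NumResolvedTypes()` itself, since the old `ObjectArray<Class>::Get()` bounds check is gone. A standalone sketch of that checked-read shape, using plain pointers instead of `GcRoot` and an out-parameter instead of throwing (all names assumed):

```cpp
#include <cstddef>

// Checked read over a raw array that no longer carries its own length:
// the caller-supplied length (NumResolvedTypes() in the hunk above) replaces
// the bounds check the managed array type used to perform.
template <typename T>
T* CheckedReadSketch(T* const* array, size_t index, size_t length, bool* out_of_bounds) {
  if (index >= length) {
    *out_of_bounds = true;  // The real code throws ArrayIndexOutOfBoundsException here.
    return nullptr;
  }
  *out_of_bounds = false;
  return array[index];
}
```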
+ size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); for (CatchHandlerIterator it(*code_item, dex_pc); it.HasNext(); it.Next()) { uint16_t iter_type_idx = it.GetHandlerTypeIndex(); // Catch all case @@ -269,7 +271,9 @@ uint32_t ArtMethod::FindCatchBlock(Handle<mirror::Class> exception_type, break; } // Does this catch exception type apply? - mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx, true); + mirror::Class* iter_exception_type = GetClassFromTypeIndex(iter_type_idx, + true /* resolve */, + pointer_size); if (UNLIKELY(iter_exception_type == nullptr)) { // Now have a NoClassDefFoundError as exception. Ignore in case the exception class was // removed by a pro-guard like tool. diff --git a/runtime/art_method.h b/runtime/art_method.h index 6cdc4a6bb0..e0b11d0e56 100644 --- a/runtime/art_method.h +++ b/runtime/art_method.h @@ -17,6 +17,7 @@ #ifndef ART_RUNTIME_ART_METHOD_H_ #define ART_RUNTIME_ART_METHOD_H_ +#include "base/casts.h" #include "dex_file.h" #include "gc_root.h" #include "invoke_type.h" @@ -212,41 +213,35 @@ class ArtMethod FINAL { dex_method_index_ = new_idx; } - static MemberOffset DexCacheResolvedMethodsOffset() { - return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_); - } - - static MemberOffset DexCacheResolvedTypesOffset() { - return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_); - } - - ALWAYS_INLINE mirror::PointerArray* GetDexCacheResolvedMethods() + ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); - ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_idx, size_t ptr_size) + ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_idx, ArtMethod* new_method, + ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index, + ArtMethod* new_method, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - ALWAYS_INLINE void SetDexCacheResolvedMethods(mirror::PointerArray* new_dex_cache_methods) + ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - bool HasDexCacheResolvedMethods() SHARED_REQUIRES(Locks::mutator_lock_); - bool HasSameDexCacheResolvedMethods(ArtMethod* other) + bool HasDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); + bool HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); - bool HasSameDexCacheResolvedMethods(mirror::PointerArray* other_cache) + bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); template <bool kWithCheck = true> - mirror::Class* GetDexCacheResolvedType(uint32_t type_idx) + mirror::Class* GetDexCacheResolvedType(uint32_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - void SetDexCacheResolvedTypes(mirror::ObjectArray<mirror::Class>* new_dex_cache_types) + void SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - bool HasDexCacheResolvedTypes() SHARED_REQUIRES(Locks::mutator_lock_); - bool HasSameDexCacheResolvedTypes(ArtMethod* other) SHARED_REQUIRES(Locks::mutator_lock_); - bool HasSameDexCacheResolvedTypes(mirror::ObjectArray<mirror::Class>* other_cache) + bool HasDexCacheResolvedTypes(size_t 
pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); + bool HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size) + SHARED_REQUIRES(Locks::mutator_lock_); + bool HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); // Get the Class* from the type index into this method's dex cache. - mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve) + mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); // Find the method that this method overrides. @@ -267,7 +262,7 @@ class ArtMethod FINAL { return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*)); } ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) { - return GetEntryPoint<const void*>( + return GetNativePointer<const void*>( EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size); } @@ -277,8 +272,8 @@ class ArtMethod FINAL { } ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize( const void* entry_point_from_quick_compiled_code, size_t pointer_size) { - SetEntryPoint(EntryPointFromQuickCompiledCodeOffset(pointer_size), - entry_point_from_quick_compiled_code, pointer_size); + SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size), + entry_point_from_quick_compiled_code, pointer_size); } uint32_t GetCodeSize() SHARED_REQUIRES(Locks::mutator_lock_); @@ -374,6 +369,16 @@ class ArtMethod FINAL { void UnregisterNative() SHARED_REQUIRES(Locks::mutator_lock_); + static MemberOffset DexCacheResolvedMethodsOffset(size_t pointer_size) { + return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER( + PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*) * pointer_size); + } + + static MemberOffset DexCacheResolvedTypesOffset(size_t pointer_size) { + return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER( + PtrSizedFields, dex_cache_resolved_types_) / sizeof(void*) * pointer_size); + } + static MemberOffset EntryPointFromJniOffset(size_t pointer_size) { return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER( PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size); @@ -388,14 +393,14 @@ class ArtMethod FINAL { return GetEntryPointFromJniPtrSize(sizeof(void*)); } ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size) { - return GetEntryPoint<void*>(EntryPointFromJniOffset(pointer_size), pointer_size); + return GetNativePointer<void*>(EntryPointFromJniOffset(pointer_size), pointer_size); } void SetEntryPointFromJni(const void* entrypoint) SHARED_REQUIRES(Locks::mutator_lock_) { SetEntryPointFromJniPtrSize(entrypoint, sizeof(void*)); } ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size) { - SetEntryPoint(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size); + SetNativePointer(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size); } // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal @@ -464,7 +469,7 @@ class ArtMethod FINAL { const DexFile::CodeItem* GetCodeItem() SHARED_REQUIRES(Locks::mutator_lock_); - bool IsResolvedTypeIdx(uint16_t type_idx) SHARED_REQUIRES(Locks::mutator_lock_); + bool IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_); @@ -485,7 +490,8 @@ class ArtMethod FINAL { // May cause thread 
suspension due to GetClassFromTypeIdx calling ResolveType this caused a large // number of bugs at call sites. - mirror::Class* GetReturnType(bool resolve = true) SHARED_REQUIRES(Locks::mutator_lock_); + mirror::Class* GetReturnType(bool resolve, size_t ptr_size) + SHARED_REQUIRES(Locks::mutator_lock_); mirror::ClassLoader* GetClassLoader() SHARED_REQUIRES(Locks::mutator_lock_); @@ -514,7 +520,7 @@ class ArtMethod FINAL { void CopyFrom(const ArtMethod* src, size_t image_pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); - ALWAYS_INLINE mirror::ObjectArray<mirror::Class>* GetDexCacheResolvedTypes() + ALWAYS_INLINE GcRoot<mirror::Class>* GetDexCacheResolvedTypes(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); protected: @@ -522,12 +528,6 @@ class ArtMethod FINAL { // The class we are a part of. GcRoot<mirror::Class> declaring_class_; - // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access. - GcRoot<mirror::PointerArray> dex_cache_resolved_methods_; - - // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access. - GcRoot<mirror::ObjectArray<mirror::Class>> dex_cache_resolved_types_; - // Access flags; low 16 bits are defined by spec. uint32_t access_flags_; @@ -552,6 +552,12 @@ class ArtMethod FINAL { // PACKED(4) is necessary for the correctness of // RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size). struct PACKED(4) PtrSizedFields { + // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access. + ArtMethod** dex_cache_resolved_methods_; + + // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access. + GcRoot<mirror::Class>* dex_cache_resolved_types_; + // Pointer to JNI function registered to this method, or a function to resolve the JNI function. 
void* entry_point_from_jni_; @@ -567,26 +573,26 @@ class ArtMethod FINAL { } template<typename T> - ALWAYS_INLINE T GetEntryPoint(MemberOffset offset, size_t pointer_size) const { + ALWAYS_INLINE T GetNativePointer(MemberOffset offset, size_t pointer_size) const { + static_assert(std::is_pointer<T>::value, "T must be a pointer type"); DCHECK(ValidPointerSize(pointer_size)) << pointer_size; const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value(); if (pointer_size == sizeof(uint32_t)) { return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr)); } else { auto v = *reinterpret_cast<const uint64_t*>(addr); - DCHECK_EQ(reinterpret_cast<uint64_t>(reinterpret_cast<T>(v)), v) << "Conversion lost bits"; - return reinterpret_cast<T>(v); + return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v)); } } template<typename T> - ALWAYS_INLINE void SetEntryPoint(MemberOffset offset, T new_value, size_t pointer_size) { + ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, size_t pointer_size) { + static_assert(std::is_pointer<T>::value, "T must be a pointer type"); DCHECK(ValidPointerSize(pointer_size)) << pointer_size; const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value(); if (pointer_size == sizeof(uint32_t)) { uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value); - DCHECK_EQ(static_cast<uint32_t>(ptr), ptr) << "Conversion lost bits"; - *reinterpret_cast<uint32_t*>(addr) = static_cast<uint32_t>(ptr); + *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr); } else { *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value); } diff --git a/runtime/asm_support.h b/runtime/asm_support.h index 5c1922eea3..04ff1201dc 100644 --- a/runtime/asm_support.h +++ b/runtime/asm_support.h @@ -138,13 +138,13 @@ ADD_TEST_EQ(size_t(MIRROR_OBJECT_HEADER_SIZE), sizeof(art::mirror::Object)) #define MIRROR_CLASS_COMPONENT_TYPE_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE) ADD_TEST_EQ(MIRROR_CLASS_COMPONENT_TYPE_OFFSET, art::mirror::Class::ComponentTypeOffset().Int32Value()) -#define MIRROR_CLASS_ACCESS_FLAGS_OFFSET (36 + MIRROR_OBJECT_HEADER_SIZE) +#define MIRROR_CLASS_ACCESS_FLAGS_OFFSET (72 + MIRROR_OBJECT_HEADER_SIZE) ADD_TEST_EQ(MIRROR_CLASS_ACCESS_FLAGS_OFFSET, art::mirror::Class::AccessFlagsOffset().Int32Value()) -#define MIRROR_CLASS_OBJECT_SIZE_OFFSET (100 + MIRROR_OBJECT_HEADER_SIZE) +#define MIRROR_CLASS_OBJECT_SIZE_OFFSET (104 + MIRROR_OBJECT_HEADER_SIZE) ADD_TEST_EQ(MIRROR_CLASS_OBJECT_SIZE_OFFSET, art::mirror::Class::ObjectSizeOffset().Int32Value()) -#define MIRROR_CLASS_STATUS_OFFSET (112 + MIRROR_OBJECT_HEADER_SIZE) +#define MIRROR_CLASS_STATUS_OFFSET (116 + MIRROR_OBJECT_HEADER_SIZE) ADD_TEST_EQ(MIRROR_CLASS_STATUS_OFFSET, art::mirror::Class::StatusOffset().Int32Value()) @@ -184,19 +184,27 @@ ADD_TEST_EQ(MIRROR_STRING_COUNT_OFFSET, art::mirror::String::CountOffset().Int32 ADD_TEST_EQ(MIRROR_STRING_VALUE_OFFSET, art::mirror::String::ValueOffset().Int32Value()) // Offsets within java.lang.reflect.ArtMethod. 
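`DexCacheResolvedMethodsOffset(pointer_size)` above rescales the host-side `OFFSETOF_MEMBER` value (computed with the host's `sizeof(void*)`) into a slot index and multiplies by the target pointer size; the updated `asm_support.h` constants just below follow from the two new members being slots 0 and 1 of `PtrSizedFields`. A standalone sketch of that arithmetic; the 20- and 24-byte base offsets are inferred from those constants, not quoted from ArtMethod itself, so treat them as assumptions:

```cpp
#include <cstddef>
#include <cstdint>

// Rescale a member's slot index inside PtrSizedFields by the *target* pointer
// size. Base offsets are inferred from the asm_support.h values in this hunk
// (the resolved-methods pointer is slot 0).
constexpr uint32_t PtrSizedMemberOffset(uint32_t fields_base, size_t slot, size_t pointer_size) {
  return fields_base + static_cast<uint32_t>(slot * pointer_size);
}

static_assert(PtrSizedMemberOffset(20, 0, 4) == 20, "resolved methods, 32-bit");
static_assert(PtrSizedMemberOffset(20, 1, 4) == 24, "resolved types, 32-bit");
static_assert(PtrSizedMemberOffset(24, 0, 8) == 24, "resolved methods, 64-bit");
static_assert(PtrSizedMemberOffset(24, 1, 8) == 32, "resolved types, 64-bit");
static_assert(PtrSizedMemberOffset(24, 3, 8) == 48, "quick code entry point, 64-bit");
```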
-#define ART_METHOD_DEX_CACHE_METHODS_OFFSET 4 -ADD_TEST_EQ(ART_METHOD_DEX_CACHE_METHODS_OFFSET, - art::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()) +#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_32 20 +ADD_TEST_EQ(ART_METHOD_DEX_CACHE_METHODS_OFFSET_32, + art::ArtMethod::DexCacheResolvedMethodsOffset(4).Int32Value()) -#define ART_METHOD_DEX_CACHE_TYPES_OFFSET 8 -ADD_TEST_EQ(ART_METHOD_DEX_CACHE_TYPES_OFFSET, - art::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()) +#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_64 24 +ADD_TEST_EQ(ART_METHOD_DEX_CACHE_METHODS_OFFSET_64, + art::ArtMethod::DexCacheResolvedMethodsOffset(8).Int32Value()) + +#define ART_METHOD_DEX_CACHE_TYPES_OFFSET_32 24 +ADD_TEST_EQ(ART_METHOD_DEX_CACHE_TYPES_OFFSET_32, + art::ArtMethod::DexCacheResolvedTypesOffset(4).Int32Value()) + +#define ART_METHOD_DEX_CACHE_TYPES_OFFSET_64 32 +ADD_TEST_EQ(ART_METHOD_DEX_CACHE_TYPES_OFFSET_64, + art::ArtMethod::DexCacheResolvedTypesOffset(8).Int32Value()) #define ART_METHOD_QUICK_CODE_OFFSET_32 32 ADD_TEST_EQ(ART_METHOD_QUICK_CODE_OFFSET_32, art::ArtMethod::EntryPointFromQuickCompiledCodeOffset(4).Int32Value()) -#define ART_METHOD_QUICK_CODE_OFFSET_64 40 +#define ART_METHOD_QUICK_CODE_OFFSET_64 48 ADD_TEST_EQ(ART_METHOD_QUICK_CODE_OFFSET_64, art::ArtMethod::EntryPointFromQuickCompiledCodeOffset(8).Int32Value()) diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h index d2dbff634e..6a55c32138 100644 --- a/runtime/class_linker-inl.h +++ b/runtime/class_linker-inl.h @@ -63,7 +63,9 @@ inline mirror::Class* ClassLinker::FindArrayClass(Thread* self, mirror::Class** inline mirror::String* ClassLinker::ResolveString(uint32_t string_idx, ArtMethod* referrer) { mirror::Class* declaring_class = referrer->GetDeclaringClass(); - mirror::String* resolved_string = declaring_class->GetDexCacheStrings()->Get(string_idx); + // MethodVerifier refuses methods with string_idx out of bounds. 
+ DCHECK_LT(string_idx, declaring_class->GetDexCache()->NumStrings()); + mirror::String* resolved_string = declaring_class->GetDexCacheStrings()[string_idx].Read(); if (UNLIKELY(resolved_string == nullptr)) { StackHandleScope<1> hs(Thread::Current()); Handle<mirror::DexCache> dex_cache(hs.NewHandle(declaring_class->GetDexCache())); @@ -76,9 +78,8 @@ inline mirror::String* ClassLinker::ResolveString(uint32_t string_idx, return resolved_string; } -inline mirror::Class* ClassLinker::ResolveType(uint16_t type_idx, - ArtMethod* referrer) { - mirror::Class* resolved_type = referrer->GetDexCacheResolvedType(type_idx); +inline mirror::Class* ClassLinker::ResolveType(uint16_t type_idx, ArtMethod* referrer) { + mirror::Class* resolved_type = referrer->GetDexCacheResolvedType(type_idx, image_pointer_size_); if (UNLIKELY(resolved_type == nullptr)) { mirror::Class* declaring_class = referrer->GetDeclaringClass(); StackHandleScope<2> hs(Thread::Current()); diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index e78914c3a3..fc1127c6a2 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -80,6 +80,7 @@ #include "handle_scope-inl.h" #include "thread-inl.h" #include "utils.h" +#include "utils/dex_cache_arrays_layout-inl.h" #include "verifier/method_verifier.h" #include "well_known_classes.h" @@ -420,6 +421,7 @@ void ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> b Handle<mirror::Class> java_lang_DexCache(hs.NewHandle( AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_)))); SetClassRoot(kJavaLangDexCache, java_lang_DexCache.Get()); + java_lang_DexCache->SetDexCacheClass(); java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize()); mirror::Class::SetStatus(java_lang_DexCache, mirror::Class::kStatusResolved, self); @@ -1059,6 +1061,26 @@ static void SanityCheckArtMethodPointerArray( } } +static void SanityCheckArtMethodPointerArray( + ArtMethod** arr, + size_t size, + size_t pointer_size, + gc::space::ImageSpace* space) SHARED_REQUIRES(Locks::mutator_lock_) { + CHECK_EQ(arr != nullptr, size != 0u); + if (arr != nullptr) { + auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin(); + CHECK(space->GetImageHeader().GetImageSection( + ImageHeader::kSectionDexCacheArrays).Contains(offset)); + } + for (size_t j = 0; j < size; ++j) { + ArtMethod* method = mirror::DexCache::GetElementPtrSize(arr, j, pointer_size); + // expected_class == null means we are a dex cache. 
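The new `SanityCheckArtMethodPointerArray` overload above verifies that each native array lands inside the image's dex-cache-arrays section by turning the array pointer into a byte offset from the image base and calling `Contains()`. A minimal stand-in for that containment test; the struct is an assumed simplification guided by the `Offset()`/`Size()`/`Contains()` calls used in these hunks, not the real `art::ImageSection`:

```cpp
#include <cstdint>

// Simplified stand-in for an image section: a byte range [offset, offset + size).
struct ImageSectionSketch {
  uint64_t offset;
  uint64_t size;
  bool Contains(uint64_t byte_offset) const {
    return byte_offset >= offset && byte_offset - offset < size;
  }
};

// Usage: offset of a native array relative to the mapped image base.
inline bool ArrayInSection(const ImageSectionSketch& section,
                           const uint8_t* array, const uint8_t* image_begin) {
  return section.Contains(static_cast<uint64_t>(array - image_begin));
}
```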
+ if (method != nullptr) { + SanityCheckArtMethod(method, nullptr, space); + } + } +} + static void SanityCheckObjectsCallback(mirror::Object* obj, void* arg ATTRIBUTE_UNUSED) SHARED_REQUIRES(Locks::mutator_lock_) { DCHECK(obj != nullptr); @@ -1188,8 +1210,10 @@ void ClassLinker::InitFromImage() { } if (kSanityCheckObjects) { - SanityCheckArtMethodPointerArray(dex_cache->GetResolvedMethods(), nullptr, - image_pointer_size_, space); + SanityCheckArtMethodPointerArray(dex_cache->GetResolvedMethods(), + dex_cache->NumResolvedMethods(), + image_pointer_size_, + space); } CHECK_EQ(dex_file->GetLocationChecksum(), oat_dex_file->GetDexFileLocationChecksum()); @@ -1476,28 +1500,44 @@ mirror::DexCache* ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_fi self->AssertPendingOOMException(); return nullptr; } - auto strings(hs.NewHandle(AllocStringArray(self, dex_file.NumStringIds()))); - if (strings.Get() == nullptr) { - self->AssertPendingOOMException(); - return nullptr; - } - auto types(hs.NewHandle(AllocClassArray(self, dex_file.NumTypeIds()))); - if (types.Get() == nullptr) { - self->AssertPendingOOMException(); - return nullptr; - } - auto methods(hs.NewHandle(AllocPointerArray(self, dex_file.NumMethodIds()))); - if (methods.Get() == nullptr) { - self->AssertPendingOOMException(); - return nullptr; - } - auto fields(hs.NewHandle(AllocPointerArray(self, dex_file.NumFieldIds()))); - if (fields.Get() == nullptr) { - self->AssertPendingOOMException(); - return nullptr; + DexCacheArraysLayout layout(image_pointer_size_, &dex_file); + uint8_t* raw_arrays = nullptr; + if (dex_file.NumStringIds() != 0u || dex_file.NumTypeIds() != 0u || + dex_file.NumMethodIds() != 0u || dex_file.NumFieldIds() != 0u) { + // NOTE: We "leak" the raw_arrays because we never destroy the dex cache. + DCHECK(image_pointer_size_ == 4u || image_pointer_size_ == 8u); + if (sizeof(void*) == 8u && image_pointer_size_ == 4u) { + // When cross-compiling for a 32-bit target on a 64-bit host, we need these arrays + // in the low 4GiB address space so that we can store pointers in 32-bit fields. + // This is conveniently provided by the linear allocator. + raw_arrays = reinterpret_cast<uint8_t*>( + Runtime::Current()->GetLinearAlloc()->Alloc(self, layout.Size())); // Zero-initialized. + } else { + raw_arrays = reinterpret_cast<uint8_t*>(calloc(layout.Size(), 1u)); // Zero-initialized. + if (raw_arrays == nullptr) { + return nullptr; + } + } } - dex_cache->Init(&dex_file, location.Get(), strings.Get(), types.Get(), methods.Get(), - fields.Get(), image_pointer_size_); + GcRoot<mirror::String>* strings = (dex_file.NumStringIds() == 0u) ? nullptr : + reinterpret_cast<GcRoot<mirror::String>*>(raw_arrays + layout.StringsOffset()); + GcRoot<mirror::Class>* types = (dex_file.NumTypeIds() == 0u) ? nullptr : + reinterpret_cast<GcRoot<mirror::Class>*>(raw_arrays + layout.TypesOffset()); + ArtMethod** methods = (dex_file.NumMethodIds() == 0u) ? nullptr : + reinterpret_cast<ArtMethod**>(raw_arrays + layout.MethodsOffset()); + ArtField** fields = (dex_file.NumFieldIds() == 0u) ? 
nullptr : + reinterpret_cast<ArtField**>(raw_arrays + layout.FieldsOffset()); + dex_cache->Init(&dex_file, + location.Get(), + strings, + dex_file.NumStringIds(), + types, + dex_file.NumTypeIds(), + methods, + dex_file.NumMethodIds(), + fields, + dex_file.NumFieldIds(), + image_pointer_size_); return dex_cache.Get(); } @@ -2418,8 +2458,8 @@ void ClassLinker::LoadMethod(Thread* self, const DexFile& dex_file, const ClassD dst->SetDeclaringClass(klass.Get()); dst->SetCodeItemOffset(it.GetMethodCodeItemOffset()); - dst->SetDexCacheResolvedMethods(klass->GetDexCache()->GetResolvedMethods()); - dst->SetDexCacheResolvedTypes(klass->GetDexCache()->GetResolvedTypes()); + dst->SetDexCacheResolvedMethods(klass->GetDexCache()->GetResolvedMethods(), image_pointer_size_); + dst->SetDexCacheResolvedTypes(klass->GetDexCache()->GetResolvedTypes(), image_pointer_size_); uint32_t access_flags = it.GetMethodAccessFlags(); @@ -2895,9 +2935,9 @@ void ClassLinker::MoveImageClassesToClassTable() { ClassTable* const class_table = InsertClassTableForClassLoader(nullptr); for (int32_t i = 0; i < dex_caches->GetLength(); i++) { mirror::DexCache* dex_cache = dex_caches->Get(i); - mirror::ObjectArray<mirror::Class>* types = dex_cache->GetResolvedTypes(); - for (int32_t j = 0; j < types->GetLength(); j++) { - mirror::Class* klass = types->Get(j); + GcRoot<mirror::Class>* types = dex_cache->GetResolvedTypes(); + for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) { + mirror::Class* klass = types[j].Read(); if (klass != nullptr) { DCHECK(klass->GetClassLoader() == nullptr); const char* descriptor = klass->GetDescriptor(&temp); @@ -3418,7 +3458,8 @@ ArtMethod* ClassLinker::FindMethodForProxy(mirror::Class* proxy_class, ArtMethod for (jobject weak_root : dex_caches_) { mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root)); if (dex_cache != nullptr && - proxy_method->HasSameDexCacheResolvedTypes(dex_cache->GetResolvedTypes())) { + proxy_method->HasSameDexCacheResolvedTypes(dex_cache->GetResolvedTypes(), + image_pointer_size_)) { ArtMethod* resolved_method = dex_cache->GetResolvedMethod( proxy_method->GetDexMethodIndex(), image_pointer_size_); CHECK(resolved_method != nullptr); @@ -3491,8 +3532,8 @@ void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) cons // The proxy method doesn't have its own dex cache or dex file and so it steals those of its // interface prototype. The exception to this are Constructors and the Class of the Proxy itself. 
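`AllocDexCache` above replaces four managed arrays with one zero-initialized native block that is carved up at byte offsets supplied by `DexCacheArraysLayout`. A hedged standalone sketch of that carving pattern; the layout struct and offsets here are invented placeholders, not the real layout class:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Offsets would normally come from DexCacheArraysLayout; this is a placeholder.
struct LayoutSketch {
  size_t strings_offset, types_offset, methods_offset, fields_offset, total_size;
};

struct CarvedArrays {
  void** strings;   // stand-in for GcRoot<mirror::String>*
  void** types;     // stand-in for GcRoot<mirror::Class>*
  void** methods;   // stand-in for ArtMethod**
  void** fields;    // stand-in for ArtField**
};

// Carve typed sub-arrays out of a single zeroed allocation. Like the hunk's
// NOTE says, the block is intentionally never freed in the real code.
CarvedArrays CarveDexCacheArrays(const LayoutSketch& layout) {
  uint8_t* raw = static_cast<uint8_t*>(calloc(layout.total_size, 1u));  // zero-initialized
  if (raw == nullptr) {
    return CarvedArrays{};
  }
  CarvedArrays out;
  out.strings = reinterpret_cast<void**>(raw + layout.strings_offset);
  out.types   = reinterpret_cast<void**>(raw + layout.types_offset);
  out.methods = reinterpret_cast<void**>(raw + layout.methods_offset);
  out.fields  = reinterpret_cast<void**>(raw + layout.fields_offset);
  return out;
}
```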
- CHECK(prototype->HasSameDexCacheResolvedMethods(method)); - CHECK(prototype->HasSameDexCacheResolvedTypes(method)); + CHECK(prototype->HasSameDexCacheResolvedMethods(method, image_pointer_size_)); + CHECK(prototype->HasSameDexCacheResolvedTypes(method, image_pointer_size_)); auto* np = method->GetInterfaceMethodIfProxy(image_pointer_size_); CHECK_EQ(prototype->GetDeclaringClass()->GetDexCache(), np->GetDexCache()); CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex()); @@ -3500,7 +3541,8 @@ void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) cons CHECK_STREQ(np->GetName(), prototype->GetName()); CHECK_STREQ(np->GetShorty(), prototype->GetShorty()); // More complex sanity - via dex cache - CHECK_EQ(np->GetReturnType(), prototype->GetReturnType()); + CHECK_EQ(np->GetReturnType(true /* resolve */, image_pointer_size_), + prototype->GetReturnType(true /* resolve */, image_pointer_size_)); } bool ClassLinker::CanWeInitializeClass(mirror::Class* klass, bool can_init_statics, @@ -3838,6 +3880,7 @@ static void ThrowSignatureMismatch(Handle<mirror::Class> klass, } static bool HasSameSignatureWithDifferentClassLoaders(Thread* self, + size_t pointer_size, Handle<mirror::Class> klass, Handle<mirror::Class> super_klass, ArtMethod* method1, @@ -3845,12 +3888,14 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self, SHARED_REQUIRES(Locks::mutator_lock_) { { StackHandleScope<1> hs(self); - Handle<mirror::Class> return_type(hs.NewHandle(method1->GetReturnType())); + Handle<mirror::Class> return_type(hs.NewHandle(method1->GetReturnType(true /* resolve */, + pointer_size))); if (UNLIKELY(return_type.Get() == nullptr)) { ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1); return false; } - mirror::Class* other_return_type = method2->GetReturnType(); + mirror::Class* other_return_type = method2->GetReturnType(true /* resolve */, + pointer_size); if (UNLIKELY(other_return_type == nullptr)) { ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2); return false; @@ -3895,7 +3940,7 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self, StackHandleScope<1> hs(self); uint32_t param_type_idx = types1->GetTypeItem(i).type_idx_; Handle<mirror::Class> param_type(hs.NewHandle( - method1->GetClassFromTypeIndex(param_type_idx, true))); + method1->GetClassFromTypeIndex(param_type_idx, true /* resolve */, pointer_size))); if (UNLIKELY(param_type.Get() == nullptr)) { ThrowSignatureCheckResolveArgException(klass, super_klass, method1, method1, i, param_type_idx); @@ -3903,7 +3948,7 @@ static bool HasSameSignatureWithDifferentClassLoaders(Thread* self, } uint32_t other_param_type_idx = types2->GetTypeItem(i).type_idx_; mirror::Class* other_param_type = - method2->GetClassFromTypeIndex(other_param_type_idx, true); + method2->GetClassFromTypeIndex(other_param_type_idx, true /* resolve */, pointer_size); if (UNLIKELY(other_param_type == nullptr)) { ThrowSignatureCheckResolveArgException(klass, super_klass, method1, method2, i, other_param_type_idx); @@ -3939,7 +3984,8 @@ bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) { auto* m = klass->GetVTableEntry(i, image_pointer_size_); auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_); if (m != super_m) { - if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self, klass, super_klass, + if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self, image_pointer_size_, + klass, super_klass, m, 
super_m))) { self->AssertPendingException(); return false; @@ -3956,7 +4002,8 @@ bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) { j, image_pointer_size_); auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_); if (m != super_m) { - if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self, klass, super_klass, + if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self, image_pointer_size_, + klass, super_klass, m, super_m))) { self->AssertPendingException(); return false; @@ -5091,8 +5138,8 @@ bool ClassLinker::LinkInterfaceMethods(Thread* self, Handle<mirror::Class> klass // Check that there are no stale methods are in the dex cache array. if (kIsDebugBuild) { auto* resolved_methods = klass->GetDexCache()->GetResolvedMethods(); - for (size_t i = 0, count = resolved_methods->GetLength(); i < count; ++i) { - auto* m = resolved_methods->GetElementPtrSize<ArtMethod*>(i, image_pointer_size_); + for (size_t i = 0, count = klass->GetDexCache()->NumResolvedMethods(); i < count; ++i) { + auto* m = mirror::DexCache::GetElementPtrSize(resolved_methods, i, image_pointer_size_); CHECK(move_table.find(m) == move_table.end()) << PrettyMethod(m); } } diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index c3191fad3f..b4ea3b3460 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -165,12 +165,14 @@ class ClassLinkerTest : public CommonRuntimeTest { EXPECT_TRUE(method->GetName() != nullptr); EXPECT_TRUE(method->GetSignature() != Signature::NoSignature()); - EXPECT_TRUE(method->HasDexCacheResolvedMethods()); - EXPECT_TRUE(method->HasDexCacheResolvedTypes()); + EXPECT_TRUE(method->HasDexCacheResolvedMethods(sizeof(void*))); + EXPECT_TRUE(method->HasDexCacheResolvedTypes(sizeof(void*))); EXPECT_TRUE(method->HasSameDexCacheResolvedMethods( - method->GetDeclaringClass()->GetDexCache()->GetResolvedMethods())); + method->GetDeclaringClass()->GetDexCache()->GetResolvedMethods(), + sizeof(void*))); EXPECT_TRUE(method->HasSameDexCacheResolvedTypes( - method->GetDeclaringClass()->GetDexCache()->GetResolvedTypes())); + method->GetDeclaringClass()->GetDexCache()->GetResolvedTypes(), + sizeof(void*))); } void AssertField(mirror::Class* klass, ArtField* field) @@ -357,8 +359,9 @@ class ClassLinkerTest : public CommonRuntimeTest { // Verify the dex cache has resolution methods in all resolved method slots mirror::DexCache* dex_cache = class_linker_->FindDexCache(Thread::Current(), dex); auto* resolved_methods = dex_cache->GetResolvedMethods(); - for (size_t i = 0; i < static_cast<size_t>(resolved_methods->GetLength()); i++) { - EXPECT_TRUE(resolved_methods->GetElementPtrSize<ArtMethod*>(i, sizeof(void*)) != nullptr) + for (size_t i = 0, num_methods = dex_cache->NumResolvedMethods(); i != num_methods; ++i) { + EXPECT_TRUE( + mirror::DexCache::GetElementPtrSize(resolved_methods, i, sizeof(void*)) != nullptr) << dex.GetLocation() << " i=" << i; } } @@ -565,6 +568,10 @@ struct DexCacheOffsets : public CheckOffsets<mirror::DexCache> { addOffset(OFFSETOF_MEMBER(mirror::DexCache, dex_), "dex"); addOffset(OFFSETOF_MEMBER(mirror::DexCache, dex_file_), "dexFile"); addOffset(OFFSETOF_MEMBER(mirror::DexCache, location_), "location"); + addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_fields_), "numResolvedFields"); + addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_methods_), "numResolvedMethods"); + addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_resolved_types_), "numResolvedTypes"); + 
addOffset(OFFSETOF_MEMBER(mirror::DexCache, num_strings_), "numStrings"); addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_fields_), "resolvedFields"); addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_methods_), "resolvedMethods"); addOffset(OFFSETOF_MEMBER(mirror::DexCache, resolved_types_), "resolvedTypes"); diff --git a/runtime/debugger.cc b/runtime/debugger.cc index 8d34f5a78f..8afb96856b 100644 --- a/runtime/debugger.cc +++ b/runtime/debugger.cc @@ -3821,7 +3821,9 @@ JDWP::JdwpError Dbg::PrepareInvokeMethod(uint32_t request_id, JDWP::ObjectId thr if (shorty[i + 1] == 'L') { // Did we really get an argument of an appropriate reference type? mirror::Class* parameter_type = - m->GetClassFromTypeIndex(types->GetTypeItem(i).type_idx_, true); + m->GetClassFromTypeIndex(types->GetTypeItem(i).type_idx_, + true /* resolve */, + sizeof(void*)); mirror::Object* argument = gRegistry->Get<mirror::Object*>(arg_values[i], &error); if (error != JDWP::ERR_NONE) { return JDWP::ERR_INVALID_OBJECT; diff --git a/runtime/dex_file.cc b/runtime/dex_file.cc index 213f25dfdb..85274cde5c 100644 --- a/runtime/dex_file.cc +++ b/runtime/dex_file.cc @@ -1227,7 +1227,9 @@ mirror::Object* DexFile::GetAnnotationDefaultValue(ArtMethod* method) const { AnnotationValue annotation_value; StackHandleScope<2> hs(Thread::Current()); Handle<mirror::Class> h_klass(hs.NewHandle(klass)); - Handle<mirror::Class> return_type(hs.NewHandle(method->GetReturnType())); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + Handle<mirror::Class> return_type(hs.NewHandle( + method->GetReturnType(true /* resolve */, pointer_size))); if (!ProcessAnnotationValue(h_klass, &annotation, &annotation_value, return_type, kAllObjects)) { return nullptr; } @@ -1343,7 +1345,9 @@ mirror::Object* DexFile::CreateAnnotationMember(Handle<mirror::Class> klass, if (annotation_method == nullptr) { return nullptr; } - Handle<mirror::Class> method_return(hs.NewHandle(annotation_method->GetReturnType())); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + Handle<mirror::Class> method_return(hs.NewHandle( + annotation_method->GetReturnType(true /* resolve */, pointer_size))); AnnotationValue annotation_value; if (!ProcessAnnotationValue(klass, annotation, &annotation_value, method_return, kAllObjects)) { diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h index 66e88ba885..cc3eefed34 100644 --- a/runtime/entrypoints/entrypoint_utils-inl.h +++ b/runtime/entrypoints/entrypoint_utils-inl.h @@ -120,9 +120,11 @@ ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx, ArtMethod* method, Thread* self, bool* slow_path) { - mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + size_t pointer_size = class_linker->GetImagePointerSize(); + mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size); if (UNLIKELY(klass == nullptr)) { - klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); + klass = class_linker->ResolveType(type_idx, method); *slow_path = true; if (klass == nullptr) { DCHECK(self->IsExceptionPending()); @@ -258,9 +260,11 @@ inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, *slow_path = true; return nullptr; // Failure } - mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + size_t 
pointer_size = class_linker->GetImagePointerSize(); + mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size); if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve - klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method); + klass = class_linker->ResolveType(type_idx, method); *slow_path = true; if (klass == nullptr) { // Error DCHECK(Thread::Current()->IsExceptionPending()); diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc index eaf33f6b7f..94aced27ed 100644 --- a/runtime/entrypoints/entrypoint_utils.cc +++ b/runtime/entrypoints/entrypoint_utils.cc @@ -43,9 +43,11 @@ static inline mirror::Class* CheckFilledNewArrayAlloc(uint32_t type_idx, ThrowNegativeArraySizeException(component_count); return nullptr; // Failure } - mirror::Class* klass = referrer->GetDexCacheResolvedType<false>(type_idx); + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + size_t pointer_size = class_linker->GetImagePointerSize(); + mirror::Class* klass = referrer->GetDexCacheResolvedType<false>(type_idx, pointer_size); if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve - klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, referrer); + klass = class_linker->ResolveType(type_idx, referrer); if (klass == nullptr) { // Error DCHECK(self->IsExceptionPending()); return nullptr; // Failure @@ -214,7 +216,8 @@ void CheckReferenceResult(mirror::Object* o, Thread* self) { return; } // Make sure that the result is an instance of the type this method was expected to return. - mirror::Class* return_type = self->GetCurrentMethod(nullptr)->GetReturnType(); + mirror::Class* return_type = self->GetCurrentMethod(nullptr)->GetReturnType(true /* resolve */, + sizeof(void*)); if (!o->InstanceOf(return_type)) { Runtime::Current()->GetJavaVM()->JniAbortF(nullptr, @@ -277,7 +280,9 @@ JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, cons StackHandleScope<1> hs(soa.Self()); auto h_interface_method(hs.NewHandle(soa.Decode<mirror::Method*>(interface_method_jobj))); // This can cause thread suspension. 
- mirror::Class* result_type = h_interface_method->GetArtMethod()->GetReturnType(); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + mirror::Class* result_type = + h_interface_method->GetArtMethod()->GetReturnType(true /* resolve */, pointer_size); mirror::Object* result_ref = soa.Decode<mirror::Object*>(result); JValue result_unboxed; if (!UnboxPrimitiveForResult(result_ref, result_type, &result_unboxed)) { diff --git a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc index 9311791a42..28c62a8524 100644 --- a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc @@ -33,7 +33,7 @@ extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ SHARED_REQUIRES(Locks::mutator_lock_) { \ ScopedQuickEntrypointChecks sqec(self); \ if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \ - mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); \ + mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, sizeof(void*)); \ if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \ size_t byte_count = klass->GetObjectSize(); \ byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \ diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 0c7caf38b9..1302c5f17b 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -980,7 +980,7 @@ extern "C" const void* artQuickResolutionTrampoline( // FindVirtualMethodFor... This is ok for FindDexMethodIndexInOtherDexFile that only cares // about the name and signature. uint32_t update_dex_cache_method_index = called->GetDexMethodIndex(); - if (!called->HasSameDexCacheResolvedMethods(caller)) { + if (!called->HasSameDexCacheResolvedMethods(caller, sizeof(void*))) { // Calling from one dex file to another, need to compute the method index appropriate to // the caller's dex file. Since we get here only if the original called was a runtime // method, we've got the correct dex_file and a dex_method_idx from above. diff --git a/runtime/image.cc b/runtime/image.cc index 8df17c6929..42b348ac58 100644 --- a/runtime/image.cc +++ b/runtime/image.cc @@ -24,7 +24,7 @@ namespace art { const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' }; -const uint8_t ImageHeader::kImageVersion[] = { '0', '2', '0', '\0' }; +const uint8_t ImageHeader::kImageVersion[] = { '0', '2', '1', '\0' }; ImageHeader::ImageHeader(uint32_t image_begin, uint32_t image_size, diff --git a/runtime/image.h b/runtime/image.h index e2d59f9aa5..20e4159b09 100644 --- a/runtime/image.h +++ b/runtime/image.h @@ -168,6 +168,7 @@ class PACKED(4) ImageHeader { kSectionObjects, kSectionArtFields, kSectionArtMethods, + kSectionDexCacheArrays, kSectionInternedStrings, kSectionImageBitmap, kSectionCount, // Number of elements in enum. 
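The entrypoint hunks above all apply the same pattern: look up the ClassLinker once, derive the image pointer size from it, and pass that size into the dex-cache accessor, falling back to ResolveType only on a cache miss. A minimal sketch of that pattern follows; it assumes the ART runtime headers touched by this change, and the helper name ResolveTypeCached is only illustrative, not part of the CL.

static inline mirror::Class* ResolveTypeCached(uint32_t type_idx, ArtMethod* referrer)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // The image pointer size selects 4- or 8-byte elements in the native dex cache arrays.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  // Fast path: the type may already be cached on the referrer's dex cache.
  mirror::Class* klass = referrer->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {
    // Slow path: resolve through the class linker; returns null with a pending exception on error.
    klass = class_linker->ResolveType(type_idx, referrer);
  }
  return klass;
}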
diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc index 2be570ac85..6d22fe06ee 100644 --- a/runtime/intern_table.cc +++ b/runtime/intern_table.cc @@ -22,7 +22,7 @@ #include "gc/collector/garbage_collector.h" #include "gc/space/image_space.h" #include "gc/weak_root_state.h" -#include "mirror/dex_cache.h" +#include "mirror/dex_cache-inl.h" #include "mirror/object_array-inl.h" #include "mirror/object-inl.h" #include "mirror/string-inl.h" @@ -165,8 +165,7 @@ void InternTable::AddImageStringsToTable(gc::space::ImageSpace* image_space) { mirror::ObjectArray<mirror::DexCache>* dex_caches = root->AsObjectArray<mirror::DexCache>(); for (int32_t i = 0; i < dex_caches->GetLength(); ++i) { mirror::DexCache* dex_cache = dex_caches->Get(i); - const DexFile* dex_file = dex_cache->GetDexFile(); - const size_t num_strings = dex_file->NumStringIds(); + const size_t num_strings = dex_cache->NumStrings(); for (size_t j = 0; j < num_strings; ++j) { mirror::String* image_string = dex_cache->GetResolvedString(j); if (image_string != nullptr) { diff --git a/runtime/interpreter/interpreter_common.cc b/runtime/interpreter/interpreter_common.cc index f923b848fa..af67379375 100644 --- a/runtime/interpreter/interpreter_common.cc +++ b/runtime/interpreter/interpreter_common.cc @@ -616,9 +616,10 @@ static inline bool DoCallCommon(ArtMethod* called_method, case 'L': { Object* o = shadow_frame.GetVRegReference(src_reg); if (do_assignability_check && o != nullptr) { + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); Class* arg_type = new_shadow_frame->GetMethod()->GetClassFromTypeIndex( - params->GetTypeItem(shorty_pos).type_idx_, true); + params->GetTypeItem(shorty_pos).type_idx_, true /* resolve */, pointer_size); if (arg_type == nullptr) { CHECK(self->IsExceptionPending()); return false; diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h index 6468659d9f..fdefb9f74c 100644 --- a/runtime/interpreter/interpreter_common.h +++ b/runtime/interpreter/interpreter_common.h @@ -344,7 +344,9 @@ static inline String* ResolveString(Thread* self, ShadowFrame& shadow_frame, uin } ArtMethod* method = shadow_frame.GetMethod(); mirror::Class* declaring_class = method->GetDeclaringClass(); - mirror::String* s = declaring_class->GetDexCacheStrings()->Get(string_idx); + // MethodVerifier refuses methods with string_idx out of bounds. 
+ DCHECK_LT(string_idx, declaring_class->GetDexCache()->NumStrings()); + mirror::String* s = declaring_class->GetDexCacheStrings()[string_idx].Read(); if (UNLIKELY(s == nullptr)) { StackHandleScope<1> hs(self); Handle<mirror::DexCache> dex_cache(hs.NewHandle(declaring_class->GetDexCache())); diff --git a/runtime/interpreter/interpreter_goto_table_impl.cc b/runtime/interpreter/interpreter_goto_table_impl.cc index 7027cbfc52..72e2ba0e7b 100644 --- a/runtime/interpreter/interpreter_goto_table_impl.cc +++ b/runtime/interpreter/interpreter_goto_table_impl.cc @@ -327,7 +327,9 @@ JValue ExecuteGotoImpl(Thread* self, const DexFile::CodeItem* code_item, ShadowF const uint8_t vreg_index = inst->VRegA_11x(inst_data); Object* obj_result = shadow_frame.GetVRegReference(vreg_index); if (do_assignability_check && obj_result != nullptr) { - Class* return_type = shadow_frame.GetMethod()->GetReturnType(); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + Class* return_type = shadow_frame.GetMethod()->GetReturnType(true /* resolve */, + pointer_size); obj_result = shadow_frame.GetVRegReference(vreg_index); if (return_type == nullptr) { // Return the pending exception. diff --git a/runtime/interpreter/interpreter_switch_impl.cc b/runtime/interpreter/interpreter_switch_impl.cc index 544f7886e9..b5cc11e070 100644 --- a/runtime/interpreter/interpreter_switch_impl.cc +++ b/runtime/interpreter/interpreter_switch_impl.cc @@ -225,7 +225,9 @@ JValue ExecuteSwitchImpl(Thread* self, const DexFile::CodeItem* code_item, const size_t ref_idx = inst->VRegA_11x(inst_data); Object* obj_result = shadow_frame.GetVRegReference(ref_idx); if (do_assignability_check && obj_result != nullptr) { - Class* return_type = shadow_frame.GetMethod()->GetReturnType(); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + Class* return_type = shadow_frame.GetMethod()->GetReturnType(true /* resolve */, + pointer_size); // Re-load since it might have moved. obj_result = shadow_frame.GetVRegReference(ref_idx); if (return_type == nullptr) { diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h index b2c6e4da12..10b381d59d 100644 --- a/runtime/mirror/class-inl.h +++ b/runtime/mirror/class-inl.h @@ -817,12 +817,12 @@ inline uint32_t Class::NumDirectInterfaces() { } } -inline void Class::SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings) { - SetFieldObject<false>(DexCacheStringsOffset(), new_dex_cache_strings); +inline void Class::SetDexCacheStrings(GcRoot<String>* new_dex_cache_strings) { + SetFieldPtr<false>(DexCacheStringsOffset(), new_dex_cache_strings); } -inline ObjectArray<String>* Class::GetDexCacheStrings() { - return GetFieldObject<ObjectArray<String>>(DexCacheStringsOffset()); +inline GcRoot<String>* Class::GetDexCacheStrings() { + return GetFieldPtr<GcRoot<String>*>(DexCacheStringsOffset()); } template<class Visitor> diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h index 1420e5bdf8..94224327b2 100644 --- a/runtime/mirror/class.h +++ b/runtime/mirror/class.h @@ -250,6 +250,14 @@ class MANAGED Class FINAL : public Object { SetClassFlags(kClassFlagClassLoader); } + ALWAYS_INLINE bool IsDexCacheClass() SHARED_REQUIRES(Locks::mutator_lock_) { + return (GetClassFlags() & kClassFlagDexCache) != 0; + } + + ALWAYS_INLINE void SetDexCacheClass() SHARED_REQUIRES(Locks::mutator_lock_) { + SetClassFlags(GetClassFlags() | kClassFlagDexCache); + } + // Returns true if the class is abstract. 
ALWAYS_INLINE bool IsAbstract() SHARED_REQUIRES(Locks::mutator_lock_) { return (GetAccessFlags() & kAccAbstract) != 0; @@ -1077,8 +1085,8 @@ class MANAGED Class FINAL : public Object { bool GetSlowPathEnabled() SHARED_REQUIRES(Locks::mutator_lock_); void SetSlowPath(bool enabled) SHARED_REQUIRES(Locks::mutator_lock_); - ObjectArray<String>* GetDexCacheStrings() SHARED_REQUIRES(Locks::mutator_lock_); - void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings) + GcRoot<String>* GetDexCacheStrings() SHARED_REQUIRES(Locks::mutator_lock_); + void SetDexCacheStrings(GcRoot<String>* new_dex_cache_strings) SHARED_REQUIRES(Locks::mutator_lock_); static MemberOffset DexCacheStringsOffset() { return OFFSET_OF_OBJECT_MEMBER(Class, dex_cache_strings_); @@ -1173,9 +1181,6 @@ class MANAGED Class FINAL : public Object { // runtime such as arrays and primitive classes). HeapReference<DexCache> dex_cache_; - // Short cuts to dex_cache_ member for fast compiled code access. - HeapReference<ObjectArray<String>> dex_cache_strings_; - // The interface table (iftable_) contains pairs of a interface class and an array of the // interface methods. There is one pair per interface supported by this class. That means one // pair for each interface we support directly, indirectly via superclass, or indirectly via a @@ -1209,9 +1214,8 @@ class MANAGED Class FINAL : public Object { // virtual_ methods_ for miranda methods. HeapReference<PointerArray> vtable_; - // Access flags; low 16 bits are defined by VM spec. - // Note: Shuffled back. - uint32_t access_flags_; + // Short cuts to dex_cache_ member for fast compiled code access. + uint64_t dex_cache_strings_; // static, private, and <init> methods. Pointer to an ArtMethod length-prefixed array. uint64_t direct_methods_; @@ -1234,6 +1238,9 @@ class MANAGED Class FINAL : public Object { // length-prefixed array. uint64_t virtual_methods_; + // Access flags; low 16 bits are defined by VM spec. + uint32_t access_flags_; + // Class flags to help speed up visiting object references. uint32_t class_flags_; diff --git a/runtime/mirror/class_flags.h b/runtime/mirror/class_flags.h index eb2e2ebf81..139c4cb67a 100644 --- a/runtime/mirror/class_flags.h +++ b/runtime/mirror/class_flags.h @@ -41,17 +41,20 @@ static constexpr uint32_t kClassFlagClass = 0x00000010; // Class is ClassLoader or one of its subclasses. static constexpr uint32_t kClassFlagClassLoader = 0x00000020; +// Class is DexCache. +static constexpr uint32_t kClassFlagDexCache = 0x00000040; + // Class is a soft/weak/phantom class. -static constexpr uint32_t kClassFlagSoftReference = 0x00000040; +static constexpr uint32_t kClassFlagSoftReference = 0x00000080; // Class is a weak reference class. -static constexpr uint32_t kClassFlagWeakReference = 0x00000080; +static constexpr uint32_t kClassFlagWeakReference = 0x00000100; // Class is a finalizer reference class. -static constexpr uint32_t kClassFlagFinalizerReference = 0x00000100; +static constexpr uint32_t kClassFlagFinalizerReference = 0x00000200; // Class is the phantom reference class. -static constexpr uint32_t kClassFlagPhantomReference = 0x00000200; +static constexpr uint32_t kClassFlagPhantomReference = 0x00000400; // Combination of flags to figure out if the class is either the weak/soft/phantom/finalizer // reference class. 
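Because kClassFlagDexCache now occupies bit 0x40, each reference flag above it moves up by one bit, and a DexCache instance can be detected with a single AND on the class flags word, as the new Class::IsDexCacheClass() shows. A small self-contained sketch of that layout and check; the constants are copied from the hunk above, everything else is illustrative and independent of the ART headers.

#include <cstdint>

constexpr uint32_t kClassFlagDexCache           = 0x00000040;
constexpr uint32_t kClassFlagSoftReference      = 0x00000080;
constexpr uint32_t kClassFlagWeakReference      = 0x00000100;
constexpr uint32_t kClassFlagFinalizerReference = 0x00000200;
constexpr uint32_t kClassFlagPhantomReference   = 0x00000400;

// Every flag owns a distinct bit, so classifying an object is one load and one AND.
constexpr bool IsDexCacheClass(uint32_t class_flags) {
  return (class_flags & kClassFlagDexCache) != 0;
}

static_assert((kClassFlagDexCache &
               (kClassFlagSoftReference | kClassFlagWeakReference |
                kClassFlagFinalizerReference | kClassFlagPhantomReference)) == 0,
              "the DexCache flag must not overlap the reference flags");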
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h index 4b5063acd2..f8ccfb1b39 100644 --- a/runtime/mirror/dex_cache-inl.h +++ b/runtime/mirror/dex_cache-inl.h @@ -21,6 +21,7 @@ #include "art_field-inl.h" #include "art_method-inl.h" +#include "base/casts.h" #include "base/logging.h" #include "mirror/class.h" #include "runtime.h" @@ -33,29 +34,53 @@ inline uint32_t DexCache::ClassSize(size_t pointer_size) { return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size); } +inline String* DexCache::GetResolvedString(uint32_t string_idx) { + DCHECK_LT(string_idx, NumStrings()); + return GetStrings()[string_idx].Read(); +} + +inline void DexCache::SetResolvedString(uint32_t string_idx, String* resolved) { + DCHECK_LT(string_idx, NumStrings()); + // TODO default transaction support. + GetStrings()[string_idx] = GcRoot<String>(resolved); + // TODO: Fine-grained marking, so that we don't need to go through all arrays in full. + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this); +} + +inline Class* DexCache::GetResolvedType(uint32_t type_idx) { + DCHECK_LT(type_idx, NumResolvedTypes()); + return GetResolvedTypes()[type_idx].Read(); +} + inline void DexCache::SetResolvedType(uint32_t type_idx, Class* resolved) { + DCHECK_LT(type_idx, NumResolvedTypes()); // NOTE: Unchecked, i.e. not throwing AIOOB. // TODO default transaction support. DCHECK(resolved == nullptr || !resolved->IsErroneous()); - GetResolvedTypes()->Set(type_idx, resolved); + GetResolvedTypes()[type_idx] = GcRoot<Class>(resolved); + // TODO: Fine-grained marking, so that we don't need to go through all arrays in full. + Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this); } -inline ArtField* DexCache::GetResolvedField(uint32_t idx, size_t ptr_size) { +inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, size_t ptr_size) { DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size); - auto* field = GetResolvedFields()->GetElementPtrSize<ArtField*>(idx, ptr_size); + DCHECK_LT(field_idx, NumResolvedFields()); // NOTE: Unchecked, i.e. not throwing AIOOB. + ArtField* field = GetElementPtrSize(GetResolvedFields(), field_idx, ptr_size); if (field == nullptr || field->GetDeclaringClass()->IsErroneous()) { return nullptr; } return field; } -inline void DexCache::SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size) { +inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, size_t ptr_size) { DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size); - GetResolvedFields()->SetElementPtrSize(idx, field, ptr_size); + DCHECK_LT(field_idx, NumResolvedFields()); // NOTE: Unchecked, i.e. not throwing AIOOB. + SetElementPtrSize(GetResolvedFields(), field_idx, field, ptr_size); } inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, size_t ptr_size) { DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size); - auto* method = GetResolvedMethods()->GetElementPtrSize<ArtMethod*>(method_idx, ptr_size); + DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB. 
+ ArtMethod* method = GetElementPtrSize<ArtMethod*>(GetResolvedMethods(), method_idx, ptr_size); // Hide resolution trampoline methods from the caller if (method != nullptr && method->IsRuntimeMethod()) { DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod()); @@ -64,9 +89,52 @@ inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, size_t ptr_si return method; } -inline void DexCache::SetResolvedMethod(uint32_t idx, ArtMethod* method, size_t ptr_size) { +inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method, size_t ptr_size) { DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size); - GetResolvedMethods()->SetElementPtrSize(idx, method, ptr_size); + DCHECK_LT(method_idx, NumResolvedMethods()); // NOTE: Unchecked, i.e. not throwing AIOOB. + SetElementPtrSize(GetResolvedMethods(), method_idx, method, ptr_size); +} + +template <typename PtrType> +inline PtrType DexCache::GetElementPtrSize(PtrType* ptr_array, size_t idx, size_t ptr_size) { + if (ptr_size == 8u) { + uint64_t element = reinterpret_cast<const uint64_t*>(ptr_array)[idx]; + return reinterpret_cast<PtrType>(dchecked_integral_cast<uintptr_t>(element)); + } else { + DCHECK_EQ(ptr_size, 4u); + uint32_t element = reinterpret_cast<const uint32_t*>(ptr_array)[idx]; + return reinterpret_cast<PtrType>(dchecked_integral_cast<uintptr_t>(element)); + } +} + +template <typename PtrType> +inline void DexCache::SetElementPtrSize(PtrType* ptr_array, + size_t idx, + PtrType ptr, + size_t ptr_size) { + if (ptr_size == 8u) { + reinterpret_cast<uint64_t*>(ptr_array)[idx] = + dchecked_integral_cast<uint64_t>(reinterpret_cast<uintptr_t>(ptr)); + } else { + DCHECK_EQ(ptr_size, 4u); + reinterpret_cast<uint32_t*>(ptr_array)[idx] = + dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr)); + } +} + +template <VerifyObjectFlags kVerifyFlags, typename Visitor> +inline void DexCache::VisitReferences(mirror::Class* klass, const Visitor& visitor) { + // Visit instance fields first. + VisitInstanceFieldsReferences(klass, visitor); + // Visit arrays after. + GcRoot<mirror::String>* strings = GetStrings(); + for (size_t i = 0, num_strings = NumStrings(); i != num_strings; ++i) { + visitor.VisitRootIfNonNull(strings[i].AddressWithoutBarrier()); + } + GcRoot<mirror::Class>* resolved_types = GetResolvedTypes(); + for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) { + visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier()); + } } } // namespace mirror diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc index 630faee356..349a319992 100644 --- a/runtime/mirror/dex_cache.cc +++ b/runtime/mirror/dex_cache.cc @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -#include "dex_cache.h" +#include "dex_cache-inl.h" #include "art_method-inl.h" #include "base/logging.h" @@ -31,22 +31,34 @@ namespace art { namespace mirror { -void DexCache::Init(const DexFile* dex_file, String* location, ObjectArray<String>* strings, - ObjectArray<Class>* resolved_types, PointerArray* resolved_methods, - PointerArray* resolved_fields, size_t pointer_size) { +void DexCache::Init(const DexFile* dex_file, + String* location, + GcRoot<String>* strings, + uint32_t num_strings, + GcRoot<Class>* resolved_types, + uint32_t num_resolved_types, + ArtMethod** resolved_methods, + uint32_t num_resolved_methods, + ArtField** resolved_fields, + uint32_t num_resolved_fields, + size_t pointer_size) { CHECK(dex_file != nullptr); CHECK(location != nullptr); - CHECK(strings != nullptr); - CHECK(resolved_types != nullptr); - CHECK(resolved_methods != nullptr); - CHECK(resolved_fields != nullptr); + CHECK_EQ(num_strings != 0u, strings != nullptr); + CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr); + CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr); + CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr); SetDexFile(dex_file); SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_), location); - SetFieldObject<false>(StringsOffset(), strings); - SetFieldObject<false>(ResolvedFieldsOffset(), resolved_fields); - SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_), resolved_types); - SetFieldObject<false>(ResolvedMethodsOffset(), resolved_methods); + SetField64<false>(StringsOffset(), reinterpret_cast<uintptr_t>(strings)); + SetField64<false>(ResolvedTypesOffset(), reinterpret_cast<uintptr_t>(resolved_types)); + SetField64<false>(ResolvedMethodsOffset(), reinterpret_cast<uintptr_t>(resolved_methods)); + SetField64<false>(ResolvedFieldsOffset(), reinterpret_cast<uintptr_t>(resolved_fields)); + SetField32<false>(NumStringsOffset(), num_strings); + SetField32<false>(NumResolvedTypesOffset(), num_resolved_types); + SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods); + SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields); Runtime* const runtime = Runtime::Current(); if (runtime->HasResolutionMethod()) { @@ -60,9 +72,9 @@ void DexCache::Fixup(ArtMethod* trampoline, size_t pointer_size) { CHECK(trampoline != nullptr); CHECK(trampoline->IsRuntimeMethod()); auto* resolved_methods = GetResolvedMethods(); - for (size_t i = 0, length = resolved_methods->GetLength(); i < length; i++) { - if (resolved_methods->GetElementPtrSize<ArtMethod*>(i, pointer_size) == nullptr) { - resolved_methods->SetElementPtrSize(i, trampoline, pointer_size); + for (size_t i = 0, length = NumResolvedMethods(); i < length; i++) { + if (GetElementPtrSize<ArtMethod*>(resolved_methods, i, pointer_size) == nullptr) { + SetElementPtrSize(resolved_methods, i, trampoline, pointer_size); } } } diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h index ba49a15f22..3144553657 100644 --- a/runtime/mirror/dex_cache.h +++ b/runtime/mirror/dex_cache.h @@ -46,8 +46,16 @@ class MANAGED DexCache FINAL : public Object { return sizeof(DexCache); } - void Init(const DexFile* dex_file, String* location, ObjectArray<String>* strings, - ObjectArray<Class>* types, PointerArray* methods, PointerArray* fields, + void Init(const DexFile* dex_file, + String* location, + GcRoot<String>* strings, + uint32_t num_strings, + GcRoot<Class>* resolved_types, + uint32_t num_resolved_types, + ArtMethod** resolved_methods, + uint32_t 
num_resolved_methods, + ArtField** resolved_fields, + uint32_t num_resolved_fields, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_); void Fixup(ArtMethod* trampoline, size_t pointer_size) @@ -65,6 +73,10 @@ class MANAGED DexCache FINAL : public Object { return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_); } + static MemberOffset ResolvedTypesOffset() { + return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_); + } + static MemberOffset ResolvedFieldsOffset() { return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_); } @@ -73,39 +85,31 @@ class MANAGED DexCache FINAL : public Object { return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_); } - size_t NumStrings() SHARED_REQUIRES(Locks::mutator_lock_) { - return GetStrings()->GetLength(); + static MemberOffset NumStringsOffset() { + return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_); } - size_t NumResolvedTypes() SHARED_REQUIRES(Locks::mutator_lock_) { - return GetResolvedTypes()->GetLength(); + static MemberOffset NumResolvedTypesOffset() { + return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_); } - size_t NumResolvedMethods() SHARED_REQUIRES(Locks::mutator_lock_) { - return GetResolvedMethods()->GetLength(); + static MemberOffset NumResolvedFieldsOffset() { + return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_); } - size_t NumResolvedFields() SHARED_REQUIRES(Locks::mutator_lock_) { - return GetResolvedFields()->GetLength(); + static MemberOffset NumResolvedMethodsOffset() { + return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_); } - String* GetResolvedString(uint32_t string_idx) SHARED_REQUIRES(Locks::mutator_lock_) { - return GetStrings()->Get(string_idx); - } + String* GetResolvedString(uint32_t string_idx) ALWAYS_INLINE + SHARED_REQUIRES(Locks::mutator_lock_); void SetResolvedString(uint32_t string_idx, String* resolved) ALWAYS_INLINE - SHARED_REQUIRES(Locks::mutator_lock_) { - // TODO default transaction support. 
- GetStrings()->Set(string_idx, resolved); - } + SHARED_REQUIRES(Locks::mutator_lock_); - Class* GetResolvedType(uint32_t type_idx) ALWAYS_INLINE - SHARED_REQUIRES(Locks::mutator_lock_) { - return GetResolvedTypes()->Get(type_idx); - } + Class* GetResolvedType(uint32_t type_idx) SHARED_REQUIRES(Locks::mutator_lock_); - void SetResolvedType(uint32_t type_idx, Class* resolved) - SHARED_REQUIRES(Locks::mutator_lock_); + void SetResolvedType(uint32_t type_idx, Class* resolved) SHARED_REQUIRES(Locks::mutator_lock_); ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); @@ -121,21 +125,36 @@ class MANAGED DexCache FINAL : public Object { ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_); - ObjectArray<String>* GetStrings() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { - return GetFieldObject<ObjectArray<String>>(StringsOffset()); + GcRoot<String>* GetStrings() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { + return GetFieldPtr<GcRoot<String>*>(StringsOffset()); + } + + GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { + return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset()); } - ObjectArray<Class>* GetResolvedTypes() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { - return GetFieldObject<ObjectArray<Class>>( - OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_)); + ArtMethod** GetResolvedMethods() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { + return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset()); } - PointerArray* GetResolvedMethods() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { - return GetFieldObject<PointerArray>(ResolvedMethodsOffset()); + ArtField** GetResolvedFields() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { + return GetFieldPtr<ArtField**>(ResolvedFieldsOffset()); } - PointerArray* GetResolvedFields() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { - return GetFieldObject<PointerArray>(ResolvedFieldsOffset()); + size_t NumStrings() SHARED_REQUIRES(Locks::mutator_lock_) { + return GetField32(NumStringsOffset()); + } + + size_t NumResolvedTypes() SHARED_REQUIRES(Locks::mutator_lock_) { + return GetField32(NumResolvedTypesOffset()); + } + + size_t NumResolvedMethods() SHARED_REQUIRES(Locks::mutator_lock_) { + return GetField32(NumResolvedMethodsOffset()); + } + + size_t NumResolvedFields() SHARED_REQUIRES(Locks::mutator_lock_) { + return GetField32(NumResolvedFieldsOffset()); } const DexFile* GetDexFile() ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) { @@ -147,17 +166,36 @@ class MANAGED DexCache FINAL : public Object { return SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file); } + // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField** + // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(), + // so they need to be public. + + template <typename PtrType> + static PtrType GetElementPtrSize(PtrType* ptr_array, size_t idx, size_t ptr_size); + + template <typename PtrType> + static void SetElementPtrSize(PtrType* ptr_array, size_t idx, PtrType ptr, size_t ptr_size); + private: + // Visit instance fields of the dex cache as well as its associated arrays. 
+ template <VerifyObjectFlags kVerifyFlags, typename Visitor> + void VisitReferences(mirror::Class* klass, const Visitor& visitor) + SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_); + HeapReference<Object> dex_; HeapReference<String> location_; - // Either an int array or long array based on runtime ISA since these arrays hold pointers. - HeapReference<PointerArray> resolved_fields_; - HeapReference<PointerArray> resolved_methods_; - HeapReference<ObjectArray<Class>> resolved_types_; - HeapReference<ObjectArray<String>> strings_; - uint64_t dex_file_; + uint64_t dex_file_; // const DexFile* + uint64_t resolved_fields_; // ArtField*, array with num_resolved_fields_ elements. + uint64_t resolved_methods_; // ArtMethod*, array with num_resolved_methods_ elements. + uint64_t resolved_types_; // GcRoot<Class>*, array with num_resolved_types_ elements. + uint64_t strings_; // GcRoot<String>*, array with num_strings_ elements. + uint32_t num_resolved_fields_; // Number of elements in the resolved_fields_ array. + uint32_t num_resolved_methods_; // Number of elements in the resolved_methods_ array. + uint32_t num_resolved_types_; // Number of elements in the resolved_types_ array. + uint32_t num_strings_; // Number of elements in the strings_ array. friend struct art::DexCacheOffsets; // for verifying offset information + friend class Object; // For VisitReferences DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache); }; diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc index 228fce5314..8fb860fa6b 100644 --- a/runtime/mirror/dex_cache_test.cc +++ b/runtime/mirror/dex_cache_test.cc @@ -43,19 +43,6 @@ TEST_F(DexCacheTest, Open) { EXPECT_EQ(java_lang_dex_file_->NumTypeIds(), dex_cache->NumResolvedTypes()); EXPECT_EQ(java_lang_dex_file_->NumMethodIds(), dex_cache->NumResolvedMethods()); EXPECT_EQ(java_lang_dex_file_->NumFieldIds(), dex_cache->NumResolvedFields()); - - EXPECT_LE(0, dex_cache->GetStrings()->GetLength()); - EXPECT_LE(0, dex_cache->GetResolvedTypes()->GetLength()); - EXPECT_LE(0, dex_cache->GetResolvedMethods()->GetLength()); - EXPECT_LE(0u, dex_cache->NumResolvedFields()); - - EXPECT_EQ(java_lang_dex_file_->NumStringIds(), - static_cast<uint32_t>(dex_cache->GetStrings()->GetLength())); - EXPECT_EQ(java_lang_dex_file_->NumTypeIds(), - static_cast<uint32_t>(dex_cache->GetResolvedTypes()->GetLength())); - EXPECT_EQ(java_lang_dex_file_->NumMethodIds(), - static_cast<uint32_t>(dex_cache->GetResolvedMethods()->GetLength())); - EXPECT_EQ(java_lang_dex_file_->NumFieldIds(), dex_cache->NumResolvedFields()); } } // namespace mirror diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h index e35ddccd0d..90180c545b 100644 --- a/runtime/mirror/object-inl.h +++ b/runtime/mirror/object-inl.h @@ -27,6 +27,7 @@ #include "class_flags.h" #include "class_linker.h" #include "class_loader-inl.h" +#include "dex_cache-inl.h" #include "lock_word-inl.h" #include "monitor.h" #include "object_array-inl.h" @@ -1006,6 +1007,17 @@ inline mirror::ClassLoader* Object::AsClassLoader() { return down_cast<mirror::ClassLoader*>(this); } +template<VerifyObjectFlags kVerifyFlags> +inline bool Object::IsDexCache() { + return GetClass<kVerifyFlags>()->IsDexCacheClass(); +} + +template<VerifyObjectFlags kVerifyFlags> +inline mirror::DexCache* Object::AsDexCache() { + DCHECK(IsDexCache<kVerifyFlags>()); + return down_cast<mirror::DexCache*>(this); +} + template <VerifyObjectFlags kVerifyFlags, typename Visitor, typename JavaLangRefVisitor> inline void 
Object::VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor) { @@ -1031,6 +1043,9 @@ inline void Object::VisitReferences(const Visitor& visitor, } else if ((class_flags & kClassFlagReference) != 0) { VisitInstanceFieldsReferences(klass, visitor); ref_visitor(klass, AsReference()); + } else if (class_flags == kClassFlagDexCache) { + mirror::DexCache* const dex_cache = AsDexCache<kVerifyFlags>(); + dex_cache->VisitReferences<kVerifyFlags>(klass, visitor); } else { mirror::ClassLoader* const class_loader = AsClassLoader<kVerifyFlags>(); class_loader->VisitReferences<kVerifyFlags>(klass, visitor); diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h index 3cec29cd43..50490bbcae 100644 --- a/runtime/mirror/object.h +++ b/runtime/mirror/object.h @@ -38,6 +38,7 @@ namespace mirror { class Array; class Class; class ClassLoader; +class DexCache; class FinalizerReference; template<class T> class ObjectArray; template<class T> class PrimitiveArray; @@ -162,6 +163,11 @@ class MANAGED LOCKABLE Object { template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> ClassLoader* AsClassLoader() SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + bool IsDexCache() SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> + DexCache* AsDexCache() SHARED_REQUIRES(Locks::mutator_lock_); + template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, ReadBarrierOption kReadBarrierOption = kWithReadBarrier> bool IsArrayInstance() SHARED_REQUIRES(Locks::mutator_lock_); diff --git a/runtime/native/java_lang_DexCache.cc b/runtime/native/java_lang_DexCache.cc index a2d9797114..994ccb1ad9 100644 --- a/runtime/native/java_lang_DexCache.cc +++ b/runtime/native/java_lang_DexCache.cc @@ -52,12 +52,14 @@ static jobject DexCache_getDexNative(JNIEnv* env, jobject javaDexCache) { static jobject DexCache_getResolvedType(JNIEnv* env, jobject javaDexCache, jint type_index) { ScopedFastNativeObjectAccess soa(env); mirror::DexCache* dex_cache = soa.Decode<mirror::DexCache*>(javaDexCache); + CHECK_LT(static_cast<size_t>(type_index), dex_cache->NumResolvedTypes()); return soa.AddLocalReference<jobject>(dex_cache->GetResolvedType(type_index)); } static jobject DexCache_getResolvedString(JNIEnv* env, jobject javaDexCache, jint string_index) { ScopedFastNativeObjectAccess soa(env); mirror::DexCache* dex_cache = soa.Decode<mirror::DexCache*>(javaDexCache); + CHECK_LT(static_cast<size_t>(string_index), dex_cache->NumStrings()); return soa.AddLocalReference<jobject>(dex_cache->GetResolvedString(string_index)); } @@ -65,6 +67,7 @@ static void DexCache_setResolvedType(JNIEnv* env, jobject javaDexCache, jint typ jobject type) { ScopedFastNativeObjectAccess soa(env); mirror::DexCache* dex_cache = soa.Decode<mirror::DexCache*>(javaDexCache); + CHECK_LT(static_cast<size_t>(type_index), dex_cache->NumResolvedTypes()); dex_cache->SetResolvedType(type_index, soa.Decode<mirror::Class*>(type)); } @@ -72,6 +75,7 @@ static void DexCache_setResolvedString(JNIEnv* env, jobject javaDexCache, jint s jobject string) { ScopedFastNativeObjectAccess soa(env); mirror::DexCache* dex_cache = soa.Decode<mirror::DexCache*>(javaDexCache); + CHECK_LT(static_cast<size_t>(string_index), dex_cache->NumStrings()); dex_cache->SetResolvedString(string_index, soa.Decode<mirror::String*>(string)); } diff --git a/runtime/reflection.cc b/runtime/reflection.cc index 2fe1e64fe7..019917c077 100644 --- a/runtime/reflection.cc +++ 
b/runtime/reflection.cc @@ -223,8 +223,11 @@ class ArgArray { for (size_t i = 1, args_offset = 0; i < shorty_len_; ++i, ++args_offset) { mirror::Object* arg = args->Get(args_offset); if (((shorty_[i] == 'L') && (arg != nullptr)) || ((arg == nullptr && shorty_[i] != 'L'))) { + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); mirror::Class* dst_class = - m->GetClassFromTypeIndex(classes->GetTypeItem(args_offset).type_idx_, true); + m->GetClassFromTypeIndex(classes->GetTypeItem(args_offset).type_idx_, + true /* resolve */, + pointer_size); if (UNLIKELY(arg == nullptr || !arg->InstanceOf(dst_class))) { ThrowIllegalArgumentException( StringPrintf("method %s argument %zd has type %s, got %s", @@ -356,9 +359,12 @@ static void CheckMethodArguments(JavaVMExt* vm, ArtMethod* m, uint32_t* args) } // TODO: If args contain object references, it may cause problems. Thread* const self = Thread::Current(); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); for (uint32_t i = 0; i < num_params; i++) { uint16_t type_idx = params->GetTypeItem(i).type_idx_; - mirror::Class* param_type = m->GetClassFromTypeIndex(type_idx, true); + mirror::Class* param_type = m->GetClassFromTypeIndex(type_idx, + true /* resolve*/, + pointer_size); if (param_type == nullptr) { CHECK(self->IsExceptionPending()); LOG(ERROR) << "Internal error: unresolvable type for argument type in JNI invoke: " diff --git a/compiler/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h index fec981a03c..4f662d5a8f 100644 --- a/compiler/utils/dex_cache_arrays_layout-inl.h +++ b/runtime/utils/dex_cache_arrays_layout-inl.h @@ -14,35 +14,52 @@ * limitations under the License. */ -#ifndef ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ -#define ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ +#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ +#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ #include "dex_cache_arrays_layout.h" #include "base/bit_utils.h" #include "base/logging.h" +#include "gc_root.h" #include "globals.h" -#include "mirror/array-inl.h" #include "primitive.h" namespace art { inline DexCacheArraysLayout::DexCacheArraysLayout(size_t pointer_size, const DexFile* dex_file) - : /* types_offset_ is always 0u */ - pointer_size_(pointer_size), - methods_offset_(types_offset_ + TypesSize(dex_file->NumTypeIds())), - strings_offset_(methods_offset_ + MethodsSize(dex_file->NumMethodIds())), - fields_offset_(strings_offset_ + StringsSize(dex_file->NumStringIds())), - size_(fields_offset_ + FieldsSize(dex_file->NumFieldIds())) { + : pointer_size_(pointer_size), + /* types_offset_ is always 0u, so it's constexpr */ + methods_offset_(types_offset_ + + RoundUp(TypesSize(dex_file->NumTypeIds()), MethodsAlignment())), + strings_offset_(methods_offset_ + + RoundUp(MethodsSize(dex_file->NumMethodIds()), StringsAlignment())), + fields_offset_(strings_offset_ + + RoundUp(StringsSize(dex_file->NumStringIds()), FieldsAlignment())), + size_(fields_offset_ + + RoundUp(FieldsSize(dex_file->NumFieldIds()), Alignment())) { DCHECK(ValidPointerSize(pointer_size)) << pointer_size; } +inline size_t DexCacheArraysLayout::Alignment() const { + // GcRoot<> alignment is 4, i.e. lower than or equal to the pointer alignment. 
+ static_assert(alignof(GcRoot<mirror::Class>) == 4, "Expecting alignof(GcRoot<>) == 4"); + static_assert(alignof(GcRoot<mirror::String>) == 4, "Expecting alignof(GcRoot<>) == 4"); + DCHECK(pointer_size_ == 4u || pointer_size_ == 8u); + // Pointer alignment is the same as pointer size. + return pointer_size_; +} + inline size_t DexCacheArraysLayout::TypeOffset(uint32_t type_idx) const { - return types_offset_ + ElementOffset(sizeof(mirror::HeapReference<mirror::Class>), type_idx); + return types_offset_ + ElementOffset(sizeof(GcRoot<mirror::Class>), type_idx); } inline size_t DexCacheArraysLayout::TypesSize(size_t num_elements) const { - return ArraySize(sizeof(mirror::HeapReference<mirror::Class>), num_elements); + return ArraySize(sizeof(GcRoot<mirror::Class>), num_elements); +} + +inline size_t DexCacheArraysLayout::TypesAlignment() const { + return alignof(GcRoot<mirror::Class>); } inline size_t DexCacheArraysLayout::MethodOffset(uint32_t method_idx) const { @@ -53,12 +70,20 @@ inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const { return ArraySize(pointer_size_, num_elements); } +inline size_t DexCacheArraysLayout::MethodsAlignment() const { + return pointer_size_; +} + inline size_t DexCacheArraysLayout::StringOffset(uint32_t string_idx) const { - return strings_offset_ + ElementOffset(sizeof(mirror::HeapReference<mirror::String>), string_idx); + return strings_offset_ + ElementOffset(sizeof(GcRoot<mirror::String>), string_idx); } inline size_t DexCacheArraysLayout::StringsSize(size_t num_elements) const { - return ArraySize(sizeof(mirror::HeapReference<mirror::String>), num_elements); + return ArraySize(sizeof(GcRoot<mirror::String>), num_elements); +} + +inline size_t DexCacheArraysLayout::StringsAlignment() const { + return alignof(GcRoot<mirror::String>); } inline size_t DexCacheArraysLayout::FieldOffset(uint32_t field_idx) const { @@ -69,16 +94,18 @@ inline size_t DexCacheArraysLayout::FieldsSize(size_t num_elements) const { return ArraySize(pointer_size_, num_elements); } +inline size_t DexCacheArraysLayout::FieldsAlignment() const { + return pointer_size_; +} + inline size_t DexCacheArraysLayout::ElementOffset(size_t element_size, uint32_t idx) { - return mirror::Array::DataOffset(element_size).Uint32Value() + element_size * idx; + return element_size * idx; } inline size_t DexCacheArraysLayout::ArraySize(size_t element_size, uint32_t num_elements) { - size_t array_size = mirror::ComputeArraySize(num_elements, ComponentSizeShiftWidth(element_size)); - DCHECK_NE(array_size, 0u); // No overflow expected for dex cache arrays. - return RoundUp(array_size, kObjectAlignment); + return element_size * num_elements; } } // namespace art -#endif // ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ +#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_ diff --git a/compiler/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h index 2a109bd11e..d50be5ac03 100644 --- a/compiler/utils/dex_cache_arrays_layout.h +++ b/runtime/utils/dex_cache_arrays_layout.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ -#define ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ +#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ +#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ namespace art { @@ -47,6 +47,8 @@ class DexCacheArraysLayout { return size_; } + size_t Alignment() const; + size_t TypesOffset() const { return types_offset_; } @@ -55,6 +57,8 @@ class DexCacheArraysLayout { size_t TypesSize(size_t num_elements) const; + size_t TypesAlignment() const; + size_t MethodsOffset() const { return methods_offset_; } @@ -63,6 +67,8 @@ class DexCacheArraysLayout { size_t MethodsSize(size_t num_elements) const; + size_t MethodsAlignment() const; + size_t StringsOffset() const { return strings_offset_; } @@ -71,6 +77,8 @@ class DexCacheArraysLayout { size_t StringsSize(size_t num_elements) const; + size_t StringsAlignment() const; + size_t FieldsOffset() const { return fields_offset_; } @@ -79,6 +87,8 @@ class DexCacheArraysLayout { size_t FieldsSize(size_t num_elements) const; + size_t FieldsAlignment() const; + private: static constexpr size_t types_offset_ = 0u; const size_t pointer_size_; // Must be first for construction initialization order. @@ -87,6 +97,8 @@ class DexCacheArraysLayout { const size_t fields_offset_; const size_t size_; + static size_t Alignment(size_t pointer_size); + static size_t ElementOffset(size_t element_size, uint32_t idx); static size_t ArraySize(size_t element_size, uint32_t num_elements); @@ -94,4 +106,4 @@ class DexCacheArraysLayout { } // namespace art -#endif // ART_COMPILER_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ +#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_ diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc index 4f921bdbc8..35cc4e3db9 100644 --- a/runtime/verifier/method_verifier.cc +++ b/runtime/verifier/method_verifier.cc @@ -2541,8 +2541,9 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) { ArtMethod* called_method = VerifyInvocationArgs(inst, METHOD_VIRTUAL, is_range, is_super); const RegType* return_type = nullptr; if (called_method != nullptr) { - StackHandleScope<1> hs(self_); - mirror::Class* return_type_class = called_method->GetReturnType(can_load_classes_); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + mirror::Class* return_type_class = called_method->GetReturnType(can_load_classes_, + pointer_size); if (return_type_class != nullptr) { return_type = &FromClass(called_method->GetReturnTypeDescriptor(), return_type_class, @@ -2583,8 +2584,9 @@ bool MethodVerifier::CodeFlowVerifyInstruction(uint32_t* start_guess) { } else { is_constructor = called_method->IsConstructor(); return_type_descriptor = called_method->GetReturnTypeDescriptor(); - StackHandleScope<1> hs(self_); - mirror::Class* return_type_class = called_method->GetReturnType(can_load_classes_); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + mirror::Class* return_type_class = called_method->GetReturnType(can_load_classes_, + pointer_size); if (return_type_class != nullptr) { return_type = &FromClass(return_type_descriptor, return_type_class, @@ -4494,7 +4496,9 @@ InstructionFlags* MethodVerifier::CurrentInsnFlags() { const RegType& MethodVerifier::GetMethodReturnType() { if (return_type_ == nullptr) { if (mirror_method_ != nullptr) { - mirror::Class* return_type_class = mirror_method_->GetReturnType(can_load_classes_); + size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); + 
mirror::Class* return_type_class = mirror_method_->GetReturnType(can_load_classes_, + pointer_size); if (return_type_class != nullptr) { return_type_ = &FromClass(mirror_method_->GetReturnTypeDescriptor(), return_type_class, diff --git a/test/497-inlining-and-class-loader/clear_dex_cache.cc b/test/497-inlining-and-class-loader/clear_dex_cache.cc index f9b33a2874..50d1a6361d 100644 --- a/test/497-inlining-and-class-loader/clear_dex_cache.cc +++ b/test/497-inlining-and-class-loader/clear_dex_cache.cc @@ -24,20 +24,45 @@ namespace art { namespace { -extern "C" JNIEXPORT jobject JNICALL Java_Main_cloneResolvedMethods(JNIEnv*, jclass, jclass cls) { +extern "C" JNIEXPORT jobject JNICALL Java_Main_cloneResolvedMethods(JNIEnv* env, + jclass, + jclass cls) { ScopedObjectAccess soa(Thread::Current()); - return soa.Vm()->AddGlobalRef( - soa.Self(), - soa.Decode<mirror::Class*>(cls)->GetDexCache()->GetResolvedMethods()->Clone(soa.Self())); + mirror::DexCache* dex_cache = soa.Decode<mirror::Class*>(cls)->GetDexCache(); + size_t num_methods = dex_cache->NumResolvedMethods(); + ArtMethod** methods = dex_cache->GetResolvedMethods(); + CHECK_EQ(num_methods != 0u, methods != nullptr); + if (num_methods == 0u) { + return nullptr; + } + jarray array; + if (sizeof(void*) == 4) { + array = env->NewIntArray(num_methods); + } else { + array = env->NewLongArray(num_methods); + } + CHECK(array != nullptr); + mirror::PointerArray* pointer_array = soa.Decode<mirror::PointerArray*>(array); + for (size_t i = 0; i != num_methods; ++i) { + ArtMethod* method = mirror::DexCache::GetElementPtrSize(methods, i, sizeof(void*)); + pointer_array->SetElementPtrSize(i, method, sizeof(void*)); + } + return array; } extern "C" JNIEXPORT void JNICALL Java_Main_restoreResolvedMethods( JNIEnv*, jclass, jclass cls, jobject old_cache) { ScopedObjectAccess soa(Thread::Current()); - mirror::PointerArray* now = soa.Decode<mirror::Class*>(cls)->GetDexCache()->GetResolvedMethods(); + mirror::DexCache* dex_cache = soa.Decode<mirror::Class*>(cls)->GetDexCache(); + size_t num_methods = dex_cache->NumResolvedMethods(); + ArtMethod** methods = soa.Decode<mirror::Class*>(cls)->GetDexCache()->GetResolvedMethods(); + CHECK_EQ(num_methods != 0u, methods != nullptr); mirror::PointerArray* old = soa.Decode<mirror::PointerArray*>(old_cache); - for (size_t i = 0, e = old->GetLength(); i < e; ++i) { - now->SetElementPtrSize(i, old->GetElementPtrSize<void*>(i, sizeof(void*)), sizeof(void*)); + CHECK_EQ(methods != nullptr, old != nullptr); + CHECK_EQ(num_methods, static_cast<size_t>(old->GetLength())); + for (size_t i = 0; i != num_methods; ++i) { + ArtMethod* method = old->GetElementPtrSize<ArtMethod*>(i, sizeof(void*)); + mirror::DexCache::SetElementPtrSize(methods, i, method, sizeof(void*)); } } |
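The updated 497 test above snapshots the native resolved-methods array into a Java int[] or long[] chosen by sizeof(void*), then writes the pointers back through DexCache::SetElementPtrSize. The same round trip can be written against a plain native buffer; this is only a sketch built on the accessors introduced in this CL, the headers listed are assumed to be on the include path, and the helper names are illustrative.

#include <vector>

#include "art_method.h"
#include "base/logging.h"
#include "mirror/dex_cache-inl.h"

namespace art {

// Copies the resolved-method pointers out of the dex cache into a std::vector.
static std::vector<ArtMethod*> SnapshotResolvedMethods(mirror::DexCache* dex_cache)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ArtMethod** methods = dex_cache->GetResolvedMethods();
  size_t num_methods = dex_cache->NumResolvedMethods();
  std::vector<ArtMethod*> snapshot(num_methods);
  for (size_t i = 0; i != num_methods; ++i) {
    // Reads a 4- or 8-byte slot depending on the runtime pointer size.
    snapshot[i] = mirror::DexCache::GetElementPtrSize(methods, i, sizeof(void*));
  }
  return snapshot;
}

// Writes a previously taken snapshot back into the dex cache.
static void RestoreResolvedMethods(mirror::DexCache* dex_cache,
                                   const std::vector<ArtMethod*>& snapshot)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ArtMethod** methods = dex_cache->GetResolvedMethods();
  CHECK_EQ(snapshot.size(), dex_cache->NumResolvedMethods());
  for (size_t i = 0; i != snapshot.size(); ++i) {
    mirror::DexCache::SetElementPtrSize(methods, i, snapshot[i], sizeof(void*));
  }
}

}  // namespace art

This is possible from a test or a tool precisely because the CL makes Get/SetElementPtrSize() static and public, per the comment in dex_cache.h, instead of routing the raw ArtMethod** and ArtField** arrays through mirror::PointerArray.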