-rw-r--r-- | dex2oat/linker/image_writer.cc | 48
-rw-r--r-- | dex2oat/linker/image_writer.h | 4
-rw-r--r-- | openjdkjvmti/ti_redefine.cc | 4
-rw-r--r-- | runtime/art_method.h | 4
-rw-r--r-- | runtime/base/locks.cc | 5
-rw-r--r-- | runtime/base/locks.h | 3
-rw-r--r-- | runtime/class_linker-inl.h | 58
-rw-r--r-- | runtime/class_linker.cc | 26
-rw-r--r-- | runtime/class_linker.h | 5
-rw-r--r-- | runtime/class_linker_test.cc | 5
-rw-r--r-- | runtime/interpreter/mterp/mterp.cc | 7
-rw-r--r-- | runtime/mirror/dex_cache-inl.h | 217
-rw-r--r-- | runtime/mirror/dex_cache.cc | 167
-rw-r--r-- | runtime/mirror/dex_cache.h | 66
-rw-r--r-- | runtime/mirror/dex_cache_test.cc | 41
15 files changed, 190 insertions, 470 deletions
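Taken together, the change makes the DexCache native arrays (strings, types, methods, fields, method types, call sites) lazily allocated: they start out null, readers treat a null array as a cache miss, and the first SetResolvedX() allocates the array from the defining class loader's LinearAlloc while holding the new Locks::dex_cache_lock_. The following is a minimal, self-contained sketch of that lock-serialized allocate-then-publish pattern; LazyNativeArray and its members are invented names for illustration only, and the real DexCache::AllocArray allocates from LinearAlloc and publishes the pointer and length through volatile object fields rather than std::atomic members.

// Minimal sketch of the lazy, lock-serialized allocation that the new
// DexCache::AllocArray performs (illustrative types, not the ART code).
#include <algorithm>
#include <atomic>
#include <cstddef>
#include <iostream>
#include <mutex>

template <typename T, std::size_t kMaxCacheSize>
class LazyNativeArray {
 public:
  explicit LazyNativeArray(std::size_t num_ids)
      : capacity_(std::min(num_ids, kMaxCacheSize)) {}

  ~LazyNativeArray() { delete[] array_.load(std::memory_order_relaxed); }

  // Readers never take the lock: a null pointer means nothing has been
  // resolved yet, and callers treat it as a cache miss.
  T* GetIfAllocated() const { return array_.load(std::memory_order_acquire); }

  // The first writer allocates; the mutex only serializes the allocation.
  T* GetOrAllocate() {
    if (capacity_ == 0) {
      return nullptr;
    }
    std::lock_guard<std::mutex> lock(alloc_mutex_);
    T* array = array_.load(std::memory_order_relaxed);
    if (array != nullptr) {
      return array;  // Another thread allocated the array first.
    }
    array = new T[capacity_]();                      // Value-initialize the slots.
    array_.store(array, std::memory_order_release);  // Publish only after init.
    return array;
  }

  std::size_t capacity() const { return capacity_; }

 private:
  const std::size_t capacity_;
  std::atomic<T*> array_{nullptr};
  std::mutex alloc_mutex_;
};

int main() {
  LazyNativeArray<int, 1024> cache(/*num_ids=*/4096);
  std::cout << (cache.GetIfAllocated() == nullptr) << "\n";  // 1: still unallocated.
  cache.GetOrAllocate()[7] = 42;                             // First write allocates.
  std::cout << cache.GetIfAllocated()[7] << "\n";            // 42
  return 0;
}

The read-side counterpart of this pattern is visible throughout the dex_cache-inl.h hunks below: each GetResolvedX() now checks the array pointer for null before indexing into it, and each SetResolvedX() calls AllocArray on that miss.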
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc index d8973d4eda..f76190c1bf 100644 --- a/dex2oat/linker/image_writer.cc +++ b/dex2oat/linker/image_writer.cc @@ -1109,49 +1109,6 @@ void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) { Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor); } -void ImageWriter::ClearDexCache(ObjPtr<mirror::DexCache> dex_cache) { - // Clear methods. - mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods(); - for (size_t slot_idx = 0, num = dex_cache->NumResolvedMethods(); slot_idx != num; ++slot_idx) { - mirror::MethodDexCachePair invalid(nullptr, - mirror::MethodDexCachePair::InvalidIndexForSlot(slot_idx)); - mirror::DexCache::SetNativePair(resolved_methods, slot_idx, invalid); - } - // Clear fields. - mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields(); - for (size_t slot_idx = 0, num = dex_cache->NumResolvedFields(); slot_idx != num; ++slot_idx) { - mirror::FieldDexCachePair invalid(nullptr, - mirror::FieldDexCachePair::InvalidIndexForSlot(slot_idx)); - mirror::DexCache::SetNativePair(resolved_fields, slot_idx, invalid); - } - // Clear types. - mirror::TypeDexCacheType* resolved_types = dex_cache->GetResolvedTypes(); - for (size_t slot_idx = 0, num = dex_cache->NumResolvedTypes(); slot_idx != num; ++slot_idx) { - mirror::TypeDexCachePair invalid(nullptr, - mirror::TypeDexCachePair::InvalidIndexForSlot(slot_idx)); - resolved_types[slot_idx].store(invalid, std::memory_order_relaxed); - } - // Clear strings. - mirror::StringDexCacheType* resolved_strings = dex_cache->GetStrings(); - for (size_t slot_idx = 0, num = dex_cache->NumStrings(); slot_idx != num; ++slot_idx) { - mirror::StringDexCachePair invalid(nullptr, - mirror::StringDexCachePair::InvalidIndexForSlot(slot_idx)); - resolved_strings[slot_idx].store(invalid, std::memory_order_relaxed); - } - // Clear method types. - mirror::MethodTypeDexCacheType* resolved_method_types = dex_cache->GetResolvedMethodTypes(); - size_t num_resolved_method_types = dex_cache->NumResolvedMethodTypes(); - for (size_t slot_idx = 0; slot_idx != num_resolved_method_types; ++slot_idx) { - mirror::MethodTypeDexCachePair invalid( - nullptr, mirror::MethodTypeDexCachePair::InvalidIndexForSlot(slot_idx)); - resolved_method_types[slot_idx].store(invalid, std::memory_order_relaxed); - } - // Clear call sites. - std::fill_n(dex_cache->GetResolvedCallSites(), - dex_cache->NumResolvedCallSites(), - GcRoot<mirror::CallSite>(nullptr)); -} - void ImageWriter::PruneNonImageClasses() { Runtime* runtime = Runtime::Current(); ClassLinker* class_linker = runtime->GetClassLinker(); @@ -1185,7 +1142,7 @@ void ImageWriter::PruneNonImageClasses() { // Completely clear DexCaches. std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self); for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) { - ClearDexCache(dex_cache); + dex_cache->ResetNativeArrays(); } // Drop the array class cache in the ClassLinker, as these are roots holding those classes live. 
@@ -3146,7 +3103,8 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) { ArtMethod* src_method = src->GetArtMethod(); CopyAndFixupPointer(dest, mirror::Executable::ArtMethodOffset(), src_method); } else if (klass == GetClassRoot<mirror::DexCache>(class_roots)) { - down_cast<mirror::DexCache*>(copy)->ResetNativeFields(); + down_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr); + down_cast<mirror::DexCache*>(copy)->ResetNativeArrays(); } else if (klass->IsClassLoaderClass()) { mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy); // If src is a ClassLoader, set the class table to null so that it gets recreated by the diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h index 1321ced67d..a0c2665952 100644 --- a/dex2oat/linker/image_writer.h +++ b/dex2oat/linker/image_writer.h @@ -446,10 +446,6 @@ class ImageWriter final { // Remove unwanted classes from various roots. void PruneNonImageClasses() REQUIRES_SHARED(Locks::mutator_lock_); - // Remove everything from the DexCache. - void ClearDexCache(ObjPtr<mirror::DexCache> dex_cache) - REQUIRES_SHARED(Locks::mutator_lock_); - // Find dex caches for pruning or preloading. std::vector<ObjPtr<mirror::DexCache>> FindDexCaches(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc index 12d00ad052..3e841e76e9 100644 --- a/openjdkjvmti/ti_redefine.cc +++ b/openjdkjvmti/ti_redefine.cc @@ -759,9 +759,7 @@ art::mirror::DexCache* Redefiner::ClassRedefinition::CreateNewDexCache( } art::WriterMutexLock mu(driver_->self_, *art::Locks::dex_lock_); cache->SetLocation(location.Get()); - cache->InitializeNativeFields(dex_file_.get(), - loader.IsNull() ? driver_->runtime_->GetLinearAlloc() - : loader->GetAllocator()); + cache->Initialize(dex_file_.get(), loader.Get()); return cache.Get(); } diff --git a/runtime/art_method.h b/runtime/art_method.h index 31b81d40df..2469d089ce 100644 --- a/runtime/art_method.h +++ b/runtime/art_method.h @@ -63,10 +63,6 @@ class Object; template <typename MirrorType> class ObjectArray; class PointerArray; class String; - -template <typename T> struct NativeDexCachePair; -using MethodDexCachePair = NativeDexCachePair<ArtMethod>; -using MethodDexCacheType = std::atomic<MethodDexCachePair>; } // namespace mirror class ArtMethod final { diff --git a/runtime/base/locks.cc b/runtime/base/locks.cc index 7404d0d4fc..e53007316a 100644 --- a/runtime/base/locks.cc +++ b/runtime/base/locks.cc @@ -74,6 +74,7 @@ Mutex* Locks::user_code_suspension_lock_ = nullptr; Uninterruptible Roles::uninterruptible_; ReaderWriterMutex* Locks::jni_globals_lock_ = nullptr; Mutex* Locks::jni_weak_globals_lock_ = nullptr; +Mutex* Locks::dex_cache_lock_ = nullptr; ReaderWriterMutex* Locks::dex_lock_ = nullptr; Mutex* Locks::native_debug_interface_lock_ = nullptr; ReaderWriterMutex* Locks::jni_id_lock_ = nullptr; @@ -250,6 +251,10 @@ void Locks::Init() { DCHECK(dex_lock_ == nullptr); dex_lock_ = new ReaderWriterMutex("ClassLinker dex lock", current_lock_level); + UPDATE_CURRENT_LOCK_LEVEL(kDexCacheLock); + DCHECK(dex_cache_lock_ == nullptr); + dex_cache_lock_ = new Mutex("DexCache lock", current_lock_level); + UPDATE_CURRENT_LOCK_LEVEL(kOatFileManagerLock); DCHECK(oat_file_manager_lock_ == nullptr); oat_file_manager_lock_ = new ReaderWriterMutex("OatFile manager lock", current_lock_level); diff --git a/runtime/base/locks.h b/runtime/base/locks.h index 7008539154..25e503cbdc 100644 --- a/runtime/base/locks.h +++ b/runtime/base/locks.h @@ -97,6 
+97,7 @@ enum LockLevel : uint8_t { kTracingStreamingLock, kClassLoaderClassesLock, kDefaultMutexLevel, + kDexCacheLock, kDexLock, kMarkSweepLargeObjectLock, kJdwpObjectRegistryLock, @@ -290,6 +291,8 @@ class Locks { static ReaderWriterMutex* dex_lock_ ACQUIRED_AFTER(modify_ldt_lock_); + static Mutex* dex_cache_lock_ ACQUIRED_AFTER(dex_lock_); + // Guards opened oat files in OatFileManager. static ReaderWriterMutex* oat_file_manager_lock_ ACQUIRED_AFTER(dex_lock_); diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h index 69f5a77a25..f45ccb5929 100644 --- a/runtime/class_linker-inl.h +++ b/runtime/class_linker-inl.h @@ -70,10 +70,7 @@ inline ObjPtr<mirror::String> ClassLinker::ResolveString(dex::StringIndex string ArtField* referrer) { Thread::PoisonObjectPointersIfDebug(); DCHECK(!Thread::Current()->IsExceptionPending()); - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. - ObjPtr<mirror::String> resolved = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedString(string_idx); + ObjPtr<mirror::String> resolved = referrer->GetDexCache()->GetResolvedString(string_idx); if (resolved == nullptr) { resolved = DoResolveString(string_idx, referrer->GetDexCache()); } @@ -84,10 +81,7 @@ inline ObjPtr<mirror::String> ClassLinker::ResolveString(dex::StringIndex string ArtMethod* referrer) { Thread::PoisonObjectPointersIfDebug(); DCHECK(!Thread::Current()->IsExceptionPending()); - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. - ObjPtr<mirror::String> resolved = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedString(string_idx); + ObjPtr<mirror::String> resolved = referrer->GetDexCache()->GetResolvedString(string_idx); if (resolved == nullptr) { resolved = DoResolveString(string_idx, referrer->GetDexCache()); } @@ -122,10 +116,8 @@ inline ObjPtr<mirror::Class> ClassLinker::ResolveType(dex::TypeIndex type_idx, Thread::Current()->PoisonObjectPointers(); } DCHECK(!Thread::Current()->IsExceptionPending()); - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. ObjPtr<mirror::Class> resolved_type = - referrer->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetResolvedType(type_idx); + referrer->GetDexCache<kDefaultVerifyFlags>()->GetResolvedType(type_idx); if (resolved_type == nullptr) { resolved_type = DoResolveType(type_idx, referrer); } @@ -136,10 +128,7 @@ inline ObjPtr<mirror::Class> ClassLinker::ResolveType(dex::TypeIndex type_idx, ArtField* referrer) { Thread::PoisonObjectPointersIfDebug(); DCHECK(!Thread::Current()->IsExceptionPending()); - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. 
- ObjPtr<mirror::Class> resolved_type = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedType(type_idx); + ObjPtr<mirror::Class> resolved_type = referrer->GetDexCache()->GetResolvedType(type_idx); if (UNLIKELY(resolved_type == nullptr)) { resolved_type = DoResolveType(type_idx, referrer); } @@ -150,10 +139,7 @@ inline ObjPtr<mirror::Class> ClassLinker::ResolveType(dex::TypeIndex type_idx, ArtMethod* referrer) { Thread::PoisonObjectPointersIfDebug(); DCHECK(!Thread::Current()->IsExceptionPending()); - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. - ObjPtr<mirror::Class> resolved_type = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedType(type_idx); + ObjPtr<mirror::Class> resolved_type = referrer->GetDexCache()->GetResolvedType(type_idx); if (UNLIKELY(resolved_type == nullptr)) { resolved_type = DoResolveType(type_idx, referrer); } @@ -174,10 +160,8 @@ inline ObjPtr<mirror::Class> ClassLinker::ResolveType(dex::TypeIndex type_idx, inline ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(dex::TypeIndex type_idx, ObjPtr<mirror::Class> referrer) { - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. ObjPtr<mirror::Class> type = - referrer->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetResolvedType(type_idx); + referrer->GetDexCache<kDefaultVerifyFlags>()->GetResolvedType(type_idx); if (type == nullptr) { type = DoLookupResolvedType(type_idx, referrer); } @@ -186,10 +170,7 @@ inline ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(dex::TypeIndex type inline ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(dex::TypeIndex type_idx, ArtField* referrer) { - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. - ObjPtr<mirror::Class> type = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedType(type_idx); + ObjPtr<mirror::Class> type = referrer->GetDexCache()->GetResolvedType(type_idx); if (type == nullptr) { type = DoLookupResolvedType(type_idx, referrer->GetDeclaringClass()); } @@ -198,10 +179,7 @@ inline ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(dex::TypeIndex type inline ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(dex::TypeIndex type_idx, ArtMethod* referrer) { - // We do not need the read barrier for getting the DexCache for the initial resolved type - // lookup as both from-space and to-space copies point to the same native resolved types array. - ObjPtr<mirror::Class> type = - referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedType(type_idx); + ObjPtr<mirror::Class> type = referrer->GetDexCache()->GetResolvedType(type_idx); if (type == nullptr) { type = DoLookupResolvedType(type_idx, referrer->GetDeclaringClass()); } @@ -307,10 +285,7 @@ inline ArtMethod* ClassLinker::GetResolvedMethod(uint32_t method_idx, ArtMethod* // lookup in the context of the original method from where it steals the code. // However, we delay the GetInterfaceMethodIfProxy() until needed. 
DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor()); - // We do not need the read barrier for getting the DexCache for the initial resolved method - // lookup as both from-space and to-space copies point to the same native resolved methods array. - ArtMethod* resolved_method = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedMethod( - method_idx); + ArtMethod* resolved_method = referrer->GetDexCache()->GetResolvedMethod(method_idx); if (resolved_method == nullptr) { return nullptr; } @@ -350,10 +325,7 @@ inline ArtMethod* ClassLinker::ResolveMethod(Thread* self, // However, we delay the GetInterfaceMethodIfProxy() until needed. DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor()); Thread::PoisonObjectPointersIfDebug(); - // We do not need the read barrier for getting the DexCache for the initial resolved method - // lookup as both from-space and to-space copies point to the same native resolved methods array. - ArtMethod* resolved_method = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedMethod( - method_idx); + ArtMethod* resolved_method = referrer->GetDexCache()->GetResolvedMethod(method_idx); DCHECK(resolved_method == nullptr || !resolved_method->IsRuntimeMethod()); if (UNLIKELY(resolved_method == nullptr)) { referrer = referrer->GetInterfaceMethodIfProxy(image_pointer_size_); @@ -418,10 +390,7 @@ inline ArtMethod* ClassLinker::ResolveMethod(Thread* self, inline ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx, ArtMethod* referrer, bool is_static) { - // We do not need the read barrier for getting the DexCache for the initial resolved field - // lookup as both from-space and to-space copies point to the same native resolved fields array. - ArtField* field = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedField( - field_idx); + ArtField* field = referrer->GetDexCache()->GetResolvedField(field_idx); if (field == nullptr) { ObjPtr<mirror::ClassLoader> class_loader = referrer->GetDeclaringClass()->GetClassLoader(); field = LookupResolvedField(field_idx, referrer->GetDexCache(), class_loader, is_static); @@ -433,10 +402,7 @@ inline ArtField* ClassLinker::ResolveField(uint32_t field_idx, ArtMethod* referrer, bool is_static) { Thread::PoisonObjectPointersIfDebug(); - // We do not need the read barrier for getting the DexCache for the initial resolved field - // lookup as both from-space and to-space copies point to the same native resolved fields array. - ArtField* resolved_field = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedField( - field_idx); + ArtField* resolved_field = referrer->GetDexCache()->GetResolvedField(field_idx); if (UNLIKELY(resolved_field == nullptr)) { StackHandleScope<2> hs(Thread::Current()); ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass(); diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index b98708ebe5..0fe8caa49e 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -1937,13 +1937,10 @@ bool ClassLinker::AddImageSpace( return false; } - LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get()); - DCHECK(linear_alloc != nullptr); - DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image); { - // Native fields are all null. Initialize them and allocate native memory. + // Native fields are all null. Initialize them. 
WriterMutexLock mu(self, *Locks::dex_lock_); - dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc); + dex_cache->Initialize(dex_file.get(), class_loader.Get()); } if (!app_image) { // Register dex files, keep track of existing ones that are conflicts. @@ -2404,13 +2401,14 @@ ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& return dex_cache.Get(); } -ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self, - const DexFile& dex_file, - LinearAlloc* linear_alloc) { +ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache( + Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) { + StackHandleScope<1> hs(self); + Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader)); ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file); if (dex_cache != nullptr) { WriterMutexLock mu(self, *Locks::dex_lock_); - dex_cache->InitializeNativeFields(&dex_file, linear_alloc); + dex_cache->Initialize(&dex_file, h_class_loader.Get()); } return dex_cache; } @@ -3845,10 +3843,8 @@ void ClassLinker::LoadMethod(const DexFile& dex_file, } void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) { - ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache( - self, - *dex_file, - Runtime::Current()->GetLinearAlloc()); + ObjPtr<mirror::DexCache> dex_cache = + AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr); CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation(); AppendToBootClassPath(dex_file, dex_cache); } @@ -4038,10 +4034,10 @@ ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file, const DexCacheData* old_data = FindDexCacheDataLocked(dex_file); old_dex_cache = DecodeDexCacheLocked(self, old_data); if (old_dex_cache == nullptr && h_dex_cache != nullptr) { - // Do InitializeNativeFields while holding dex lock to make sure two threads don't call it + // Do Initialize while holding dex lock to make sure two threads don't call it // at the same time with the same dex cache. Since the .bss is shared this can cause failing // DCHECK that the arrays are null. - h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc); + h_dex_cache->Initialize(&dex_file, h_class_loader.Get()); RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get()); } if (old_dex_cache != nullptr) { diff --git a/runtime/class_linker.h b/runtime/class_linker.h index 95dc6cfaff..d1212a3e77 100644 --- a/runtime/class_linker.h +++ b/runtime/class_linker.h @@ -93,9 +93,6 @@ class MethodHandlesLookup; class MethodType; template<class T> class ObjectArray; class StackTraceElement; -template <typename T> struct NativeDexCachePair; -using MethodDexCachePair = NativeDexCachePair<ArtMethod>; -using MethodDexCacheType = std::atomic<MethodDexCachePair>; } // namespace mirror class ClassVisitor { @@ -952,7 +949,7 @@ class ClassLinker { // Used for tests and AppendToBootClassPath. 
ObjPtr<mirror::DexCache> AllocAndInitializeDexCache(Thread* self, const DexFile& dex_file, - LinearAlloc* linear_alloc) + ObjPtr<mirror::ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::dex_lock_) REQUIRES(!Roles::uninterruptible_); diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index c561c4d591..53908d8c0b 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -1544,8 +1544,9 @@ TEST_F(ClassLinkerTest, RegisterDexFileName) { nullptr, nullptr)); // Make a copy of the dex cache with changed name. - LinearAlloc* alloc = Runtime::Current()->GetLinearAlloc(); - dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(), *dex_file, alloc)); + dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(), + *dex_file, + /* class_loader= */ nullptr)); DCHECK_EQ(dex_cache->GetLocation()->CompareTo(location.Get()), 0); { WriterMutexLock mu(soa.Self(), *Locks::dex_lock_); diff --git a/runtime/interpreter/mterp/mterp.cc b/runtime/interpreter/mterp/mterp.cc index 5927c92555..c8506a09f6 100644 --- a/runtime/interpreter/mterp/mterp.cc +++ b/runtime/interpreter/mterp/mterp.cc @@ -763,11 +763,8 @@ ALWAYS_INLINE bool MterpFieldAccessFast(Instruction* inst, // This effectively inlines the fast path from ArtMethod::GetDexCache. ArtMethod* referrer = shadow_frame->GetMethod(); if (LIKELY(!referrer->IsObsolete() && !do_access_checks)) { - // Avoid read barriers, since we need only the pointer to the native (non-movable) - // DexCache field array which we can get even through from-space objects. - ObjPtr<mirror::Class> klass = referrer->GetDeclaringClass<kWithoutReadBarrier>(); - ObjPtr<mirror::DexCache> dex_cache = - klass->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>(); + ObjPtr<mirror::Class> klass = referrer->GetDeclaringClass(); + ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache<kDefaultVerifyFlags>(); // Try to find the desired field in DexCache. uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c(); diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h index 1ca42024f4..730bc478be 100644 --- a/runtime/mirror/dex_cache-inl.h +++ b/runtime/mirror/dex_cache-inl.h @@ -28,6 +28,7 @@ #include "class_linker.h" #include "dex/dex_file.h" #include "gc_root-inl.h" +#include "linear_alloc.h" #include "mirror/call_site.h" #include "mirror/class.h" #include "mirror/method_type.h" @@ -41,6 +42,38 @@ namespace art { namespace mirror { +template<typename DexCachePair> +static void InitializeArray(std::atomic<DexCachePair>* array) { + DexCachePair::Initialize(array); +} + +template<typename T> +static void InitializeArray(GcRoot<T>*) { + // No special initialization is needed. +} + +template<typename T, size_t kMaxCacheSize> +T* DexCache::AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num) { + num = std::min<size_t>(num, kMaxCacheSize); + if (num == 0) { + return nullptr; + } + Thread* self = Thread::Current(); + ClassLinker* linker = Runtime::Current()->GetClassLinker(); + LinearAlloc* alloc = linker->GetOrCreateAllocatorForClassLoader(GetClassLoader()); + MutexLock mu(self, *Locks::dex_cache_lock_); // Avoid allocation by multiple threads. + T* array = GetFieldPtr64<T*>(obj_offset); + if (array != nullptr) { + DCHECK(alloc->Contains(array)); + return array; // Other thread just allocated the array. 
+ } + array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16))); + InitializeArray(array); // Ensure other threads see the array initialized. + SetField32Volatile<false, false>(num_offset, num); + SetField64Volatile<false, false>(obj_offset, reinterpret_cast<uint64_t>(array)); + return array; +} + template <typename T> inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index) : object(object), index(index) {} @@ -83,27 +116,22 @@ inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) { } inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) { - const uint32_t num_preresolved_strings = NumPreResolvedStrings(); - if (num_preresolved_strings != 0u) { - GcRoot<mirror::String>* preresolved_strings = GetPreResolvedStrings(); - // num_preresolved_strings can become 0 and preresolved_strings can become null in any order - // when ClearPreResolvedStrings is called. - if (preresolved_strings != nullptr) { - DCHECK_LT(string_idx.index_, num_preresolved_strings); - DCHECK_EQ(num_preresolved_strings, GetDexFile()->NumStringIds()); - mirror::String* string = preresolved_strings[string_idx.index_].Read(); - if (LIKELY(string != nullptr)) { - return string; - } - } + StringDexCacheType* strings = GetStrings(); + if (UNLIKELY(strings == nullptr)) { + return nullptr; } - return GetStrings()[StringSlotIndex(string_idx)].load( + return strings[StringSlotIndex(string_idx)].load( std::memory_order_relaxed).GetObjectForIndex(string_idx.index_); } inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) { DCHECK(resolved != nullptr); - GetStrings()[StringSlotIndex(string_idx)].store( + StringDexCacheType* strings = GetStrings(); + if (UNLIKELY(strings == nullptr)) { + strings = AllocArray<StringDexCacheType, kDexCacheStringCacheSize>( + StringsOffset(), NumStringsOffset(), GetDexFile()->NumStringIds()); + } + strings[StringSlotIndex(string_idx)].store( StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed); Runtime* const runtime = Runtime::Current(); if (UNLIKELY(runtime->IsActiveTransaction())) { @@ -114,32 +142,14 @@ inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<Stri WriteBarrier::ForEveryFieldWrite(this); } -inline void DexCache::SetPreResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) { - DCHECK(resolved != nullptr); - DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds()); - GetPreResolvedStrings()[string_idx.index_] = GcRoot<mirror::String>(resolved); - Runtime* const runtime = Runtime::Current(); - CHECK(runtime->IsAotCompiler()); - CHECK(!runtime->IsActiveTransaction()); - // TODO: Fine-grained marking, so that we don't need to go through all arrays in full. 
- WriteBarrier::ForEveryFieldWrite(this); -} - -inline void DexCache::ClearPreResolvedStrings() { - SetFieldPtr64</*kTransactionActive=*/false, - /*kCheckTransaction=*/false, - kVerifyNone, - GcRoot<mirror::String>*>(PreResolvedStringsOffset(), nullptr); - SetField32</*kTransactionActive=*/false, - /*bool kCheckTransaction=*/false, - kVerifyNone, - /*kIsVolatile=*/false>(NumPreResolvedStringsOffset(), 0); -} - inline void DexCache::ClearString(dex::StringIndex string_idx) { DCHECK(Runtime::Current()->IsAotCompiler()); uint32_t slot_idx = StringSlotIndex(string_idx); - StringDexCacheType* slot = &GetStrings()[slot_idx]; + StringDexCacheType* strings = GetStrings(); + if (UNLIKELY(strings == nullptr)) { + return; + } + StringDexCacheType* slot = &strings[slot_idx]; // This is racy but should only be called from the transactional interpreter. if (slot->load(std::memory_order_relaxed).index == string_idx.index_) { StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx)); @@ -157,18 +167,27 @@ inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) { inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) { // It is theorized that a load acquire is not required since obtaining the resolved class will // always have an address dependency or a lock. - return GetResolvedTypes()[TypeSlotIndex(type_idx)].load( + TypeDexCacheType* resolved_types = GetResolvedTypes(); + if (UNLIKELY(resolved_types == nullptr)) { + return nullptr; + } + return resolved_types[TypeSlotIndex(type_idx)].load( std::memory_order_relaxed).GetObjectForIndex(type_idx.index_); } inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) { DCHECK(resolved != nullptr); DCHECK(resolved->IsResolved()) << resolved->GetStatus(); + TypeDexCacheType* resolved_types = GetResolvedTypes(); + if (UNLIKELY(resolved_types == nullptr)) { + resolved_types = AllocArray<TypeDexCacheType, kDexCacheTypeCacheSize>( + ResolvedTypesOffset(), NumResolvedTypesOffset(), GetDexFile()->NumTypeIds()); + } // TODO default transaction support. // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a // class but not necessarily seeing the loaded members like the static fields array. // See b/32075261. - GetResolvedTypes()[TypeSlotIndex(type_idx)].store( + resolved_types[TypeSlotIndex(type_idx)].store( TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release); // TODO: Fine-grained marking, so that we don't need to go through all arrays in full. WriteBarrier::ForEveryFieldWrite(this); @@ -176,8 +195,12 @@ inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> res inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) { DCHECK(Runtime::Current()->IsAotCompiler()); + TypeDexCacheType* resolved_types = GetResolvedTypes(); + if (UNLIKELY(resolved_types == nullptr)) { + return; + } uint32_t slot_idx = TypeSlotIndex(type_idx); - TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx]; + TypeDexCacheType* slot = &resolved_types[slot_idx]; // This is racy but should only be called from the single-threaded ImageWriter and tests. 
if (slot->load(std::memory_order_relaxed).index == type_idx.index_) { TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx)); @@ -194,13 +217,22 @@ inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) { } inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) { - return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load( + MethodTypeDexCacheType* methods = GetResolvedMethodTypes(); + if (UNLIKELY(methods == nullptr)) { + return nullptr; + } + return methods[MethodTypeSlotIndex(proto_idx)].load( std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_); } inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) { DCHECK(resolved != nullptr); - GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store( + MethodTypeDexCacheType* methods = GetResolvedMethodTypes(); + if (UNLIKELY(methods == nullptr)) { + methods = AllocArray<MethodTypeDexCacheType, kDexCacheMethodTypeCacheSize>( + ResolvedMethodTypesOffset(), NumResolvedMethodTypesOffset(), GetDexFile()->NumProtoIds()); + } + methods[MethodTypeSlotIndex(proto_idx)].store( MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed); // TODO: Fine-grained marking, so that we don't need to go through all arrays in full. WriteBarrier::ForEveryFieldWrite(this); @@ -209,7 +241,11 @@ inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodTyp inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) { DCHECK(Runtime::Current()->IsMethodHandlesEnabled()); DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds()); - GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx]; + GcRoot<CallSite>* call_sites = GetResolvedCallSites(); + if (UNLIKELY(call_sites == nullptr)) { + return nullptr; + } + GcRoot<mirror::CallSite>& target = call_sites[call_site_idx]; Atomic<GcRoot<mirror::CallSite>>& ref = reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target); return ref.load(std::memory_order_seq_cst).Read(); @@ -222,7 +258,12 @@ inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx, GcRoot<mirror::CallSite> null_call_site(nullptr); GcRoot<mirror::CallSite> candidate(call_site); - GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx]; + GcRoot<CallSite>* call_sites = GetResolvedCallSites(); + if (UNLIKELY(call_sites == nullptr)) { + call_sites = AllocArray<GcRoot<CallSite>, std::numeric_limits<size_t>::max()>( + ResolvedCallSitesOffset(), NumResolvedCallSitesOffset(), GetDexFile()->NumCallSiteIds()); + } + GcRoot<mirror::CallSite>& target = call_sites[call_site_idx]; // The first assignment for a given call site wins. 
Atomic<GcRoot<mirror::CallSite>>& ref = @@ -244,14 +285,23 @@ inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) { } inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) { - auto pair = GetNativePair(GetResolvedFields(), FieldSlotIndex(field_idx)); + FieldDexCacheType* fields = GetResolvedFields(); + if (UNLIKELY(fields == nullptr)) { + return nullptr; + } + auto pair = GetNativePair(fields, FieldSlotIndex(field_idx)); return pair.GetObjectForIndex(field_idx); } inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) { DCHECK(field != nullptr); FieldDexCachePair pair(field, field_idx); - SetNativePair(GetResolvedFields(), FieldSlotIndex(field_idx), pair); + FieldDexCacheType* fields = GetResolvedFields(); + if (UNLIKELY(fields == nullptr)) { + fields = AllocArray<FieldDexCacheType, kDexCacheFieldCacheSize>( + ResolvedFieldsOffset(), NumResolvedFieldsOffset(), GetDexFile()->NumFieldIds()); + } + SetNativePair(fields, FieldSlotIndex(field_idx), pair); } inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) { @@ -262,14 +312,23 @@ inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) { } inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) { - auto pair = GetNativePair(GetResolvedMethods(), MethodSlotIndex(method_idx)); + MethodDexCacheType* methods = GetResolvedMethods(); + if (UNLIKELY(methods == nullptr)) { + return nullptr; + } + auto pair = GetNativePair(methods, MethodSlotIndex(method_idx)); return pair.GetObjectForIndex(method_idx); } inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) { DCHECK(method != nullptr); MethodDexCachePair pair(method, method_idx); - SetNativePair(GetResolvedMethods(), MethodSlotIndex(method_idx), pair); + MethodDexCacheType* methods = GetResolvedMethods(); + if (UNLIKELY(methods == nullptr)) { + methods = AllocArray<MethodDexCacheType, kDexCacheMethodCacheSize>( + ResolvedMethodsOffset(), NumResolvedMethodsOffset(), GetDexFile()->NumMethodIds()); + } + SetNativePair(methods, MethodSlotIndex(method_idx), pair); } template <typename T> @@ -310,7 +369,7 @@ inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs, size_t num_pairs, const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) { - for (size_t i = 0; i < num_pairs; ++i) { + for (size_t i = 0; pairs != nullptr && i < num_pairs; ++i) { DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed); // NOTE: We need the "template" keyword here to avoid a compilation // failure. 
GcRoot<T> is a template argument-dependent type and we need to @@ -345,65 +404,9 @@ inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visito GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>(); size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>(); - for (size_t i = 0; i != num_call_sites; ++i) { + for (size_t i = 0; resolved_call_sites != nullptr && i != num_call_sites; ++i) { visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier()); } - - GcRoot<mirror::String>* const preresolved_strings = GetPreResolvedStrings(); - if (preresolved_strings != nullptr) { - const size_t num_preresolved_strings = NumPreResolvedStrings(); - for (size_t i = 0; i != num_preresolved_strings; ++i) { - visitor.VisitRootIfNonNull(preresolved_strings[i].AddressWithoutBarrier()); - } - } - } -} - -template <ReadBarrierOption kReadBarrierOption, typename Visitor> -inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) { - StringDexCacheType* src = GetStrings(); - for (size_t i = 0, count = NumStrings(); i < count; ++i) { - StringDexCachePair source = src[i].load(std::memory_order_relaxed); - String* ptr = source.object.Read<kReadBarrierOption>(); - String* new_source = visitor(ptr); - source.object = GcRoot<String>(new_source); - dest[i].store(source, std::memory_order_relaxed); - } -} - -template <ReadBarrierOption kReadBarrierOption, typename Visitor> -inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) { - TypeDexCacheType* src = GetResolvedTypes(); - for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) { - TypeDexCachePair source = src[i].load(std::memory_order_relaxed); - Class* ptr = source.object.Read<kReadBarrierOption>(); - Class* new_source = visitor(ptr); - source.object = GcRoot<Class>(new_source); - dest[i].store(source, std::memory_order_relaxed); - } -} - -template <ReadBarrierOption kReadBarrierOption, typename Visitor> -inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, - const Visitor& visitor) { - MethodTypeDexCacheType* src = GetResolvedMethodTypes(); - for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) { - MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed); - MethodType* ptr = source.object.Read<kReadBarrierOption>(); - MethodType* new_source = visitor(ptr); - source.object = GcRoot<MethodType>(new_source); - dest[i].store(source, std::memory_order_relaxed); - } -} - -template <ReadBarrierOption kReadBarrierOption, typename Visitor> -inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, - const Visitor& visitor) { - GcRoot<mirror::CallSite>* src = GetResolvedCallSites(); - for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) { - mirror::CallSite* source = src[i].Read<kReadBarrierOption>(); - mirror::CallSite* new_source = visitor(source); - dest[i] = GcRoot<mirror::CallSite>(new_source); } } diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc index 1b9558e6df..fda827d59e 100644 --- a/runtime/mirror/dex_cache.cc +++ b/runtime/mirror/dex_cache.cc @@ -35,15 +35,7 @@ namespace art { namespace mirror { -template<typename T> -static T* AllocArray(Thread* self, LinearAlloc* alloc, size_t num) { - if (num == 0) { - return nullptr; - } - return reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16))); -} - -void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) { +void 
DexCache::Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader) { DCHECK(GetDexFile() == nullptr); DCHECK(GetStrings() == nullptr); DCHECK(GetResolvedTypes() == nullptr); @@ -53,94 +45,9 @@ void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* line DCHECK(GetResolvedCallSites() == nullptr); ScopedAssertNoThreadSuspension sants(__FUNCTION__); - Thread* self = Thread::Current(); - - size_t num_strings = std::min<size_t>(kDexCacheStringCacheSize, dex_file->NumStringIds()); - size_t num_types = std::min<size_t>(kDexCacheTypeCacheSize, dex_file->NumTypeIds()); - size_t num_fields = std::min<size_t>(kDexCacheFieldCacheSize, dex_file->NumFieldIds()); - size_t num_methods = std::min<size_t>(kDexCacheMethodCacheSize, dex_file->NumMethodIds()); - size_t num_method_types = std::min<size_t>(kDexCacheMethodTypeCacheSize, dex_file->NumProtoIds()); - size_t num_call_sites = dex_file->NumCallSiteIds(); // Full size. - static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8."); - StringDexCacheType* strings = - AllocArray<StringDexCacheType>(self, linear_alloc, num_strings); - TypeDexCacheType* types = - AllocArray<TypeDexCacheType>(self, linear_alloc, num_types); - MethodDexCacheType* methods = - AllocArray<MethodDexCacheType>(self, linear_alloc, num_methods); - FieldDexCacheType* fields = - AllocArray<FieldDexCacheType>(self, linear_alloc, num_fields); - MethodTypeDexCacheType* method_types = - AllocArray<MethodTypeDexCacheType>(self, linear_alloc, num_method_types); - GcRoot<mirror::CallSite>* call_sites = - AllocArray<GcRoot<CallSite>>(self, linear_alloc, num_call_sites); - - DCHECK_ALIGNED(types, alignof(StringDexCacheType)) << - "Expected StringsOffset() to align to StringDexCacheType."; - DCHECK_ALIGNED(strings, alignof(StringDexCacheType)) << - "Expected strings to align to StringDexCacheType."; - static_assert(alignof(StringDexCacheType) == 8u, - "Expected StringDexCacheType to have align of 8."); - if (kIsDebugBuild) { - // Consistency check to make sure all the dex cache arrays are empty. 
b/28992179 - for (size_t i = 0; i < num_strings; ++i) { - CHECK_EQ(strings[i].load(std::memory_order_relaxed).index, 0u); - CHECK(strings[i].load(std::memory_order_relaxed).object.IsNull()); - } - for (size_t i = 0; i < num_types; ++i) { - CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u); - CHECK(types[i].load(std::memory_order_relaxed).object.IsNull()); - } - for (size_t i = 0; i < num_methods; ++i) { - CHECK_EQ(GetNativePair(methods, i).index, 0u); - CHECK(GetNativePair(methods, i).object == nullptr); - } - for (size_t i = 0; i < num_fields; ++i) { - CHECK_EQ(GetNativePair(fields, i).index, 0u); - CHECK(GetNativePair(fields, i).object == nullptr); - } - for (size_t i = 0; i < num_method_types; ++i) { - CHECK_EQ(method_types[i].load(std::memory_order_relaxed).index, 0u); - CHECK(method_types[i].load(std::memory_order_relaxed).object.IsNull()); - } - for (size_t i = 0; i < dex_file->NumCallSiteIds(); ++i) { - CHECK(call_sites[i].IsNull()); - } - } - if (strings != nullptr) { - mirror::StringDexCachePair::Initialize(strings); - } - if (types != nullptr) { - mirror::TypeDexCachePair::Initialize(types); - } - if (fields != nullptr) { - mirror::FieldDexCachePair::Initialize(fields); - } - if (methods != nullptr) { - mirror::MethodDexCachePair::Initialize(methods); - } - if (method_types != nullptr) { - mirror::MethodTypeDexCachePair::Initialize(method_types); - } SetDexFile(dex_file); - SetNativeArrays(strings, - num_strings, - types, - num_types, - methods, - num_methods, - fields, - num_fields, - method_types, - num_method_types, - call_sites, - num_call_sites); -} - -void DexCache::ResetNativeFields() { - SetDexFile(nullptr); - SetNativeArrays(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0); + SetClassLoader(class_loader); } void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) { @@ -184,59 +91,19 @@ void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) { } } -bool DexCache::AddPreResolvedStringsArray() { - DCHECK_EQ(NumPreResolvedStrings(), 0u); - Thread* const self = Thread::Current(); - LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc(); - const size_t num_strings = GetDexFile()->NumStringIds(); - if (num_strings != 0) { - GcRoot<mirror::String>* strings = - linear_alloc->AllocArray<GcRoot<mirror::String>>(self, num_strings); - if (strings == nullptr) { - // Failed to allocate pre-resolved string array (probably due to address fragmentation), bail. 
- return false; - } - SetField32<false>(NumPreResolvedStringsOffset(), num_strings); - - CHECK(strings != nullptr); - SetPreResolvedStrings(strings); - for (size_t i = 0; i < GetDexFile()->NumStringIds(); ++i) { - CHECK(GetPreResolvedStrings()[i].Read() == nullptr); - } - } - return true; -} - -void DexCache::SetNativeArrays(StringDexCacheType* strings, - uint32_t num_strings, - TypeDexCacheType* resolved_types, - uint32_t num_resolved_types, - MethodDexCacheType* resolved_methods, - uint32_t num_resolved_methods, - FieldDexCacheType* resolved_fields, - uint32_t num_resolved_fields, - MethodTypeDexCacheType* resolved_method_types, - uint32_t num_resolved_method_types, - GcRoot<CallSite>* resolved_call_sites, - uint32_t num_resolved_call_sites) { - CHECK_EQ(num_strings != 0u, strings != nullptr); - CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr); - CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr); - CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr); - CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr); - CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr); - SetStrings(strings); - SetResolvedTypes(resolved_types); - SetResolvedMethods(resolved_methods); - SetResolvedFields(resolved_fields); - SetResolvedMethodTypes(resolved_method_types); - SetResolvedCallSites(resolved_call_sites); - SetField32<false>(NumStringsOffset(), num_strings); - SetField32<false>(NumResolvedTypesOffset(), num_resolved_types); - SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods); - SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields); - SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types); - SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites); +void DexCache::ResetNativeArrays() { + SetStrings(nullptr); + SetResolvedTypes(nullptr); + SetResolvedMethods(nullptr); + SetResolvedFields(nullptr); + SetResolvedMethodTypes(nullptr); + SetResolvedCallSites(nullptr); + SetField32<false>(NumStringsOffset(), 0); + SetField32<false>(NumResolvedTypesOffset(), 0); + SetField32<false>(NumResolvedMethodsOffset(), 0); + SetField32<false>(NumResolvedFieldsOffset(), 0); + SetField32<false>(NumResolvedMethodTypesOffset(), 0); + SetField32<false>(NumResolvedCallSitesOffset(), 0); } void DexCache::SetLocation(ObjPtr<mirror::String> location) { @@ -247,6 +114,10 @@ void DexCache::SetClassLoader(ObjPtr<ClassLoader> class_loader) { SetFieldObject<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, class_loader_), class_loader); } +ObjPtr<ClassLoader> DexCache::GetClassLoader() { + return GetFieldObject<ClassLoader>(OFFSET_OF_OBJECT_MEMBER(DexCache, class_loader_)); +} + #if !defined(__aarch64__) && !defined(__x86_64__) static pthread_mutex_t dex_cache_slow_atomic_mutex = PTHREAD_MUTEX_INITIALIZER; diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h index dd05dddc46..874e6e1432 100644 --- a/runtime/mirror/dex_cache.h +++ b/runtime/mirror/dex_cache.h @@ -18,6 +18,7 @@ #define ART_RUNTIME_MIRROR_DEX_CACHE_H_ #include "array.h" +#include "base/array_ref.h" #include "base/bit_utils.h" #include "base/locks.h" #include "dex/dex_file_types.h" @@ -186,29 +187,13 @@ class MANAGED DexCache final : public Object { return sizeof(DexCache); } - // Initialize native fields and allocate memory. 
- void InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) + void Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::dex_lock_); - // Clear all native fields. - void ResetNativeFields() REQUIRES_SHARED(Locks::mutator_lock_); - - template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> - void FixupStrings(StringDexCacheType* dest, const Visitor& visitor) - REQUIRES_SHARED(Locks::mutator_lock_); - - template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> - void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) - REQUIRES_SHARED(Locks::mutator_lock_); - - template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> - void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor) - REQUIRES_SHARED(Locks::mutator_lock_); - - template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor> - void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor) - REQUIRES_SHARED(Locks::mutator_lock_); + // Zero all array references. + // WARNING: This does not free the memory since it is in LinearAlloc. + void ResetNativeArrays() REQUIRES_SHARED(Locks::mutator_lock_); ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_); @@ -278,14 +263,6 @@ class MANAGED DexCache final : public Object { void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_); - void SetPreResolvedString(dex::StringIndex string_idx, - ObjPtr<mirror::String> resolved) - ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_); - - // Clear the preresolved string cache to prevent further usage. - void ClearPreResolvedStrings() - ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_); - // Clear a string for a string_idx, used to undo string intern transactions to make sure // the string isn't kept live. void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_); @@ -329,21 +306,10 @@ class MANAGED DexCache final : public Object { return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset()); } - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> - GcRoot<mirror::String>* GetPreResolvedStrings() ALWAYS_INLINE - REQUIRES_SHARED(Locks::mutator_lock_) { - return GetFieldPtr64<GcRoot<mirror::String>*, kVerifyFlags>(PreResolvedStringsOffset()); - } - void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { SetFieldPtr<false>(StringsOffset(), strings); } - void SetPreResolvedStrings(GcRoot<mirror::String>* strings) - ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { - SetFieldPtr<false>(PreResolvedStringsOffset(), strings); - } - template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags> TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) { return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset()); @@ -464,27 +430,17 @@ class MANAGED DexCache final : public Object { uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_); uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_); - // Returns true if we succeeded in adding the pre-resolved string array. 
- bool AddPreResolvedStringsArray() REQUIRES_SHARED(Locks::mutator_lock_); - void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_); void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_); + ObjPtr<ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_); + private: - void SetNativeArrays(StringDexCacheType* strings, - uint32_t num_strings, - TypeDexCacheType* resolved_types, - uint32_t num_resolved_types, - MethodDexCacheType* resolved_methods, - uint32_t num_resolved_methods, - FieldDexCacheType* resolved_fields, - uint32_t num_resolved_fields, - MethodTypeDexCacheType* resolved_method_types, - uint32_t num_resolved_method_types, - GcRoot<CallSite>* resolved_call_sites, - uint32_t num_resolved_call_sites) - REQUIRES_SHARED(Locks::mutator_lock_); + // Allocate new array in linear alloc and save it in the given fields. + template<typename T, size_t kMaxCacheSize> + T* AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num) + REQUIRES_SHARED(Locks::mutator_lock_); // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations, // so we use a custom pair class for loading and storing the NativeDexCachePair<>. diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc index 0728bab758..b89b20d1ca 100644 --- a/runtime/mirror/dex_cache_test.cc +++ b/runtime/mirror/dex_cache_test.cc @@ -45,21 +45,15 @@ TEST_F(DexCacheTest, Open) { ASSERT_TRUE(java_lang_dex_file_ != nullptr); Handle<DexCache> dex_cache( hs.NewHandle(class_linker_->AllocAndInitializeDexCache( - soa.Self(), - *java_lang_dex_file_, - Runtime::Current()->GetLinearAlloc()))); + soa.Self(), *java_lang_dex_file_, /*class_loader=*/nullptr))); ASSERT_TRUE(dex_cache != nullptr); - EXPECT_TRUE(dex_cache->StaticStringSize() == dex_cache->NumStrings() - || java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings()); - EXPECT_TRUE(dex_cache->StaticTypeSize() == dex_cache->NumResolvedTypes() - || java_lang_dex_file_->NumTypeIds() == dex_cache->NumResolvedTypes()); - EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods() - || java_lang_dex_file_->NumMethodIds() == dex_cache->NumResolvedMethods()); - EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields() - || java_lang_dex_file_->NumFieldIds() == dex_cache->NumResolvedFields()); - EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes() - || java_lang_dex_file_->NumProtoIds() == dex_cache->NumResolvedMethodTypes()); + // The cache is initially empty. 
+ EXPECT_EQ(0u, dex_cache->NumStrings()); + EXPECT_EQ(0u, dex_cache->NumResolvedTypes()); + EXPECT_EQ(0u, dex_cache->NumResolvedMethods()); + EXPECT_EQ(0u, dex_cache->NumResolvedFields()); + EXPECT_EQ(0u, dex_cache->NumResolvedMethodTypes()); } TEST_F(DexCacheMethodHandlesTest, Open) { @@ -68,26 +62,9 @@ TEST_F(DexCacheMethodHandlesTest, Open) { ASSERT_TRUE(java_lang_dex_file_ != nullptr); Handle<DexCache> dex_cache( hs.NewHandle(class_linker_->AllocAndInitializeDexCache( - soa.Self(), - *java_lang_dex_file_, - Runtime::Current()->GetLinearAlloc()))); + soa.Self(), *java_lang_dex_file_, /*class_loader=*/nullptr))); - EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes() - || java_lang_dex_file_->NumProtoIds() == dex_cache->NumResolvedMethodTypes()); -} - -TEST_F(DexCacheTest, LinearAlloc) { - ScopedObjectAccess soa(Thread::Current()); - jobject jclass_loader(LoadDex("Main")); - ASSERT_TRUE(jclass_loader != nullptr); - StackHandleScope<1> hs(soa.Self()); - Handle<mirror::ClassLoader> class_loader(hs.NewHandle( - soa.Decode<mirror::ClassLoader>(jclass_loader))); - ObjPtr<mirror::Class> klass = class_linker_->FindClass(soa.Self(), "LMain;", class_loader); - ASSERT_TRUE(klass != nullptr); - LinearAlloc* const linear_alloc = klass->GetClassLoader()->GetAllocator(); - EXPECT_NE(linear_alloc, runtime_->GetLinearAlloc()); - EXPECT_TRUE(linear_alloc->Contains(klass->GetDexCache()->GetResolvedMethods())); + EXPECT_EQ(0u, dex_cache->NumResolvedMethodTypes()); } TEST_F(DexCacheTest, TestResolvedFieldAccess) { |
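The lazily allocated arrays keep the existing DexCachePair slot layout (see StringSlotIndex/TypeSlotIndex in dex_cache-inl.h above): each slot stores the cached object together with the id that currently owns the slot, and a lookup only hits when the stored id matches the requested one. Below is a rough stand-alone model of that scheme, with invented names and without ART's InvalidIndexForSlot sentinel for empty slots.

// Rough model of the {object, index} slot scheme used by the cache arrays
// (hypothetical names; the real code stores GcRoot<> objects and seeds empty
// slots with InvalidIndexForSlot so that id 0 cannot alias an empty slot).
#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <string>

struct Slot {
  const std::string* object;  // GcRoot<mirror::String> in ART.
  uint32_t index;             // Which string id currently owns the slot.
};

constexpr std::size_t kCacheSize = 1024;  // Power of two, like kDexCacheStringCacheSize.

std::size_t SlotIndex(uint32_t string_idx) { return string_idx % kCacheSize; }

const std::string* Lookup(std::atomic<Slot>* slots, uint32_t string_idx) {
  Slot s = slots[SlotIndex(string_idx)].load(std::memory_order_relaxed);
  return s.index == string_idx ? s.object : nullptr;  // Miss if another id owns the slot.
}

void Store(std::atomic<Slot>* slots, uint32_t string_idx, const std::string* value) {
  slots[SlotIndex(string_idx)].store(Slot{value, string_idx}, std::memory_order_relaxed);
}

int main() {
  static std::atomic<Slot> slots[kCacheSize];
  for (auto& slot : slots) {
    slot.store(Slot{nullptr, 0}, std::memory_order_relaxed);  // Mark all slots empty.
  }
  std::string hello = "hello";
  assert(Lookup(slots, 2049) == nullptr);   // Nothing cached yet.
  Store(slots, 2049, &hello);
  assert(Lookup(slots, 2049) == &hello);    // Same id: hit.
  assert(Lookup(slots, 1025) == nullptr);   // Collides with slot 1, different id: miss.
  return 0;
}

This direct-mapped layout is also why capping the arrays at kDexCache*CacheSize entries is safe: a colliding id simply reads back as a miss and is re-resolved through the slow path.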