Revert "Revert^2 "Lazily allocate DexCache arrays.""

This reverts commit e153a62e8e8a2c42f86d2db87c8188cd0d7bef6b.

Reason for revert: Crashes seen on bots.

Bug: b/181097963
Change-Id: I1b452d4a15adf42dd7170d77d1b79260d78400a3
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 28ceb54..d7c2d65 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -1114,6 +1114,49 @@
Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}
+void ImageWriter::ClearDexCache(ObjPtr<mirror::DexCache> dex_cache) {
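+  // Overwrite every slot with a null object and that slot's invalid index, so
+  // that later lookups miss and fall back to resolution.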
+ // Clear methods.
+ mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
+ for (size_t slot_idx = 0, num = dex_cache->NumResolvedMethods(); slot_idx != num; ++slot_idx) {
+ mirror::MethodDexCachePair invalid(nullptr,
+ mirror::MethodDexCachePair::InvalidIndexForSlot(slot_idx));
+ mirror::DexCache::SetNativePair(resolved_methods, slot_idx, invalid);
+ }
+ // Clear fields.
+ mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
+ for (size_t slot_idx = 0, num = dex_cache->NumResolvedFields(); slot_idx != num; ++slot_idx) {
+ mirror::FieldDexCachePair invalid(nullptr,
+ mirror::FieldDexCachePair::InvalidIndexForSlot(slot_idx));
+ mirror::DexCache::SetNativePair(resolved_fields, slot_idx, invalid);
+ }
+ // Clear types.
+ mirror::TypeDexCacheType* resolved_types = dex_cache->GetResolvedTypes();
+ for (size_t slot_idx = 0, num = dex_cache->NumResolvedTypes(); slot_idx != num; ++slot_idx) {
+ mirror::TypeDexCachePair invalid(nullptr,
+ mirror::TypeDexCachePair::InvalidIndexForSlot(slot_idx));
+ resolved_types[slot_idx].store(invalid, std::memory_order_relaxed);
+ }
+ // Clear strings.
+ mirror::StringDexCacheType* resolved_strings = dex_cache->GetStrings();
+ for (size_t slot_idx = 0, num = dex_cache->NumStrings(); slot_idx != num; ++slot_idx) {
+ mirror::StringDexCachePair invalid(nullptr,
+ mirror::StringDexCachePair::InvalidIndexForSlot(slot_idx));
+ resolved_strings[slot_idx].store(invalid, std::memory_order_relaxed);
+ }
+ // Clear method types.
+ mirror::MethodTypeDexCacheType* resolved_method_types = dex_cache->GetResolvedMethodTypes();
+ size_t num_resolved_method_types = dex_cache->NumResolvedMethodTypes();
+ for (size_t slot_idx = 0; slot_idx != num_resolved_method_types; ++slot_idx) {
+ mirror::MethodTypeDexCachePair invalid(
+ nullptr, mirror::MethodTypeDexCachePair::InvalidIndexForSlot(slot_idx));
+ resolved_method_types[slot_idx].store(invalid, std::memory_order_relaxed);
+ }
+ // Clear call sites.
+ std::fill_n(dex_cache->GetResolvedCallSites(),
+ dex_cache->NumResolvedCallSites(),
+ GcRoot<mirror::CallSite>(nullptr));
+}
+
void ImageWriter::PruneNonImageClasses() {
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
@@ -1147,7 +1190,7 @@
// Completely clear DexCaches.
std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
- dex_cache->ResetNativeArrays();
+ ClearDexCache(dex_cache);
}
// Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
@@ -3131,8 +3174,7 @@
ArtField* src_field = src->GetArtField();
CopyAndFixupPointer(dest, mirror::FieldVarHandle::ArtFieldOffset(), src_field);
} else if (klass == GetClassRoot<mirror::DexCache>(class_roots)) {
- down_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr);
- down_cast<mirror::DexCache*>(copy)->ResetNativeArrays();
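+      // ResetNativeFields() clears the dex file pointer as well as the native
+      // arrays, so no separate SetDexFile(nullptr) is needed here.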
+ down_cast<mirror::DexCache*>(copy)->ResetNativeFields();
} else if (klass->IsClassLoaderClass()) {
mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
// If src is a ClassLoader, set the class table to null so that it gets recreated by the
diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h
index e13ee8e..2a9896a 100644
--- a/dex2oat/linker/image_writer.h
+++ b/dex2oat/linker/image_writer.h
@@ -446,6 +446,10 @@
// Remove unwanted classes from various roots.
void PruneNonImageClasses() REQUIRES_SHARED(Locks::mutator_lock_);
+ // Remove everything from the DexCache.
+ void ClearDexCache(ObjPtr<mirror::DexCache> dex_cache)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
// Find dex caches for pruning or preloading.
std::vector<ObjPtr<mirror::DexCache>> FindDexCaches(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_)
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index a806000..c2fbadf 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -759,7 +759,10 @@
}
art::WriterMutexLock mu(driver_->self_, *art::Locks::dex_lock_);
cache->SetLocation(location.Get());
- cache->Initialize(dex_file_.get(), loader.Get());
+ cache->SetClassLoader(loader.Get());
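+      // A null loader denotes the boot class path, which allocates from the
+      // runtime's global LinearAlloc; other loaders use their own allocator.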
+ cache->InitializeNativeFields(dex_file_.get(),
+ loader.IsNull() ? driver_->runtime_->GetLinearAlloc()
+ : loader->GetAllocator());
return cache.Get();
}
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 674cbc1..5af14b6 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -63,6 +63,10 @@
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;
+
+template <typename T> struct NativeDexCachePair;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
class ArtMethod final {
diff --git a/runtime/base/locks.cc b/runtime/base/locks.cc
index e530073..7404d0d 100644
--- a/runtime/base/locks.cc
+++ b/runtime/base/locks.cc
@@ -74,7 +74,6 @@
Uninterruptible Roles::uninterruptible_;
ReaderWriterMutex* Locks::jni_globals_lock_ = nullptr;
Mutex* Locks::jni_weak_globals_lock_ = nullptr;
-Mutex* Locks::dex_cache_lock_ = nullptr;
ReaderWriterMutex* Locks::dex_lock_ = nullptr;
Mutex* Locks::native_debug_interface_lock_ = nullptr;
ReaderWriterMutex* Locks::jni_id_lock_ = nullptr;
@@ -251,10 +250,6 @@
DCHECK(dex_lock_ == nullptr);
dex_lock_ = new ReaderWriterMutex("ClassLinker dex lock", current_lock_level);
- UPDATE_CURRENT_LOCK_LEVEL(kDexCacheLock);
- DCHECK(dex_cache_lock_ == nullptr);
- dex_cache_lock_ = new Mutex("DexCache lock", current_lock_level);
-
UPDATE_CURRENT_LOCK_LEVEL(kOatFileManagerLock);
DCHECK(oat_file_manager_lock_ == nullptr);
oat_file_manager_lock_ = new ReaderWriterMutex("OatFile manager lock", current_lock_level);
diff --git a/runtime/base/locks.h b/runtime/base/locks.h
index 52d196b..24fb2e0 100644
--- a/runtime/base/locks.h
+++ b/runtime/base/locks.h
@@ -97,7 +97,6 @@
kTracingStreamingLock,
kClassLoaderClassesLock,
kDefaultMutexLevel,
- kDexCacheLock,
kDexLock,
kMarkSweepLargeObjectLock,
kJdwpObjectRegistryLock,
@@ -291,8 +290,6 @@
static ReaderWriterMutex* dex_lock_ ACQUIRED_AFTER(modify_ldt_lock_);
- static Mutex* dex_cache_lock_ ACQUIRED_AFTER(dex_lock_);
-
// Guards opened oat files in OatFileManager.
static ReaderWriterMutex* oat_file_manager_lock_ ACQUIRED_AFTER(dex_lock_);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b424451..05edb03 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1905,10 +1905,13 @@
return false;
}
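+  // The boot class loader (null) maps to the runtime's global LinearAlloc;
+  // app class loaders get their own allocator.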
+ LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
+ DCHECK(linear_alloc != nullptr);
+ DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
{
- // Native fields are all null. Initialize them.
+ // Native fields are all null. Initialize them and allocate native memory.
WriterMutexLock mu(self, *Locks::dex_lock_);
- dex_cache->Initialize(dex_file.get(), class_loader.Get());
+ dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
}
if (!app_image) {
// Register dex files, keep track of existing ones that are conflicts.
@@ -2364,14 +2367,13 @@
return dex_cache.Get();
}
-ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
- Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
- StackHandleScope<1> hs(self);
- Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
+ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
+ const DexFile& dex_file,
+ LinearAlloc* linear_alloc) {
ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
if (dex_cache != nullptr) {
WriterMutexLock mu(self, *Locks::dex_lock_);
- dex_cache->Initialize(&dex_file, h_class_loader.Get());
+ dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
}
return dex_cache;
}
@@ -3842,8 +3844,10 @@
}
void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
- ObjPtr<mirror::DexCache> dex_cache =
- AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
+ ObjPtr<mirror::DexCache> dex_cache = AllocAndInitializeDexCache(
+ self,
+ *dex_file,
+ Runtime::Current()->GetLinearAlloc());
CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
AppendToBootClassPath(dex_file, dex_cache);
}
@@ -4033,10 +4037,10 @@
const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
old_dex_cache = DecodeDexCacheLocked(self, old_data);
if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
- // Do Initialize while holding dex lock to make sure two threads don't call it
+ // Do InitializeNativeFields while holding dex lock to make sure two threads don't call it
// at the same time with the same dex cache. Since the .bss is shared this can cause failing
// DCHECK that the arrays are null.
- h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
+ h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
}
if (old_dex_cache != nullptr) {
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index cbb1dfe..5faf760 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -93,6 +93,9 @@
class MethodType;
template<class T> class ObjectArray;
class StackTraceElement;
+template <typename T> struct NativeDexCachePair;
+using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
+using MethodDexCacheType = std::atomic<MethodDexCachePair>;
} // namespace mirror
namespace verifier {
@@ -956,7 +959,7 @@
// Used for tests and AppendToBootClassPath.
ObjPtr<mirror::DexCache> AllocAndInitializeDexCache(Thread* self,
const DexFile& dex_file,
- ObjPtr<mirror::ClassLoader> class_loader)
+ LinearAlloc* linear_alloc)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::dex_lock_)
REQUIRES(!Roles::uninterruptible_);
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 8e51149..e0f498d 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -1541,9 +1541,8 @@
nullptr,
nullptr));
// Make a copy of the dex cache with changed name.
- dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(),
- *dex_file,
- /* class_loader= */ nullptr));
+ LinearAlloc* alloc = Runtime::Current()->GetLinearAlloc();
+ dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(), *dex_file, alloc));
DCHECK_EQ(dex_cache->GetLocation()->CompareTo(location.Get()), 0);
{
WriterMutexLock mu(soa.Self(), *Locks::dex_lock_);
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 1501bd9..0553f4f 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -28,7 +28,6 @@
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc_root-inl.h"
-#include "linear_alloc.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
@@ -42,38 +41,6 @@
namespace art {
namespace mirror {
-template<typename DexCachePair>
-static void InitializeArray(std::atomic<DexCachePair>* array) {
- DexCachePair::Initialize(array);
-}
-
-template<typename T>
-static void InitializeArray(GcRoot<T>*) {
- // No special initialization is needed.
-}
-
-template<typename T, size_t kMaxCacheSize>
-T* DexCache::AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num) {
- num = std::min<size_t>(num, kMaxCacheSize);
- if (num == 0) {
- return nullptr;
- }
- Thread* self = Thread::Current();
- ClassLinker* linker = Runtime::Current()->GetClassLinker();
- LinearAlloc* alloc = linker->GetOrCreateAllocatorForClassLoader(GetClassLoader());
- MutexLock mu(self, *Locks::dex_cache_lock_); // Avoid allocation by multiple threads.
- T* array = GetFieldPtr64<T*>(obj_offset);
- if (array != nullptr) {
- DCHECK(alloc->Contains(array));
- return array; // Other thread just allocated the array.
- }
- array = reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
- InitializeArray(array); // Ensure other threads see the array initialized.
- SetField32Volatile<false, false>(num_offset, num);
- SetField64Volatile<false, false>(obj_offset, reinterpret_cast<uint64_t>(array));
- return array;
-}
-
template <typename T>
inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
: object(object), index(index) {}
@@ -116,22 +83,27 @@
}
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
- StringDexCacheType* strings = GetStrings();
- if (UNLIKELY(strings == nullptr)) {
- return nullptr;
+ const uint32_t num_preresolved_strings = NumPreResolvedStrings();
+ if (num_preresolved_strings != 0u) {
+ GcRoot<mirror::String>* preresolved_strings = GetPreResolvedStrings();
+ // num_preresolved_strings can become 0 and preresolved_strings can become null in any order
+ // when ClearPreResolvedStrings is called.
+ if (preresolved_strings != nullptr) {
+ DCHECK_LT(string_idx.index_, num_preresolved_strings);
+ DCHECK_EQ(num_preresolved_strings, GetDexFile()->NumStringIds());
+ mirror::String* string = preresolved_strings[string_idx.index_].Read();
+ if (LIKELY(string != nullptr)) {
+ return string;
+ }
+ }
}
- return strings[StringSlotIndex(string_idx)].load(
+ return GetStrings()[StringSlotIndex(string_idx)].load(
std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}
inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
DCHECK(resolved != nullptr);
- StringDexCacheType* strings = GetStrings();
- if (UNLIKELY(strings == nullptr)) {
- strings = AllocArray<StringDexCacheType, kDexCacheStringCacheSize>(
- StringsOffset(), NumStringsOffset(), GetDexFile()->NumStringIds());
- }
- strings[StringSlotIndex(string_idx)].store(
+ GetStrings()[StringSlotIndex(string_idx)].store(
StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
Runtime* const runtime = Runtime::Current();
if (UNLIKELY(runtime->IsActiveTransaction())) {
@@ -142,14 +114,32 @@
WriteBarrier::ForEveryFieldWrite(this);
}
+inline void DexCache::SetPreResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
+ DCHECK(resolved != nullptr);
+ DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
+ GetPreResolvedStrings()[string_idx.index_] = GcRoot<mirror::String>(resolved);
+ Runtime* const runtime = Runtime::Current();
+ CHECK(runtime->IsAotCompiler());
+ CHECK(!runtime->IsActiveTransaction());
+ // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
+ WriteBarrier::ForEveryFieldWrite(this);
+}
+
+inline void DexCache::ClearPreResolvedStrings() {
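+  // Readers (see GetResolvedString) tolerate observing the pointer and the
+  // count being cleared in either order, so two plain stores suffice here.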
+ SetFieldPtr64</*kTransactionActive=*/false,
+ /*kCheckTransaction=*/false,
+ kVerifyNone,
+ GcRoot<mirror::String>*>(PreResolvedStringsOffset(), nullptr);
+ SetField32</*kTransactionActive=*/false,
+ /*bool kCheckTransaction=*/false,
+ kVerifyNone,
+ /*kIsVolatile=*/false>(NumPreResolvedStringsOffset(), 0);
+}
+
inline void DexCache::ClearString(dex::StringIndex string_idx) {
DCHECK(Runtime::Current()->IsAotCompiler());
uint32_t slot_idx = StringSlotIndex(string_idx);
- StringDexCacheType* strings = GetStrings();
- if (UNLIKELY(strings == nullptr)) {
- return;
- }
- StringDexCacheType* slot = &strings[slot_idx];
+ StringDexCacheType* slot = &GetStrings()[slot_idx];
// This is racy but should only be called from the transactional interpreter.
if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
@@ -167,27 +157,18 @@
inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
// It is theorized that a load acquire is not required since obtaining the resolved class will
// always have an address dependency or a lock.
- TypeDexCacheType* resolved_types = GetResolvedTypes();
- if (UNLIKELY(resolved_types == nullptr)) {
- return nullptr;
- }
- return resolved_types[TypeSlotIndex(type_idx)].load(
+ return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}
inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
DCHECK(resolved != nullptr);
DCHECK(resolved->IsResolved()) << resolved->GetStatus();
- TypeDexCacheType* resolved_types = GetResolvedTypes();
- if (UNLIKELY(resolved_types == nullptr)) {
- resolved_types = AllocArray<TypeDexCacheType, kDexCacheTypeCacheSize>(
- ResolvedTypesOffset(), NumResolvedTypesOffset(), GetDexFile()->NumTypeIds());
- }
// TODO default transaction support.
// Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
// class but not necessarily seeing the loaded members like the static fields array.
// See b/32075261.
- resolved_types[TypeSlotIndex(type_idx)].store(
+ GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
// TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
WriteBarrier::ForEveryFieldWrite(this);
@@ -195,12 +176,8 @@
inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
DCHECK(Runtime::Current()->IsAotCompiler());
- TypeDexCacheType* resolved_types = GetResolvedTypes();
- if (UNLIKELY(resolved_types == nullptr)) {
- return;
- }
uint32_t slot_idx = TypeSlotIndex(type_idx);
- TypeDexCacheType* slot = &resolved_types[slot_idx];
+ TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
// This is racy but should only be called from the single-threaded ImageWriter and tests.
if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
@@ -217,22 +194,13 @@
}
inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
- MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
- if (UNLIKELY(methods == nullptr)) {
- return nullptr;
- }
- return methods[MethodTypeSlotIndex(proto_idx)].load(
+ return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
}
inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
DCHECK(resolved != nullptr);
- MethodTypeDexCacheType* methods = GetResolvedMethodTypes();
- if (UNLIKELY(methods == nullptr)) {
- methods = AllocArray<MethodTypeDexCacheType, kDexCacheMethodTypeCacheSize>(
- ResolvedMethodTypesOffset(), NumResolvedMethodTypesOffset(), GetDexFile()->NumProtoIds());
- }
- methods[MethodTypeSlotIndex(proto_idx)].store(
+ GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
Runtime* const runtime = Runtime::Current();
if (UNLIKELY(runtime->IsActiveTransaction())) {
@@ -258,11 +226,7 @@
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
- GcRoot<CallSite>* call_sites = GetResolvedCallSites();
- if (UNLIKELY(call_sites == nullptr)) {
- return nullptr;
- }
- GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];
+ GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
Atomic<GcRoot<mirror::CallSite>>& ref =
reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
return ref.load(std::memory_order_seq_cst).Read();
@@ -275,12 +239,7 @@
GcRoot<mirror::CallSite> null_call_site(nullptr);
GcRoot<mirror::CallSite> candidate(call_site);
- GcRoot<CallSite>* call_sites = GetResolvedCallSites();
- if (UNLIKELY(call_sites == nullptr)) {
- call_sites = AllocArray<GcRoot<CallSite>, std::numeric_limits<size_t>::max()>(
- ResolvedCallSitesOffset(), NumResolvedCallSitesOffset(), GetDexFile()->NumCallSiteIds());
- }
- GcRoot<mirror::CallSite>& target = call_sites[call_site_idx];
+ GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
// The first assignment for a given call site wins.
Atomic<GcRoot<mirror::CallSite>>& ref =
@@ -302,23 +261,14 @@
}
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx) {
- FieldDexCacheType* fields = GetResolvedFields();
- if (UNLIKELY(fields == nullptr)) {
- return nullptr;
- }
- auto pair = GetNativePair(fields, FieldSlotIndex(field_idx));
+ auto pair = GetNativePair(GetResolvedFields(), FieldSlotIndex(field_idx));
return pair.GetObjectForIndex(field_idx);
}
inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field) {
DCHECK(field != nullptr);
FieldDexCachePair pair(field, field_idx);
- FieldDexCacheType* fields = GetResolvedFields();
- if (UNLIKELY(fields == nullptr)) {
- fields = AllocArray<FieldDexCacheType, kDexCacheFieldCacheSize>(
- ResolvedFieldsOffset(), NumResolvedFieldsOffset(), GetDexFile()->NumFieldIds());
- }
- SetNativePair(fields, FieldSlotIndex(field_idx), pair);
+ SetNativePair(GetResolvedFields(), FieldSlotIndex(field_idx), pair);
}
inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
@@ -329,23 +279,14 @@
}
inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx) {
- MethodDexCacheType* methods = GetResolvedMethods();
- if (UNLIKELY(methods == nullptr)) {
- return nullptr;
- }
- auto pair = GetNativePair(methods, MethodSlotIndex(method_idx));
+ auto pair = GetNativePair(GetResolvedMethods(), MethodSlotIndex(method_idx));
return pair.GetObjectForIndex(method_idx);
}
inline void DexCache::SetResolvedMethod(uint32_t method_idx, ArtMethod* method) {
DCHECK(method != nullptr);
MethodDexCachePair pair(method, method_idx);
- MethodDexCacheType* methods = GetResolvedMethods();
- if (UNLIKELY(methods == nullptr)) {
- methods = AllocArray<MethodDexCacheType, kDexCacheMethodCacheSize>(
- ResolvedMethodsOffset(), NumResolvedMethodsOffset(), GetDexFile()->NumMethodIds());
- }
- SetNativePair(methods, MethodSlotIndex(method_idx), pair);
+ SetNativePair(GetResolvedMethods(), MethodSlotIndex(method_idx), pair);
}
template <typename T>
@@ -386,7 +327,7 @@
size_t num_pairs,
const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
- for (size_t i = 0; pairs != nullptr && i < num_pairs; ++i) {
+ for (size_t i = 0; i < num_pairs; ++i) {
DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
// NOTE: We need the "template" keyword here to avoid a compilation
// failure. GcRoot<T> is a template argument-dependent type and we need to
@@ -421,9 +362,65 @@
GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
- for (size_t i = 0; resolved_call_sites != nullptr && i != num_call_sites; ++i) {
+ for (size_t i = 0; i != num_call_sites; ++i) {
visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
}
+
+ GcRoot<mirror::String>* const preresolved_strings = GetPreResolvedStrings();
+ if (preresolved_strings != nullptr) {
+ const size_t num_preresolved_strings = NumPreResolvedStrings();
+ for (size_t i = 0; i != num_preresolved_strings; ++i) {
+ visitor.VisitRootIfNonNull(preresolved_strings[i].AddressWithoutBarrier());
+ }
+ }
+ }
+}
+
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
+ StringDexCacheType* src = GetStrings();
+ for (size_t i = 0, count = NumStrings(); i < count; ++i) {
+ StringDexCachePair source = src[i].load(std::memory_order_relaxed);
+ String* ptr = source.object.Read<kReadBarrierOption>();
+ String* new_source = visitor(ptr);
+ source.object = GcRoot<String>(new_source);
+ dest[i].store(source, std::memory_order_relaxed);
+ }
+}
+
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
+ TypeDexCacheType* src = GetResolvedTypes();
+ for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
+ TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
+ Class* ptr = source.object.Read<kReadBarrierOption>();
+ Class* new_source = visitor(ptr);
+ source.object = GcRoot<Class>(new_source);
+ dest[i].store(source, std::memory_order_relaxed);
+ }
+}
+
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
+ const Visitor& visitor) {
+ MethodTypeDexCacheType* src = GetResolvedMethodTypes();
+ for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
+ MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
+ MethodType* ptr = source.object.Read<kReadBarrierOption>();
+ MethodType* new_source = visitor(ptr);
+ source.object = GcRoot<MethodType>(new_source);
+ dest[i].store(source, std::memory_order_relaxed);
+ }
+}
+
+template <ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
+ const Visitor& visitor) {
+ GcRoot<mirror::CallSite>* src = GetResolvedCallSites();
+ for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) {
+ mirror::CallSite* source = src[i].Read<kReadBarrierOption>();
+ mirror::CallSite* new_source = visitor(source);
+ dest[i] = GcRoot<mirror::CallSite>(new_source);
}
}
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index fda827d..d100f32 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -35,7 +35,15 @@
namespace art {
namespace mirror {
-void DexCache::Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader) {
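+// Allocate a zero-initialized array of T in the given LinearAlloc. The size is
+// rounded up and aligned to 16 bytes so that 64-bit atomic pairs stay naturally
+// aligned even on 32-bit targets.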
+template<typename T>
+static T* AllocArray(Thread* self, LinearAlloc* alloc, size_t num) {
+ if (num == 0) {
+ return nullptr;
+ }
+ return reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
+}
+
+void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) {
DCHECK(GetDexFile() == nullptr);
DCHECK(GetStrings() == nullptr);
DCHECK(GetResolvedTypes() == nullptr);
@@ -45,9 +53,94 @@
DCHECK(GetResolvedCallSites() == nullptr);
ScopedAssertNoThreadSuspension sants(__FUNCTION__);
+ Thread* self = Thread::Current();
+ size_t num_strings = std::min<size_t>(kDexCacheStringCacheSize, dex_file->NumStringIds());
+ size_t num_types = std::min<size_t>(kDexCacheTypeCacheSize, dex_file->NumTypeIds());
+ size_t num_fields = std::min<size_t>(kDexCacheFieldCacheSize, dex_file->NumFieldIds());
+ size_t num_methods = std::min<size_t>(kDexCacheMethodCacheSize, dex_file->NumMethodIds());
+ size_t num_method_types = std::min<size_t>(kDexCacheMethodTypeCacheSize, dex_file->NumProtoIds());
+ size_t num_call_sites = dex_file->NumCallSiteIds(); // Full size.
+
+ static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
+ StringDexCacheType* strings =
+ AllocArray<StringDexCacheType>(self, linear_alloc, num_strings);
+ TypeDexCacheType* types =
+ AllocArray<TypeDexCacheType>(self, linear_alloc, num_types);
+ MethodDexCacheType* methods =
+ AllocArray<MethodDexCacheType>(self, linear_alloc, num_methods);
+ FieldDexCacheType* fields =
+ AllocArray<FieldDexCacheType>(self, linear_alloc, num_fields);
+ MethodTypeDexCacheType* method_types =
+ AllocArray<MethodTypeDexCacheType>(self, linear_alloc, num_method_types);
+ GcRoot<mirror::CallSite>* call_sites =
+ AllocArray<GcRoot<CallSite>>(self, linear_alloc, num_call_sites);
+
+  DCHECK_ALIGNED(types, alignof(StringDexCacheType)) <<
+      "Expected types to align to StringDexCacheType.";
+ DCHECK_ALIGNED(strings, alignof(StringDexCacheType)) <<
+ "Expected strings to align to StringDexCacheType.";
+ static_assert(alignof(StringDexCacheType) == 8u,
+ "Expected StringDexCacheType to have align of 8.");
+ if (kIsDebugBuild) {
+ // Consistency check to make sure all the dex cache arrays are empty. b/28992179
+ for (size_t i = 0; i < num_strings; ++i) {
+ CHECK_EQ(strings[i].load(std::memory_order_relaxed).index, 0u);
+ CHECK(strings[i].load(std::memory_order_relaxed).object.IsNull());
+ }
+ for (size_t i = 0; i < num_types; ++i) {
+ CHECK_EQ(types[i].load(std::memory_order_relaxed).index, 0u);
+ CHECK(types[i].load(std::memory_order_relaxed).object.IsNull());
+ }
+ for (size_t i = 0; i < num_methods; ++i) {
+ CHECK_EQ(GetNativePair(methods, i).index, 0u);
+ CHECK(GetNativePair(methods, i).object == nullptr);
+ }
+ for (size_t i = 0; i < num_fields; ++i) {
+ CHECK_EQ(GetNativePair(fields, i).index, 0u);
+ CHECK(GetNativePair(fields, i).object == nullptr);
+ }
+ for (size_t i = 0; i < num_method_types; ++i) {
+ CHECK_EQ(method_types[i].load(std::memory_order_relaxed).index, 0u);
+ CHECK(method_types[i].load(std::memory_order_relaxed).object.IsNull());
+ }
+    for (size_t i = 0; i < num_call_sites; ++i) {
+ CHECK(call_sites[i].IsNull());
+ }
+ }
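+  // Store the invalid-index sentinel into slot 0 of each pair array. The other
+  // slots are fine zero-initialized: a stored index of 0 can only match slot 0,
+  // so it never aliases a valid dex index elsewhere.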
+ if (strings != nullptr) {
+ mirror::StringDexCachePair::Initialize(strings);
+ }
+ if (types != nullptr) {
+ mirror::TypeDexCachePair::Initialize(types);
+ }
+ if (fields != nullptr) {
+ mirror::FieldDexCachePair::Initialize(fields);
+ }
+ if (methods != nullptr) {
+ mirror::MethodDexCachePair::Initialize(methods);
+ }
+ if (method_types != nullptr) {
+ mirror::MethodTypeDexCachePair::Initialize(method_types);
+ }
SetDexFile(dex_file);
- SetClassLoader(class_loader);
+ SetNativeArrays(strings,
+ num_strings,
+ types,
+ num_types,
+ methods,
+ num_methods,
+ fields,
+ num_fields,
+ method_types,
+ num_method_types,
+ call_sites,
+ num_call_sites);
+}
+
+void DexCache::ResetNativeFields() {
+ SetDexFile(nullptr);
+ SetNativeArrays(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0);
}
void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) {
@@ -91,19 +184,59 @@
}
}
-void DexCache::ResetNativeArrays() {
- SetStrings(nullptr);
- SetResolvedTypes(nullptr);
- SetResolvedMethods(nullptr);
- SetResolvedFields(nullptr);
- SetResolvedMethodTypes(nullptr);
- SetResolvedCallSites(nullptr);
- SetField32<false>(NumStringsOffset(), 0);
- SetField32<false>(NumResolvedTypesOffset(), 0);
- SetField32<false>(NumResolvedMethodsOffset(), 0);
- SetField32<false>(NumResolvedFieldsOffset(), 0);
- SetField32<false>(NumResolvedMethodTypesOffset(), 0);
- SetField32<false>(NumResolvedCallSitesOffset(), 0);
+bool DexCache::AddPreResolvedStringsArray() {
+ DCHECK_EQ(NumPreResolvedStrings(), 0u);
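+  // Only the AOT compiler fills this array (see SetPreResolvedString); it is
+  // always allocated from the runtime's global LinearAlloc.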
+ Thread* const self = Thread::Current();
+ LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
+ const size_t num_strings = GetDexFile()->NumStringIds();
+ if (num_strings != 0) {
+ GcRoot<mirror::String>* strings =
+ linear_alloc->AllocArray<GcRoot<mirror::String>>(self, num_strings);
+ if (strings == nullptr) {
+ // Failed to allocate pre-resolved string array (probably due to address fragmentation), bail.
+ return false;
+ }
+ SetField32<false>(NumPreResolvedStringsOffset(), num_strings);
+
+ CHECK(strings != nullptr);
+ SetPreResolvedStrings(strings);
+    for (size_t i = 0; i < num_strings; ++i) {
+ CHECK(GetPreResolvedStrings()[i].Read() == nullptr);
+ }
+ }
+ return true;
+}
+
+void DexCache::SetNativeArrays(StringDexCacheType* strings,
+ uint32_t num_strings,
+ TypeDexCacheType* resolved_types,
+ uint32_t num_resolved_types,
+ MethodDexCacheType* resolved_methods,
+ uint32_t num_resolved_methods,
+ FieldDexCacheType* resolved_fields,
+ uint32_t num_resolved_fields,
+ MethodTypeDexCacheType* resolved_method_types,
+ uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites) {
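+  // Each array pointer must be non-null exactly when its element count is
+  // non-zero.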
+ CHECK_EQ(num_strings != 0u, strings != nullptr);
+ CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr);
+ CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr);
+ CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr);
+ CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr);
+ CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr);
+ SetStrings(strings);
+ SetResolvedTypes(resolved_types);
+ SetResolvedMethods(resolved_methods);
+ SetResolvedFields(resolved_fields);
+ SetResolvedMethodTypes(resolved_method_types);
+ SetResolvedCallSites(resolved_call_sites);
+ SetField32<false>(NumStringsOffset(), num_strings);
+ SetField32<false>(NumResolvedTypesOffset(), num_resolved_types);
+ SetField32<false>(NumResolvedMethodsOffset(), num_resolved_methods);
+ SetField32<false>(NumResolvedFieldsOffset(), num_resolved_fields);
+ SetField32<false>(NumResolvedMethodTypesOffset(), num_resolved_method_types);
+ SetField32<false>(NumResolvedCallSitesOffset(), num_resolved_call_sites);
}
void DexCache::SetLocation(ObjPtr<mirror::String> location) {
@@ -115,7 +248,7 @@
}
ObjPtr<ClassLoader> DexCache::GetClassLoader() {
- return GetFieldObject<ClassLoader>(OFFSET_OF_OBJECT_MEMBER(DexCache, class_loader_));
+ return GetFieldObject<mirror::ClassLoader>(OFFSET_OF_OBJECT_MEMBER(DexCache, class_loader_));
}
#if !defined(__aarch64__) && !defined(__x86_64__)
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 26fc520c..f02ddc6 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -18,7 +18,6 @@
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_
#include "array.h"
-#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/locks.h"
#include "dex/dex_file_types.h"
@@ -189,13 +188,29 @@
return sizeof(DexCache);
}
- void Initialize(const DexFile* dex_file, ObjPtr<ClassLoader> class_loader)
+ // Initialize native fields and allocate memory.
+ void InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::dex_lock_);
- // Zero all array references.
- // WARNING: This does not free the memory since it is in LinearAlloc.
- void ResetNativeArrays() REQUIRES_SHARED(Locks::mutator_lock_);
+ // Clear all native fields.
+ void ResetNativeFields() REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
+ void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
+ REQUIRES_SHARED(Locks::mutator_lock_);
ObjPtr<String> GetLocation() REQUIRES_SHARED(Locks::mutator_lock_);
@@ -265,6 +280,14 @@
void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetPreResolvedString(dex::StringIndex string_idx,
+ ObjPtr<mirror::String> resolved)
+ ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
+
+ // Clear the preresolved string cache to prevent further usage.
+ void ClearPreResolvedStrings()
+ ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_);
+
// Clear a string for a string_idx, used to undo string intern transactions to make sure
// the string isn't kept live.
void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -312,10 +335,21 @@
return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
}
+ template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+ GcRoot<mirror::String>* GetPreResolvedStrings() ALWAYS_INLINE
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ return GetFieldPtr64<GcRoot<mirror::String>*, kVerifyFlags>(PreResolvedStringsOffset());
+ }
+
void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
SetFieldPtr<false>(StringsOffset(), strings);
}
+ void SetPreResolvedStrings(GcRoot<mirror::String>* strings)
+ ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
+ SetFieldPtr<false>(PreResolvedStringsOffset(), strings);
+ }
+
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
@@ -436,17 +470,28 @@
uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
+ // Returns true if we succeeded in adding the pre-resolved string array.
+ bool AddPreResolvedStringsArray() REQUIRES_SHARED(Locks::mutator_lock_);
+
void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);
void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);
-
ObjPtr<ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
private:
- // Allocate new array in linear alloc and save it in the given fields.
- template<typename T, size_t kMaxCacheSize>
- T* AllocArray(MemberOffset obj_offset, MemberOffset num_offset, size_t num)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ void SetNativeArrays(StringDexCacheType* strings,
+ uint32_t num_strings,
+ TypeDexCacheType* resolved_types,
+ uint32_t num_resolved_types,
+ MethodDexCacheType* resolved_methods,
+ uint32_t num_resolved_methods,
+ FieldDexCacheType* resolved_fields,
+ uint32_t num_resolved_fields,
+ MethodTypeDexCacheType* resolved_method_types,
+ uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites)
+ REQUIRES_SHARED(Locks::mutator_lock_);
// std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
// so we use a custom pair class for loading and storing the NativeDexCachePair<>.
diff --git a/runtime/mirror/dex_cache_test.cc b/runtime/mirror/dex_cache_test.cc
index b89b20d..0728bab 100644
--- a/runtime/mirror/dex_cache_test.cc
+++ b/runtime/mirror/dex_cache_test.cc
@@ -45,15 +45,21 @@
ASSERT_TRUE(java_lang_dex_file_ != nullptr);
Handle<DexCache> dex_cache(
hs.NewHandle(class_linker_->AllocAndInitializeDexCache(
- soa.Self(), *java_lang_dex_file_, /*class_loader=*/nullptr)));
+ soa.Self(),
+ *java_lang_dex_file_,
+ Runtime::Current()->GetLinearAlloc())));
ASSERT_TRUE(dex_cache != nullptr);
- // The cache is initially empty.
- EXPECT_EQ(0u, dex_cache->NumStrings());
- EXPECT_EQ(0u, dex_cache->NumResolvedTypes());
- EXPECT_EQ(0u, dex_cache->NumResolvedMethods());
- EXPECT_EQ(0u, dex_cache->NumResolvedFields());
- EXPECT_EQ(0u, dex_cache->NumResolvedMethodTypes());
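+  // The caches are allocated eagerly: each length is the smaller of the static
+  // cache size cap and the dex file's full count, so it equals one of the two.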
+ EXPECT_TRUE(dex_cache->StaticStringSize() == dex_cache->NumStrings()
+ || java_lang_dex_file_->NumStringIds() == dex_cache->NumStrings());
+ EXPECT_TRUE(dex_cache->StaticTypeSize() == dex_cache->NumResolvedTypes()
+ || java_lang_dex_file_->NumTypeIds() == dex_cache->NumResolvedTypes());
+ EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
+ || java_lang_dex_file_->NumMethodIds() == dex_cache->NumResolvedMethods());
+ EXPECT_TRUE(dex_cache->StaticArtFieldSize() == dex_cache->NumResolvedFields()
+ || java_lang_dex_file_->NumFieldIds() == dex_cache->NumResolvedFields());
+ EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes()
+ || java_lang_dex_file_->NumProtoIds() == dex_cache->NumResolvedMethodTypes());
}
TEST_F(DexCacheMethodHandlesTest, Open) {
@@ -62,9 +68,26 @@
ASSERT_TRUE(java_lang_dex_file_ != nullptr);
Handle<DexCache> dex_cache(
hs.NewHandle(class_linker_->AllocAndInitializeDexCache(
- soa.Self(), *java_lang_dex_file_, /*class_loader=*/nullptr)));
+ soa.Self(),
+ *java_lang_dex_file_,
+ Runtime::Current()->GetLinearAlloc())));
- EXPECT_EQ(0u, dex_cache->NumResolvedMethodTypes());
+ EXPECT_TRUE(dex_cache->StaticMethodTypeSize() == dex_cache->NumResolvedMethodTypes()
+ || java_lang_dex_file_->NumProtoIds() == dex_cache->NumResolvedMethodTypes());
+}
+
+TEST_F(DexCacheTest, LinearAlloc) {
+ ScopedObjectAccess soa(Thread::Current());
+ jobject jclass_loader(LoadDex("Main"));
+ ASSERT_TRUE(jclass_loader != nullptr);
+ StackHandleScope<1> hs(soa.Self());
+ Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
+ soa.Decode<mirror::ClassLoader>(jclass_loader)));
+ ObjPtr<mirror::Class> klass = class_linker_->FindClass(soa.Self(), "LMain;", class_loader);
+ ASSERT_TRUE(klass != nullptr);
+ LinearAlloc* const linear_alloc = klass->GetClassLoader()->GetAllocator();
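+  // Dex cache arrays for app classes must come from the class loader's own
+  // allocator, not from the runtime's global LinearAlloc.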
+ EXPECT_NE(linear_alloc, runtime_->GetLinearAlloc());
+ EXPECT_TRUE(linear_alloc->Contains(klass->GetDexCache()->GetResolvedMethods()));
}
TEST_F(DexCacheTest, TestResolvedFieldAccess) {