Revert "Revert "Add one LinearAlloc per ClassLoader""
The issue was fixed by:
https://android-review.googlesource.com/#/c/171945/
Bug: 22720414
This reverts commit 7de5dfe37f3cf24e1166412b589f6f67dcd1f1c0.
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index bc8a9f4..6b9c8aa 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1318,9 +1318,8 @@
boot_class_table_.VisitRoots(buffered_visitor);
// TODO: Avoid marking these to enable class unloading.
JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
- for (jweak weak_root : class_loaders_) {
- mirror::Object* class_loader =
- down_cast<mirror::ClassLoader*>(vm->DecodeWeakGlobal(self, weak_root));
+ for (const ClassLoaderData& data : class_loaders_) {
+ mirror::Object* class_loader = vm->DecodeWeakGlobal(self, data.weak_root);
// Don't need to update anything since the class loaders will be updated by SweepSystemWeaks.
visitor->VisitRootIfNonNull(&class_loader, RootInfo(kRootVMInternal));
}
@@ -1503,13 +1502,10 @@
STLDeleteElements(&oat_files_);
Thread* const self = Thread::Current();
JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
- for (jweak weak_root : class_loaders_) {
- auto* const class_loader = down_cast<mirror::ClassLoader*>(
- vm->DecodeWeakGlobalDuringShutdown(self, weak_root));
- if (class_loader != nullptr) {
- delete class_loader->GetClassTable();
- }
- vm->DeleteWeakGlobalRef(self, weak_root);
+ for (const ClassLoaderData& data : class_loaders_) {
+ vm->DecodeWeakGlobalDuringShutdown(self, data.weak_root);
+ delete data.allocator;
+ delete data.class_table;
}
class_loaders_.clear();
}
@@ -2375,21 +2371,25 @@
}
}
-LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) {
+LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
+ LinearAlloc* allocator,
+ size_t length) {
if (length == 0) {
return nullptr;
}
// If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
- void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
+ void* array_storage = allocator->Alloc(self, storage_size);
auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
CHECK(ret != nullptr);
std::uninitialized_fill_n(&ret->At(0), length, ArtField());
return ret;
}
-LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) {
+LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
+ LinearAlloc* allocator,
+ size_t length) {
if (length == 0) {
return nullptr;
}
@@ -2397,7 +2397,7 @@
const size_t method_size = ArtMethod::Size(image_pointer_size_);
const size_t storage_size =
LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
- void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
+ void* array_storage = allocator->Alloc(self, storage_size);
auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
CHECK(ret != nullptr);
for (size_t i = 0; i < length; ++i) {
@@ -2406,6 +2406,15 @@
return ret;
}
+LinearAlloc* ClassLinker::GetAllocatorForClassLoader(mirror::ClassLoader* class_loader) {
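+ // A null class loader denotes the boot class loader, which uses the runtime's global LinearAlloc.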
+ if (class_loader == nullptr) {
+ return Runtime::Current()->GetLinearAlloc();
+ }
+ LinearAlloc* allocator = class_loader->GetAllocator();
+ DCHECK(allocator != nullptr);
+ return allocator;
+}
+
void ClassLinker::LoadClassMembers(Thread* self,
const DexFile& dex_file,
const uint8_t* class_data,
@@ -2418,8 +2427,11 @@
// Load static fields.
// We allow duplicate definitions of the same field in a class_data_item
// but ignore the repeated indexes here, b/21868015.
+ LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
ClassDataItemIterator it(dex_file, class_data);
- LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, it.NumStaticFields());
+ LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
+ allocator,
+ it.NumStaticFields());
size_t num_sfields = 0;
uint32_t last_field_idx = 0u;
for (; it.HasNextStaticField(); it.Next()) {
@@ -2435,7 +2447,9 @@
klass->SetSFieldsPtr(sfields);
DCHECK_EQ(klass->NumStaticFields(), num_sfields);
// Load instance fields.
- LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self, it.NumInstanceFields());
+ LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
+ allocator,
+ it.NumInstanceFields());
size_t num_ifields = 0u;
last_field_idx = 0u;
for (; it.HasNextInstanceField(); it.Next()) {
@@ -2458,8 +2472,8 @@
klass->SetIFieldsPtr(ifields);
DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
// Load methods.
- klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods()));
- klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods()));
+ klass->SetDirectMethodsPtr(AllocArtMethodArray(self, allocator, it.NumDirectMethods()));
+ klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, allocator, it.NumVirtualMethods()));
size_t class_def_method_index = 0;
uint32_t last_dex_method_index = DexFile::kDexNoIndex;
size_t last_class_def_method_index = 0;
@@ -3031,7 +3045,7 @@
WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
boot_class_table_.FreezeSnapshot();
MoveClassTableToPreZygoteVisitor visitor;
- VisitClassLoadersAndRemoveClearedLoaders(&visitor);
+ VisitClassLoaders(&visitor);
}
mirror::Class* ClassLinker::LookupClassFromImage(const char* descriptor) {
@@ -3414,9 +3428,12 @@
mirror::Class* existing = InsertClass(descriptor.c_str(), klass.Get(), hash);
CHECK(existing == nullptr);
+ // This needs to happen after we insert the class so that the class loader's allocator field is set.
+ LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
+
// Instance fields are inherited, but we add a couple of static fields...
const size_t num_fields = 2;
- LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, num_fields);
+ LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
klass->SetSFieldsPtr(sfields);
// 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
@@ -3433,7 +3450,7 @@
throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
// Proxies have 1 direct method, the constructor
- LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, 1);
+ LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, allocator, 1);
// Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
// want to throw OOM in the future.
if (UNLIKELY(directs == nullptr)) {
@@ -3448,7 +3465,7 @@
DCHECK_EQ(h_methods->GetClass(), mirror::Method::ArrayClass())
<< PrettyClass(h_methods->GetClass());
const size_t num_virtual_methods = h_methods->GetLength();
- auto* virtuals = AllocArtMethodArray(self, num_virtual_methods);
+ auto* virtuals = AllocArtMethodArray(self, allocator, num_virtual_methods);
// Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
// want to throw OOM in the future.
if (UNLIKELY(virtuals == nullptr)) {
@@ -4166,9 +4183,14 @@
if (class_table == nullptr) {
class_table = new ClassTable;
Thread* const self = Thread::Current();
- class_loaders_.push_back(self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader));
+ ClassLoaderData data;
+ data.weak_root = self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader);
+ data.class_table = class_table;
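+ // Each class loader gets its own LinearAlloc so that its metadata can be freed when the loader is unloaded.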
+ data.allocator = Runtime::Current()->CreateLinearAlloc();
+ class_loaders_.push_back(data);
// We don't already have a class table, so add the new one to the class loader.
- class_loader->SetClassTable(class_table);
+ class_loader->SetClassTable(data.class_table);
+ class_loader->SetAllocator(data.allocator);
}
return class_table;
}
@@ -6158,7 +6180,10 @@
ArtMethod* ClassLinker::CreateRuntimeMethod() {
const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
const size_t method_size = ArtMethod::Size(image_pointer_size_);
- LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(Thread::Current(), 1);
+ LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(
+ Thread::Current(),
+ Runtime::Current()->GetLinearAlloc(),
+ 1);
ArtMethod* method = &method_array->At(0, method_size, method_alignment);
CHECK(method != nullptr);
method->SetDexMethodIndex(DexFile::kDexNoIndex);
@@ -6171,33 +6196,34 @@
find_array_class_cache_next_victim_ = 0;
}
-void ClassLinker::VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor) {
+void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
Thread* const self = Thread::Current();
- Locks::classlinker_classes_lock_->AssertExclusiveHeld(self);
JavaVMExt* const vm = self->GetJniEnv()->vm;
- for (auto it = class_loaders_.begin(); it != class_loaders_.end();) {
- const jweak weak_root = *it;
- mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
- vm->DecodeWeakGlobal(self, weak_root));
+ for (const ClassLoaderData& data : class_loaders_) {
+ auto* const class_loader = down_cast<mirror::ClassLoader*>(
+ vm->DecodeWeakGlobal(self, data.weak_root));
if (class_loader != nullptr) {
visitor->Visit(class_loader);
- ++it;
- } else {
- // Remove the cleared weak reference from the array.
- vm->DeleteWeakGlobalRef(self, weak_root);
- it = class_loaders_.erase(it);
}
}
}
-void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
+void ClassLinker::CleanupClassLoaders() {
Thread* const self = Thread::Current();
- JavaVMExt* const vm = self->GetJniEnv()->vm;
- for (jweak weak_root : class_loaders_) {
- mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
- vm->DecodeWeakGlobal(self, weak_root));
+ WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
+ JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
+ for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
+ const ClassLoaderData& data = *it;
+ auto* const class_loader = down_cast<mirror::ClassLoader*>(
+ vm->DecodeWeakGlobal(self, data.weak_root));
if (class_loader != nullptr) {
- visitor->Visit(class_loader);
+ ++it;
+ } else {
+ // The weak reference was cleared, so delete the data associated with this class loader.
+ delete data.class_table;
+ delete data.allocator;
+ vm->DeleteWeakGlobalRef(self, data.weak_root);
+ it = class_loaders_.erase(it);
}
}
}
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index fee7066..f705330 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -403,9 +403,13 @@
SHARED_REQUIRES(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
- LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self, size_t length);
+ LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self,
+ LinearAlloc* allocator,
+ size_t length);
- LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self, size_t length);
+ LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self,
+ LinearAlloc* allocator,
+ size_t length);
mirror::PointerArray* AllocPointerArray(Thread* self, size_t length)
SHARED_REQUIRES(Locks::mutator_lock_)
@@ -546,17 +550,24 @@
// entries are roots, but potentially not image classes.
void DropFindArrayClassCache() SHARED_REQUIRES(Locks::mutator_lock_);
- private:
- // The RemoveClearedLoaders version removes cleared weak global class loaders and frees their
- // class tables. This version can only be called with exclusive access to the
- // classlinker_classes_lock_ since it modifies the class_loaders_ list.
- void VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor)
- REQUIRES(Locks::classlinker_classes_lock_)
+ // Clean up class loaders; this needs to happen after JNI weak globals are cleared.
+ void CleanupClassLoaders()
+ SHARED_REQUIRES(Locks::mutator_lock_)
+ REQUIRES(!Locks::classlinker_classes_lock_);
+
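+ // Returns the class loader's allocator, or the runtime's global LinearAlloc for the boot class loader (null).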
+ static LinearAlloc* GetAllocatorForClassLoader(mirror::ClassLoader* class_loader)
SHARED_REQUIRES(Locks::mutator_lock_);
+
+ private:
+ struct ClassLoaderData {
+ jobject weak_root; // Weak root to enable class unloading.
+ ClassTable* class_table; // Class table for this loader; freed when the loader is unloaded.
+ LinearAlloc* allocator; // Allocator for this loader's fields and methods; freed when the loader is unloaded.
+ };
+
void VisitClassLoaders(ClassLoaderVisitor* visitor) const
SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
-
void VisitClassesInternal(ClassVisitor* visitor)
SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
@@ -826,8 +837,8 @@
std::vector<const OatFile*> oat_files_ GUARDED_BY(dex_lock_);
// This contains the class loaders which have class tables. It is populated by
- // InsertClassTableForClassLoader. Weak roots to enable class unloading.
- std::list<jweak> class_loaders_
+ // InsertClassTableForClassLoader.
+ std::list<ClassLoaderData> class_loaders_
GUARDED_BY(Locks::classlinker_classes_lock_);
// Boot class path table; kept separately since the class loader for the boot class path is null.
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index b4ea3b3..0926ce3 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -550,6 +550,7 @@
struct ClassLoaderOffsets : public CheckOffsets<mirror::ClassLoader> {
ClassLoaderOffsets() : CheckOffsets<mirror::ClassLoader>(false, "Ljava/lang/ClassLoader;") {
+ addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, allocator_), "allocator");
addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, class_table_), "classTable");
addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, packages_), "packages");
addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, parent_), "parent");
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 399591b..468179c 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -457,6 +457,8 @@
CheckEmptyMarkStack();
// Re-enable weak ref accesses.
ReenableWeakRefAccess(self);
+ // Free data for class loaders that we unloaded.
+ Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
// Marking is done. Disable marking.
DisableMarking();
CheckEmptyMarkStack();
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 60f833b..f561764 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -205,6 +205,7 @@
ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
SweepSystemWeaks();
}
+ Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
// Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
// before they are properly counted.
RevokeAllThreadLocalBuffers();
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 089f453..2d1f312 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -283,11 +283,15 @@
void MarkSweep::ReclaimPhase() {
TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
- Thread* self = Thread::Current();
+ Thread* const self = Thread::Current();
// Process the references concurrently.
ProcessReferences(self);
SweepSystemWeaks(self);
- Runtime::Current()->AllowNewSystemWeaks();
+ Runtime* const runtime = Runtime::Current();
+ runtime->AllowNewSystemWeaks();
+ // Clean up class loaders after system weaks are swept since that is how we know if class
+ // unloading occurred.
+ runtime->GetClassLinker()->CleanupClassLoaders();
{
WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
GetHeap()->RecordFreeRevoke();
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index ed63ed0..7f57f30 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -248,6 +248,7 @@
ReaderMutexLock mu(self_, *Locks::heap_bitmap_lock_);
SweepSystemWeaks();
}
+ Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
// Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
// before they are properly counted.
RevokeAllThreadLocalBuffers();
diff --git a/runtime/jit/jit_code_cache_test.cc b/runtime/jit/jit_code_cache_test.cc
index a6cbb71..c76dc11 100644
--- a/runtime/jit/jit_code_cache_test.cc
+++ b/runtime/jit/jit_code_cache_test.cc
@@ -49,8 +49,11 @@
ASSERT_TRUE(reserved_code != nullptr);
ASSERT_TRUE(code_cache->ContainsCodePtr(reserved_code));
ASSERT_EQ(code_cache->NumMethods(), 1u);
- ClassLinker* const cl = Runtime::Current()->GetClassLinker();
- ArtMethod* method = &cl->AllocArtMethodArray(soa.Self(), 1)->At(0);
+ Runtime* const runtime = Runtime::Current();
+ ClassLinker* const class_linker = runtime->GetClassLinker();
+ ArtMethod* method = &class_linker->AllocArtMethodArray(soa.Self(),
+ runtime->GetLinearAlloc(),
+ 1)->At(0);
ASSERT_FALSE(code_cache->ContainsMethod(method));
method->SetEntryPointFromQuickCompiledCode(reserved_code);
ASSERT_TRUE(code_cache->ContainsMethod(method));
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index f27b615..c2a65d6 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -35,18 +35,31 @@
static constexpr uint32_t InstanceSize() {
return sizeof(ClassLoader);
}
+
ClassLoader* GetParent() SHARED_REQUIRES(Locks::mutator_lock_) {
return GetFieldObject<ClassLoader>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, parent_));
}
+
ClassTable* GetClassTable() SHARED_REQUIRES(Locks::mutator_lock_) {
return reinterpret_cast<ClassTable*>(
GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_)));
}
+
void SetClassTable(ClassTable* class_table) SHARED_REQUIRES(Locks::mutator_lock_) {
SetField64<false>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_),
reinterpret_cast<uint64_t>(class_table));
}
+ LinearAlloc* GetAllocator() SHARED_REQUIRES(Locks::mutator_lock_) {
+ return reinterpret_cast<LinearAlloc*>(
+ GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_)));
+ }
+
+ void SetAllocator(LinearAlloc* allocator) SHARED_REQUIRES(Locks::mutator_lock_) {
+ SetField64<false>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_),
+ reinterpret_cast<uint64_t>(allocator));
+ }
+
private:
// Visit instance fields of the class loader as well as its associated classes.
// Null class loader is handled by ClassLinker::VisitClassRoots.
@@ -61,6 +74,7 @@
HeapReference<Object> proxyCache_;
// Native pointer to class table, need to zero this out when image writing.
uint32_t padding_ ATTRIBUTE_UNUSED;
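+ // Native pointer to the LinearAlloc that backs this class loader's fields and methods.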
+ uint64_t allocator_;
uint64_t class_table_;
friend struct art::ClassLoaderOffsets; // for verifying offset information
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index fba9d37..fe97394 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -947,10 +947,8 @@
if (IsCompiler() && Is64BitInstructionSet(kRuntimeISA)) {
// 4gb, no malloc. Explanation in header.
low_4gb_arena_pool_.reset(new ArenaPool(false, true));
- linear_alloc_.reset(new LinearAlloc(low_4gb_arena_pool_.get()));
- } else {
- linear_alloc_.reset(new LinearAlloc(arena_pool_.get()));
}
+ linear_alloc_.reset(CreateLinearAlloc());
BlockSignals();
InitPlatformSignalHandlers();
@@ -1791,4 +1789,10 @@
return verify_ == verifier::VerifyMode::kSoftFail;
}
+LinearAlloc* Runtime::CreateLinearAlloc() {
+ return (IsCompiler() && Is64BitInstructionSet(kRuntimeISA))
+ ? new LinearAlloc(low_4gb_arena_pool_.get())
+ : new LinearAlloc(arena_pool_.get());
+}
+
} // namespace art
diff --git a/runtime/runtime.h b/runtime/runtime.h
index a35eac1..6154c34 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -570,6 +570,9 @@
// Called from class linker.
void SetSentinel(mirror::Object* sentinel) SHARED_REQUIRES(Locks::mutator_lock_);
+ // Create a normal LinearAlloc, or a low 4gb version if we are a 64 bit AOT compiler.
+ LinearAlloc* CreateLinearAlloc();
+
private:
static void InitPlatformSignalHandlers();
diff --git a/runtime/stack.cc b/runtime/stack.cc
index d739743..7f72f8a 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -840,23 +840,30 @@
} else {
CHECK(declaring_class == nullptr);
}
- auto* runtime = Runtime::Current();
- auto* la = runtime->GetLinearAlloc();
- if (!la->Contains(method)) {
- // Check image space.
- bool in_image = false;
- for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
- if (space->IsImageSpace()) {
- auto* image_space = space->AsImageSpace();
- const auto& header = image_space->GetImageHeader();
- const auto* methods = &header.GetMethodsSection();
- if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
- in_image = true;
- break;
+ Runtime* const runtime = Runtime::Current();
+ LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
+ if (!linear_alloc->Contains(method)) {
+ // Check class linker linear allocs.
+ mirror::Class* klass = method->GetDeclaringClass();
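+ // Runtime methods have no declaring class; they are allocated from the runtime's global LinearAlloc.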
+ LinearAlloc* const class_linear_alloc = (klass != nullptr)
+ ? ClassLinker::GetAllocatorForClassLoader(klass->GetClassLoader())
+ : linear_alloc;
+ if (!class_linear_alloc->Contains(method)) {
+ // Check image space.
+ bool in_image = false;
+ for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
+ if (space->IsImageSpace()) {
+ auto* image_space = space->AsImageSpace();
+ const auto& header = image_space->GetImageHeader();
+ const auto* methods = &header.GetMethodsSection();
+ if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
+ in_image = true;
+ break;
+ }
}
}
+ CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
}
- CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
}
if (cur_quick_frame_ != nullptr) {
method->AssertPcIsWithinQuickCode(cur_quick_frame_pc_);