-rw-r--r--  runtime/class_linker.cc                     | 110
-rw-r--r--  runtime/class_linker.h                      |  33
-rw-r--r--  runtime/class_linker_test.cc                |   1
-rw-r--r--  runtime/gc/collector/concurrent_copying.cc  |   2
-rw-r--r--  runtime/gc/collector/mark_compact.cc        |   1
-rw-r--r--  runtime/gc/collector/mark_sweep.cc          |   8
-rw-r--r--  runtime/gc/collector/semi_space.cc          |   1
-rw-r--r--  runtime/jit/jit_code_cache_test.cc          |   7
-rw-r--r--  runtime/mirror/class_loader.h               |  14
-rw-r--r--  runtime/runtime.cc                          |  10
-rw-r--r--  runtime/runtime.h                           |   3
-rw-r--r--  runtime/stack.cc                            |  35
12 files changed, 74 insertions(+), 151 deletions(-)
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 6b9c8aa353..bc8a9f4936 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1318,8 +1318,9 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
   boot_class_table_.VisitRoots(buffered_visitor);
   // TODO: Avoid marking these to enable class unloading.
   JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-  for (const ClassLoaderData& data : class_loaders_) {
-    mirror::Object* class_loader = vm->DecodeWeakGlobal(self, data.weak_root);
+  for (jweak weak_root : class_loaders_) {
+    mirror::Object* class_loader =
+        down_cast<mirror::ClassLoader*>(vm->DecodeWeakGlobal(self, weak_root));
     // Don't need to update anything since the class loaders will be updated by SweepSystemWeaks.
     visitor->VisitRootIfNonNull(&class_loader, RootInfo(kRootVMInternal));
   }
@@ -1502,10 +1503,13 @@ ClassLinker::~ClassLinker() {
   STLDeleteElements(&oat_files_);
   Thread* const self = Thread::Current();
   JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-  for (const ClassLoaderData& data : class_loaders_) {
-    vm->DecodeWeakGlobalDuringShutdown(self, data.weak_root);
-    delete data.allocator;
-    delete data.class_table;
+  for (jweak weak_root : class_loaders_) {
+    auto* const class_loader = down_cast<mirror::ClassLoader*>(
+        vm->DecodeWeakGlobalDuringShutdown(self, weak_root));
+    if (class_loader != nullptr) {
+      delete class_loader->GetClassTable();
+    }
+    vm->DeleteWeakGlobalRef(self, weak_root);
   }
   class_loaders_.clear();
 }
@@ -2371,25 +2375,21 @@ void ClassLinker::LoadClass(Thread* self,
     }
   }
 
-LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
-                                                               LinearAlloc* allocator,
-                                                               size_t length) {
+LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self, size_t length) {
   if (length == 0) {
     return nullptr;
   }
   // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
   static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
   size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
-  void* array_storage = allocator->Alloc(self, storage_size);
+  void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
   auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
   CHECK(ret != nullptr);
   std::uninitialized_fill_n(&ret->At(0), length, ArtField());
   return ret;
 }
 
-LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
-                                                                 LinearAlloc* allocator,
-                                                                 size_t length) {
+LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self, size_t length) {
   if (length == 0) {
     return nullptr;
   }
@@ -2397,7 +2397,7 @@ LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
   const size_t method_size = ArtMethod::Size(image_pointer_size_);
   const size_t storage_size =
       LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
-  void* array_storage = allocator->Alloc(self, storage_size);
+  void* array_storage = Runtime::Current()->GetLinearAlloc()->Alloc(self, storage_size);
   auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
   CHECK(ret != nullptr);
   for (size_t i = 0; i < length; ++i) {
@@ -2406,15 +2406,6 @@ LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
   return ret;
 }
 
-LinearAlloc* ClassLinker::GetAllocatorForClassLoader(mirror::ClassLoader* class_loader) {
-  if (class_loader == nullptr) {
-    return Runtime::Current()->GetLinearAlloc();
-  }
-  LinearAlloc* allocator = class_loader->GetAllocator();
-  DCHECK(allocator != nullptr);
-  return allocator;
-}
-
 void ClassLinker::LoadClassMembers(Thread* self,
                                    const DexFile& dex_file,
                                    const uint8_t* class_data,
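[Editor's note: the hunks above route all field/method array allocations through the single runtime-wide LinearAlloc. For readers unfamiliar with the idiom, here is a minimal, self-contained sketch of a length-prefixed array built with placement new into bump-allocated storage. Every name below is an illustrative stand-in, not ART's LengthPrefixedArray or LinearAlloc API.]

#include <cstddef>
#include <cstdlib>
#include <memory>
#include <new>

template <typename T>
struct LengthPrefixed {
  size_t length;

  // Elements live immediately after the header in the same allocation.
  T* At(size_t i) { return reinterpret_cast<T*>(this + 1) + i; }

  // Stand-in for AllocArtFieldArray/AllocArtMethodArray: one block holds the
  // length header plus `length` default-constructed elements.
  static LengthPrefixed<T>* Create(size_t length) {
    if (length == 0) {
      return nullptr;  // Mirrors the early-out in the hunks above.
    }
    const size_t storage_size = sizeof(LengthPrefixed<T>) + length * sizeof(T);
    void* storage = std::malloc(storage_size);  // Stand-in for LinearAlloc::Alloc.
    auto* ret = new (storage) LengthPrefixed<T>{length};
    std::uninitialized_fill_n(ret->At(0), length, T());  // Construct elements in place.
    return ret;
  }
};

[The real code additionally threads an explicit element size and alignment through ComputeSize/At, because ArtMethod's size depends on image_pointer_size_; the sketch assumes sizeof(T) suffices.]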
@@ -2427,11 +2418,8 @@ void ClassLinker::LoadClassMembers(Thread* self,
   // Load static fields.
   // We allow duplicate definitions of the same field in a class_data_item
   // but ignore the repeated indexes here, b/21868015.
-  LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
   ClassDataItemIterator it(dex_file, class_data);
-  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
-                                                              allocator,
-                                                              it.NumStaticFields());
+  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, it.NumStaticFields());
   size_t num_sfields = 0;
   uint32_t last_field_idx = 0u;
   for (; it.HasNextStaticField(); it.Next()) {
@@ -2447,9 +2435,7 @@ void ClassLinker::LoadClassMembers(Thread* self,
   klass->SetSFieldsPtr(sfields);
   DCHECK_EQ(klass->NumStaticFields(), num_sfields);
   // Load instance fields.
-  LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
-                                                              allocator,
-                                                              it.NumInstanceFields());
+  LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self, it.NumInstanceFields());
   size_t num_ifields = 0u;
   last_field_idx = 0u;
   for (; it.HasNextInstanceField(); it.Next()) {
@@ -2472,8 +2458,8 @@ void ClassLinker::LoadClassMembers(Thread* self,
   klass->SetIFieldsPtr(ifields);
   DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
   // Load methods.
-  klass->SetDirectMethodsPtr(AllocArtMethodArray(self, allocator, it.NumDirectMethods()));
-  klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, allocator, it.NumVirtualMethods()));
+  klass->SetDirectMethodsPtr(AllocArtMethodArray(self, it.NumDirectMethods()));
+  klass->SetVirtualMethodsPtr(AllocArtMethodArray(self, it.NumVirtualMethods()));
   size_t class_def_method_index = 0;
   uint32_t last_dex_method_index = DexFile::kDexNoIndex;
   size_t last_class_def_method_index = 0;
@@ -3045,7 +3031,7 @@ void ClassLinker::MoveClassTableToPreZygote() {
   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
   boot_class_table_.FreezeSnapshot();
   MoveClassTableToPreZygoteVisitor visitor;
-  VisitClassLoaders(&visitor);
+  VisitClassLoadersAndRemoveClearedLoaders(&visitor);
 }
 
 mirror::Class* ClassLinker::LookupClassFromImage(const char* descriptor) {
@@ -3428,12 +3414,9 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   mirror::Class* existing = InsertClass(descriptor.c_str(), klass.Get(), hash);
   CHECK(existing == nullptr);
 
-  // Needs to be after we insert the class so that the allocator field is set.
-  LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
-
   // Instance fields are inherited, but we add a couple of static fields...
   const size_t num_fields = 2;
-  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
+  LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, num_fields);
   klass->SetSFieldsPtr(sfields);
 
   // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
@@ -3450,7 +3433,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
 
   // Proxies have 1 direct method, the constructor
-  LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, allocator, 1);
+  LengthPrefixedArray<ArtMethod>* directs = AllocArtMethodArray(self, 1);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
   // want to throw OOM in the future.
   if (UNLIKELY(directs == nullptr)) {
@@ -3465,7 +3448,7 @@ mirror::Class* ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable&
   DCHECK_EQ(h_methods->GetClass(), mirror::Method::ArrayClass())
       << PrettyClass(h_methods->GetClass());
   const size_t num_virtual_methods = h_methods->GetLength();
-  auto* virtuals = AllocArtMethodArray(self, allocator, num_virtual_methods);
+  auto* virtuals = AllocArtMethodArray(self, num_virtual_methods);
   // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
   // want to throw OOM in the future.
   if (UNLIKELY(virtuals == nullptr)) {
@@ -4183,14 +4166,9 @@ ClassTable* ClassLinker::InsertClassTableForClassLoader(mirror::ClassLoader* cla
   if (class_table == nullptr) {
     class_table = new ClassTable;
     Thread* const self = Thread::Current();
-    ClassLoaderData data;
-    data.weak_root = self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader);
-    data.class_table = class_table;
-    data.allocator = Runtime::Current()->CreateLinearAlloc();
-    class_loaders_.push_back(data);
+    class_loaders_.push_back(self->GetJniEnv()->vm->AddWeakGlobalRef(self, class_loader));
     // Don't already have a class table, add it to the class loader.
-    class_loader->SetClassTable(data.class_table);
-    class_loader->SetAllocator(data.allocator);
+    class_loader->SetClassTable(class_table);
   }
   return class_table;
 }
@@ -6180,10 +6158,7 @@ jobject ClassLinker::CreatePathClassLoader(Thread* self, std::vector<const DexFi
 ArtMethod* ClassLinker::CreateRuntimeMethod() {
   const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
   const size_t method_size = ArtMethod::Size(image_pointer_size_);
-  LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(
-      Thread::Current(),
-      Runtime::Current()->GetLinearAlloc(),
-      1);
+  LengthPrefixedArray<ArtMethod>* method_array = AllocArtMethodArray(Thread::Current(), 1);
   ArtMethod* method = &method_array->At(0, method_size, method_alignment);
   CHECK(method != nullptr);
   method->SetDexMethodIndex(DexFile::kDexNoIndex);
@@ -6196,34 +6171,33 @@ void ClassLinker::DropFindArrayClassCache() {
   find_array_class_cache_next_victim_ = 0;
 }
 
-void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
+void ClassLinker::VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor) {
   Thread* const self = Thread::Current();
+  Locks::classlinker_classes_lock_->AssertExclusiveHeld(self);
   JavaVMExt* const vm = self->GetJniEnv()->vm;
-  for (const ClassLoaderData& data : class_loaders_) {
-    auto* const class_loader = down_cast<mirror::ClassLoader*>(
-        vm->DecodeWeakGlobal(self, data.weak_root));
+  for (auto it = class_loaders_.begin(); it != class_loaders_.end();) {
+    const jweak weak_root = *it;
+    mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
+        vm->DecodeWeakGlobal(self, weak_root));
     if (class_loader != nullptr) {
       visitor->Visit(class_loader);
+      ++it;
+    } else {
+      // Remove the cleared weak reference from the array.
+      vm->DeleteWeakGlobalRef(self, weak_root);
+      it = class_loaders_.erase(it);
     }
   }
 }
 
-void ClassLinker::CleanupClassLoaders() {
+void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
   Thread* const self = Thread::Current();
-  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
-  JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-  for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
-    const ClassLoaderData& data = *it;
-    auto* const class_loader = down_cast<mirror::ClassLoader*>(
-        vm->DecodeWeakGlobal(self, data.weak_root));
+  JavaVMExt* const vm = self->GetJniEnv()->vm;
+  for (jweak weak_root : class_loaders_) {
+    mirror::ClassLoader* const class_loader = down_cast<mirror::ClassLoader*>(
+        vm->DecodeWeakGlobal(self, weak_root));
     if (class_loader != nullptr) {
-      ++it;
-    } else {
-      // Weak reference was cleared, delete the data associated with this class loader.
-      delete data.class_table;
-      delete data.allocator;
-      vm->DeleteWeakGlobalRef(self, data.weak_root);
-      it = class_loaders_.erase(it);
+      visitor->Visit(class_loader);
     }
   }
 }
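[Editor's note: the restored VisitClassLoadersAndRemoveClearedLoaders folds cleanup into traversal, so no separate CleanupClassLoaders pass is needed. A minimal sketch of that visit-and-prune idiom, using std::weak_ptr as a stand-in for JNI weak globals (lock() plays the role of DecodeWeakGlobal; names are illustrative, not ART's):]

#include <list>
#include <memory>

// Visit the targets of the weak handles that are still live and, in the same
// pass, drop the handles whose referents have been collected.
template <typename T, typename Visitor>
void VisitAndPrune(std::list<std::weak_ptr<T>>& roots, Visitor&& visit) {
  for (auto it = roots.begin(); it != roots.end();) {
    if (std::shared_ptr<T> strong = it->lock()) {  // ~ DecodeWeakGlobal().
      visit(*strong);
      ++it;
    } else {
      it = roots.erase(it);  // ~ DeleteWeakGlobalRef() + list erase.
    }
  }
}

[Because erasing invalidates the current iterator only, advancing with the result of erase() keeps the single-pass loop safe, which is exactly the shape of the restored ART code.]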
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index f705330b14..fee706625b 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -403,13 +403,9 @@ class ClassLinker {
       SHARED_REQUIRES(Locks::mutator_lock_)
       REQUIRES(!Roles::uninterruptible_);
 
-  LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self,
-                                                    LinearAlloc* allocator,
-                                                    size_t length);
+  LengthPrefixedArray<ArtField>* AllocArtFieldArray(Thread* self, size_t length);
 
-  LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self,
-                                                      LinearAlloc* allocator,
-                                                      size_t length);
+  LengthPrefixedArray<ArtMethod>* AllocArtMethodArray(Thread* self, size_t length);
 
   mirror::PointerArray* AllocPointerArray(Thread* self, size_t length)
       SHARED_REQUIRES(Locks::mutator_lock_)
@@ -550,24 +546,17 @@ class ClassLinker {
   // entries are roots, but potentially not image classes.
   void DropFindArrayClassCache() SHARED_REQUIRES(Locks::mutator_lock_);
 
-  // Clean up class loaders, this needs to happen after JNI weak globals are cleared.
-  void CleanupClassLoaders()
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(!Locks::classlinker_classes_lock_);
-
-  static LinearAlloc* GetAllocatorForClassLoader(mirror::ClassLoader* class_loader)
-      SHARED_REQUIRES(Locks::mutator_lock_);
-
  private:
-  struct ClassLoaderData {
-    jobject weak_root;  // Weak root to enable class unloading.
-    ClassTable* class_table;
-    LinearAlloc* allocator;
-  };
-
+  // The RemoveClearedLoaders version removes cleared weak global class loaders and frees their
+  // class tables. It may only be called with the classlinker_classes_lock_ held exclusively,
+  // since it modifies the class_loaders_ list.
+  void VisitClassLoadersAndRemoveClearedLoaders(ClassLoaderVisitor* visitor)
+      REQUIRES(Locks::classlinker_classes_lock_)
+      SHARED_REQUIRES(Locks::mutator_lock_);
   void VisitClassLoaders(ClassLoaderVisitor* visitor) const
       SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
 
   void VisitClassesInternal(ClassVisitor* visitor)
       SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
@@ -837,8 +826,8 @@ class ClassLinker {
   std::vector<const OatFile*> oat_files_ GUARDED_BY(dex_lock_);
 
   // This contains the class loaders which have class tables. It is populated by
-  // InsertClassTableForClassLoader.
-  std::list<ClassLoaderData> class_loaders_
+  // InsertClassTableForClassLoader. Weak roots to enable class unloading.
+  std::list<jweak> class_loaders_
       GUARDED_BY(Locks::classlinker_classes_lock_);
 
   // Boot class path table. Since the class loader for this is null.
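[Editor's note: the REQUIRES/SHARED_REQUIRES split above is what lets the compiler enforce the exclusive-vs-shared locking contract. A compile-checkable sketch with -Wthread-safety, using macro definitions along the lines of (but not identical to) ART's base/macros.h; all names here are illustrative:]

#if defined(__clang__)
#define CAPABILITY(x)        __attribute__((capability(x)))
#define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
#define SHARED_REQUIRES(...) __attribute__((requires_shared_capability(__VA_ARGS__)))
#else
#define CAPABILITY(x)
#define REQUIRES(...)
#define SHARED_REQUIRES(...)
#endif

class CAPABILITY("mutex") ClassesLock {};
ClassesLock classlinker_classes_lock;

// Mutates the loader list, so the lock must be held exclusively.
void VisitClassLoadersAndRemoveClearedLoaders() REQUIRES(classlinker_classes_lock);

// Read-only traversal; a shared (reader) hold is enough.
void VisitClassLoaders() SHARED_REQUIRES(classlinker_classes_lock);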
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 0926ce3f6a..b4ea3b3460 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -550,7 +550,6 @@ struct StackTraceElementOffsets : public CheckOffsets<mirror::StackTraceElement>
 struct ClassLoaderOffsets : public CheckOffsets<mirror::ClassLoader> {
   ClassLoaderOffsets() : CheckOffsets<mirror::ClassLoader>(false, "Ljava/lang/ClassLoader;") {
-    addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, allocator_), "allocator");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, class_table_), "classTable");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, packages_), "packages");
     addOffset(OFFSETOF_MEMBER(mirror::ClassLoader, parent_), "parent");
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 468179c9d5..399591b93d 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -457,8 +457,6 @@ void ConcurrentCopying::MarkingPhase() {
   CheckEmptyMarkStack();
   // Re-enable weak ref accesses.
   ReenableWeakRefAccess(self);
-  // Free data for class loaders that we unloaded.
-  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Marking is done. Disable marking.
   DisableMarking();
   CheckEmptyMarkStack();
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index f561764ce4..60f833b349 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -205,7 +205,6 @@ void MarkCompact::MarkingPhase() {
     ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
     SweepSystemWeaks();
   }
-  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
   // before they are properly counted.
   RevokeAllThreadLocalBuffers();
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 2d1f3120bf..089f453888 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -283,15 +283,11 @@ void MarkSweep::MarkReachableObjects() {
 
 void MarkSweep::ReclaimPhase() {
   TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
-  Thread* const self = Thread::Current();
+  Thread* self = Thread::Current();
   // Process the references concurrently.
   ProcessReferences(self);
   SweepSystemWeaks(self);
-  Runtime* const runtime = Runtime::Current();
-  runtime->AllowNewSystemWeaks();
-  // Clean up class loaders after system weaks are swept since that is how we know if class
-  // unloading occurred.
-  runtime->GetClassLinker()->CleanupClassLoaders();
+  Runtime::Current()->AllowNewSystemWeaks();
   {
     WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
     GetHeap()->RecordFreeRevoke();
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 7f57f30b27..ed63ed049f 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -248,7 +248,6 @@ void SemiSpace::MarkingPhase() {
     ReaderMutexLock mu(self_, *Locks::heap_bitmap_lock_);
     SweepSystemWeaks();
   }
-  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
   // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
   // before they are properly counted.
   RevokeAllThreadLocalBuffers();
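[Editor's note: every collector hunk above drops its CleanupClassLoaders call right after SweepSystemWeaks. The ordering mattered because sweeping is what clears the weak globals that signal an unloaded class loader. A minimal sketch of that sweep step, with illustrative stand-in types rather than ART's weak table machinery:]

#include <vector>

struct Object {
  bool marked;  // Set by the GC's marking phase for objects that survive.
};

// Stand-in for SweepSystemWeaks: after marking, null out every weak entry
// whose referent did not survive. A subsequent DecodeWeakGlobal-style lookup
// then observes null, which is how cleared class loaders are detected.
void SweepWeakTable(std::vector<Object*>& weak_table) {
  for (Object*& entry : weak_table) {
    if (entry != nullptr && !entry->marked) {
      entry = nullptr;  // The weak reference is now "cleared".
    }
  }
}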
diff --git a/runtime/jit/jit_code_cache_test.cc b/runtime/jit/jit_code_cache_test.cc
index c76dc1110a..a6cbb710af 100644
--- a/runtime/jit/jit_code_cache_test.cc
+++ b/runtime/jit/jit_code_cache_test.cc
@@ -49,11 +49,8 @@ TEST_F(JitCodeCacheTest, TestCoverage) {
   ASSERT_TRUE(reserved_code != nullptr);
   ASSERT_TRUE(code_cache->ContainsCodePtr(reserved_code));
   ASSERT_EQ(code_cache->NumMethods(), 1u);
-  Runtime* const runtime = Runtime::Current();
-  ClassLinker* const class_linker = runtime->GetClassLinker();
-  ArtMethod* method = &class_linker->AllocArtMethodArray(soa.Self(),
-                                                         runtime->GetLinearAlloc(),
-                                                         1)->At(0);
+  ClassLinker* const cl = Runtime::Current()->GetClassLinker();
+  ArtMethod* method = &cl->AllocArtMethodArray(soa.Self(), 1)->At(0);
   ASSERT_FALSE(code_cache->ContainsMethod(method));
   method->SetEntryPointFromQuickCompiledCode(reserved_code);
   ASSERT_TRUE(code_cache->ContainsMethod(method));
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index c2a65d62e2..f27b6155ce 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -35,31 +35,18 @@ class MANAGED ClassLoader : public Object {
   static constexpr uint32_t InstanceSize() {
     return sizeof(ClassLoader);
   }
-
   ClassLoader* GetParent() SHARED_REQUIRES(Locks::mutator_lock_) {
     return GetFieldObject<ClassLoader>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, parent_));
   }
-
   ClassTable* GetClassTable() SHARED_REQUIRES(Locks::mutator_lock_) {
     return reinterpret_cast<ClassTable*>(
         GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_)));
   }
-
   void SetClassTable(ClassTable* class_table) SHARED_REQUIRES(Locks::mutator_lock_) {
     SetField64<false>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, class_table_),
                       reinterpret_cast<uint64_t>(class_table));
   }
-
-  LinearAlloc* GetAllocator() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return reinterpret_cast<LinearAlloc*>(
-        GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_)));
-  }
-
-  void SetAllocator(LinearAlloc* allocator) SHARED_REQUIRES(Locks::mutator_lock_) {
-    SetField64<false>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_),
-                      reinterpret_cast<uint64_t>(allocator));
-  }
-
  private:
   // Visit instance fields of the class loader as well as its associated classes.
   // Null class loader is handled by ClassLinker::VisitClassRoots.
@@ -74,7 +61,6 @@ class MANAGED ClassLoader : public Object {
   HeapReference<Object> proxyCache_;
   // Native pointer to class table, need to zero this out when image writing.
   uint32_t padding_ ATTRIBUTE_UNUSED;
-  uint64_t allocator_;
   uint64_t class_table_;
 
   friend struct art::ClassLoaderOffsets;  // for verifying offset information
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index ccfc4bcaaf..6b144cf48b 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -944,8 +944,10 @@ bool Runtime::Init(const RuntimeOptions& raw_options, bool ignore_unrecognized)
   if (IsCompiler() && Is64BitInstructionSet(kRuntimeISA)) {
     // 4gb, no malloc. Explanation in header.
     low_4gb_arena_pool_.reset(new ArenaPool(false, true));
+    linear_alloc_.reset(new LinearAlloc(low_4gb_arena_pool_.get()));
+  } else {
+    linear_alloc_.reset(new LinearAlloc(arena_pool_.get()));
   }
-  linear_alloc_.reset(CreateLinearAlloc());
 
   BlockSignals();
   InitPlatformSignalHandlers();
@@ -1786,10 +1788,4 @@ bool Runtime::IsVerificationSoftFail() const {
   return verify_ == verifier::VerifyMode::kSoftFail;
 }
 
-LinearAlloc* Runtime::CreateLinearAlloc() {
-  return (IsCompiler() && Is64BitInstructionSet(kRuntimeISA))
-      ? new LinearAlloc(low_4gb_arena_pool_.get())
-      : new LinearAlloc(arena_pool_.get());
-}
-
 }  // namespace art
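[Editor's note: with CreateLinearAlloc gone, the pool-selection branch now lives inline in Runtime::Init. A sketch of that selection logic under stand-in types (not ART's exact ArenaPool/LinearAlloc constructors); the low-4GB pool exists so that, when a 64-bit AOT compiler generates the boot image, native metadata pointers can still be encoded in 32 bits, per the explanation the removed header comment points to:]

struct ArenaPool {
  explicit ArenaPool(bool low_4gb) : low_4gb_(low_4gb) {}
  bool low_4gb_;
};

struct LinearAlloc {
  explicit LinearAlloc(ArenaPool* pool) : pool_(pool) {}
  ArenaPool* pool_;
};

// Mirrors the branch the diff inlines into Runtime::Init: only a 64-bit AOT
// compiler gets the low-4GB backing pool; everything else uses the normal one.
LinearAlloc* MakeLinearAlloc(bool is_compiler, bool is_64bit,
                             ArenaPool* low_4gb_pool, ArenaPool* normal_pool) {
  return new LinearAlloc((is_compiler && is_64bit) ? low_4gb_pool : normal_pool);
}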
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 6154c34ec5..a35eac1af8 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -570,9 +570,6 @@ class Runtime {
   // Called from class linker.
   void SetSentinel(mirror::Object* sentinel) SHARED_REQUIRES(Locks::mutator_lock_);
 
-  // Create a normal LinearAlloc or low 4gb version if we are 64 bit AOT compiler.
-  LinearAlloc* CreateLinearAlloc();
-
  private:
   static void InitPlatformSignalHandlers();
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 7f72f8ab61..d739743151 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -840,30 +840,23 @@ void StackVisitor::SanityCheckFrame() const {
   } else {
     CHECK(declaring_class == nullptr);
   }
-  Runtime* const runtime = Runtime::Current();
-  LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
-  if (!linear_alloc->Contains(method)) {
-    // Check class linker linear allocs.
-    mirror::Class* klass = method->GetDeclaringClass();
-    LinearAlloc* const class_linear_alloc = (klass != nullptr)
-        ? ClassLinker::GetAllocatorForClassLoader(klass->GetClassLoader())
-        : linear_alloc;
-    if (!class_linear_alloc->Contains(method)) {
-      // Check image space.
-      bool in_image = false;
-      for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
-        if (space->IsImageSpace()) {
-          auto* image_space = space->AsImageSpace();
-          const auto& header = image_space->GetImageHeader();
-          const auto* methods = &header.GetMethodsSection();
-          if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
-            in_image = true;
-            break;
-          }
+  auto* runtime = Runtime::Current();
+  auto* la = runtime->GetLinearAlloc();
+  if (!la->Contains(method)) {
+    // Check image space.
+    bool in_image = false;
+    for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
+      if (space->IsImageSpace()) {
+        auto* image_space = space->AsImageSpace();
+        const auto& header = image_space->GetImageHeader();
+        const auto* methods = &header.GetMethodsSection();
+        if (methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin())) {
+          in_image = true;
+          break;
+        }
       }
     }
-      CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
-    }
+    CHECK(in_image) << PrettyMethod(method) << " not in linear alloc or image";
   }
   if (cur_quick_frame_ != nullptr) {
     method->AssertPcIsWithinQuickCode(cur_quick_frame_pc_);
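[Editor's note: the simplified sanity check above reduces to two tests — is the method in the runtime's LinearAlloc, or inside the image's methods section? The latter is an offset-range test against the image base. A sketch with illustrative stand-in types (not ART's ImageSection):]

#include <cstdint>

// Is a byte offset (relative to the image begin address) inside this section?
struct Section {
  uint64_t offset;  // Section start, relative to the image base.
  uint64_t size;

  bool Contains(uint64_t byte_offset) const {
    return byte_offset >= offset && byte_offset - offset < size;
  }
};

bool MethodInImage(const void* method, const uint8_t* image_begin, const Section& methods) {
  // Same shape as the check in SanityCheckFrame:
  // methods->Contains(reinterpret_cast<const uint8_t*>(method) - image_space->Begin()).
  const uint64_t byte_offset =
      static_cast<uint64_t>(reinterpret_cast<const uint8_t*>(method) - image_begin);
  return methods.Contains(byte_offset);
}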