author    | 2021-02-25 15:41:33 +0000
committer | 2021-04-28 08:11:45 +0000
commit    | 1dab575cd635706f03d510763cb4ea1b115a2cee (patch)
tree      | cc885f34c0c4c139e9095ee840e3936662243581
parent    | 854725b10087bfe86b5a794093e88786ff7b9f28 (diff)
Remove some dead code around ClassTable.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 175869411
Change-Id: Ifb3ac102129470aff6eb228e5523b240c113818f
-rw-r--r-- | runtime/class_linker.cc     |  5
-rw-r--r-- | runtime/class_linker.h      |  5
-rw-r--r-- | runtime/class_table-inl.h   |  7
-rw-r--r-- | runtime/class_table.cc      | 33
-rw-r--r-- | runtime/class_table.h       | 12
-rw-r--r-- | runtime/class_table_test.cc |  9
6 files changed, 12 insertions, 59 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index de2fc0df69..82b644d8e7 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2093,11 +2093,6 @@ bool ClassLinker::AddImageSpace(
   return true;
 }
 
-bool ClassLinker::ClassInClassTable(ObjPtr<mirror::Class> klass) {
-  ClassTable* const class_table = ClassTableForClassLoader(klass->GetClassLoader());
-  return class_table != nullptr && class_table->Contains(klass);
-}
-
 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
   // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
   // enabling tracing requires the mutator lock, there are no race conditions here.
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index d6e34ffed1..a1b0b29d26 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -688,11 +688,6 @@ class ClassLinker {
     return image_pointer_size_;
   }
 
-  // Used by image writer for checking.
-  bool ClassInClassTable(ObjPtr<mirror::Class> klass)
-      REQUIRES(Locks::classlinker_classes_lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Clear the ArrayClass cache. This is necessary when cleaning up for the image, as the cache
   // entries are roots, but potentially not image classes.
   void DropFindArrayClassCache() REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_table-inl.h b/runtime/class_table-inl.h
index 3645b647c4..8e44ee3620 100644
--- a/runtime/class_table-inl.h
+++ b/runtime/class_table-inl.h
@@ -181,6 +181,13 @@ inline void ClassTable::RemoveStrongRoots(const Filter& filter) {
                       strong_roots_.end());
 }
 
+inline ObjPtr<mirror::Class> ClassTable::LookupByDescriptor(ObjPtr<mirror::Class> klass) {
+  std::string temp;
+  const char* descriptor = klass->GetDescriptor(&temp);
+  uint32_t hash = TableSlot::HashDescriptor(klass);
+  return Lookup(descriptor, hash);
+}
+
 }  // namespace art
 
 #endif  // ART_RUNTIME_CLASS_TABLE_INL_H_
diff --git a/runtime/class_table.cc b/runtime/class_table.cc
index 288e312bb1..03921a132a 100644
--- a/runtime/class_table.cc
+++ b/runtime/class_table.cc
@@ -33,22 +33,6 @@ void ClassTable::FreezeSnapshot() {
   classes_.push_back(ClassSet());
 }
 
-bool ClassTable::Contains(ObjPtr<mirror::Class> klass) {
-  return LookupByDescriptor(klass) == klass;
-}
-
-ObjPtr<mirror::Class> ClassTable::LookupByDescriptor(ObjPtr<mirror::Class> klass) {
-  ReaderMutexLock mu(Thread::Current(), lock_);
-  TableSlot slot(klass);
-  for (ClassSet& class_set : classes_) {
-    auto it = class_set.find(slot);
-    if (it != class_set.end()) {
-      return it->Read();
-    }
-  }
-  return nullptr;
-}
-
 ObjPtr<mirror::Class> ClassTable::UpdateClass(const char* descriptor,
                                               ObjPtr<mirror::Class> klass,
                                               size_t hash) {
@@ -127,23 +111,8 @@ ObjPtr<mirror::Class> ClassTable::Lookup(const char* descriptor, size_t hash) {
   return nullptr;
 }
 
-ObjPtr<mirror::Class> ClassTable::TryInsert(ObjPtr<mirror::Class> klass) {
-  TableSlot slot(klass);
-  WriterMutexLock mu(Thread::Current(), lock_);
-  for (ClassSet& class_set : classes_) {
-    auto it = class_set.find(slot);
-    if (it != class_set.end()) {
-      return it->Read();
-    }
-  }
-  classes_.back().insert(slot);
-  return klass;
-}
-
 void ClassTable::Insert(ObjPtr<mirror::Class> klass) {
-  const uint32_t hash = TableSlot::HashDescriptor(klass);
-  WriterMutexLock mu(Thread::Current(), lock_);
-  classes_.back().InsertWithHash(TableSlot(klass, hash), hash);
+  InsertWithHash(klass, TableSlot::HashDescriptor(klass));
 }
 
 void ClassTable::InsertWithHash(ObjPtr<mirror::Class> klass, size_t hash) {
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 375954ac18..dfae1fd9e4 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -142,11 +142,6 @@ class ClassTable {
 
   ClassTable();
 
-  // Used by image writer for checking.
-  bool Contains(ObjPtr<mirror::Class> klass)
-      REQUIRES(!lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   // Freeze the current class tables by allocating a new table and never updating or modifying the
   // existing table. This helps prevents dirty pages after caused by inserting after zygote fork.
   void FreezeSnapshot()
@@ -209,16 +204,11 @@ class ClassTable {
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Return the first class that matches the descriptor of klass. Returns null if there are none.
+  // Used for tests and debug-build checks.
   ObjPtr<mirror::Class> LookupByDescriptor(ObjPtr<mirror::Class> klass)
      REQUIRES(!lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Try to insert a class and return the inserted class if successful. If another class
-  // with the same descriptor is already in the table, return the existing entry.
-  ObjPtr<mirror::Class> TryInsert(ObjPtr<mirror::Class> klass)
-      REQUIRES(!lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   void Insert(ObjPtr<mirror::Class> klass)
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_table_test.cc b/runtime/class_table_test.cc
index 642e10ac46..d9f53ed6a2 100644
--- a/runtime/class_table_test.cc
+++ b/runtime/class_table_test.cc
@@ -100,12 +100,9 @@ TEST_F(ClassTableTest, ClassTable) {
 
   // Test inserting and related lookup functions.
   EXPECT_TRUE(table.LookupByDescriptor(h_Y.Get()) == nullptr);
-  EXPECT_FALSE(table.Contains(h_Y.Get()));
   table.Insert(h_Y.Get());
   EXPECT_OBJ_PTR_EQ(table.LookupByDescriptor(h_X.Get()), h_X.Get());
   EXPECT_OBJ_PTR_EQ(table.LookupByDescriptor(h_Y.Get()), h_Y.Get());
-  EXPECT_TRUE(table.Contains(h_X.Get()));
-  EXPECT_TRUE(table.Contains(h_Y.Get()));
 
   EXPECT_EQ(table.NumZygoteClasses(class_loader.Get()), 1u);
   EXPECT_EQ(table.NumNonZygoteClasses(class_loader.Get()), 1u);
@@ -142,7 +139,7 @@ TEST_F(ClassTableTest, ClassTable) {
 
   // Test remove.
   table.Remove(descriptor_x);
-  EXPECT_FALSE(table.Contains(h_X.Get()));
+  EXPECT_TRUE(table.LookupByDescriptor(h_X.Get()) == nullptr);
 
   // Test that reading a class set from memory works.
   table.Insert(h_X.Get());
@@ -158,8 +155,8 @@ TEST_F(ClassTableTest, ClassTable) {
   size_t count2 = table2.ReadFromMemory(&buffer[0]);
   EXPECT_EQ(count, count2);
   // Strong roots are not serialized, only classes.
-  EXPECT_TRUE(table2.Contains(h_X.Get()));
-  EXPECT_TRUE(table2.Contains(h_Y.Get()));
+  EXPECT_OBJ_PTR_EQ(table2.LookupByDescriptor(h_X.Get()), h_X.Get());
+  EXPECT_OBJ_PTR_EQ(table2.LookupByDescriptor(h_Y.Get()), h_Y.Get());
 
   // TODO: Add tests for UpdateClass, InsertOatFile.
 }
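
Migration note: the removed ClassTable::Contains() was a thin wrapper that compared LookupByDescriptor(klass) against klass itself, so any remaining caller can express the same membership check directly, exactly as the updated test assertions above do. A minimal sketch under the assumption of an ART source tree and a caller that already holds the mutator lock; the helper name IsClassInTable is hypothetical and not part of this change:

// Sketch only: reproduces the behavior of the removed ClassTable::Contains().
// Assumes ART-internal headers; IsClassInTable is a hypothetical helper name.
#include "class_table-inl.h"
#include "mirror/class.h"

namespace art {

// Returns true if klass is present in table. Caller must hold the mutator lock
// (shared), mirroring the REQUIRES_SHARED contract on LookupByDescriptor().
inline bool IsClassInTable(ClassTable& table, ObjPtr<mirror::Class> klass) {
  // The removed Contains() was literally LookupByDescriptor(klass) == klass:
  // the first class with a matching descriptor must be this exact class object.
  return table.LookupByDescriptor(klass) == klass;
}

}  // namespace art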