summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--runtime/class_linker.cc86
-rw-r--r--runtime/class_linker.h57
-rw-r--r--runtime/entrypoints/entrypoint_utils.cc5
-rw-r--r--test/Android.run-test.mk15
4 files changed, 108 insertions, 55 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 0746e0ca06..ad436d0d91 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1804,6 +1804,7 @@ void ClassLinker::VisitClasses(ClassVisitor* visitor, void* arg) {
if (dex_cache_image_class_lookup_required_) {
MoveImageClassesToClassTable();
}
+ // TODO: why isn't this a ReaderMutexLock?
WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
for (std::pair<const size_t, GcRoot<mirror::Class> >& it : class_table_) {
mirror::Class* c = it.second.Read();
@@ -1813,18 +1814,75 @@ void ClassLinker::VisitClasses(ClassVisitor* visitor, void* arg) {
}
}
-static bool GetClassesVisitor(mirror::Class* c, void* arg) {
+static bool GetClassesVisitorSet(mirror::Class* c, void* arg) {
std::set<mirror::Class*>* classes = reinterpret_cast<std::set<mirror::Class*>*>(arg);
classes->insert(c);
return true;
}
+struct GetClassesVisitorArrayArg {
+ Handle<mirror::ObjectArray<mirror::Class>>* classes;
+ int32_t index;
+ bool success;
+};
+
+static bool GetClassesVisitorArray(mirror::Class* c, void* varg)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ GetClassesVisitorArrayArg* arg = reinterpret_cast<GetClassesVisitorArrayArg*>(varg);
+ if (arg->index < (*arg->classes)->GetLength()) {
+ (*arg->classes)->Set(arg->index, c);
+ arg->index++;
+ return true;
+ } else {
+ arg->success = false;
+ return false;
+ }
+}
+
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor, void* arg) {
- std::set<mirror::Class*> classes;
- VisitClasses(GetClassesVisitor, &classes);
- for (mirror::Class* klass : classes) {
- if (!visitor(klass, arg)) {
- return;
+ // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
+ // is avoiding duplicates.
+ if (!kMovingClasses) {
+ std::set<mirror::Class*> classes;
+ VisitClasses(GetClassesVisitorSet, &classes);
+ for (mirror::Class* klass : classes) {
+ if (!visitor(klass, arg)) {
+ return;
+ }
+ }
+ } else {
+ Thread* self = Thread::Current();
+ StackHandleScope<1> hs(self);
+ Handle<mirror::ObjectArray<mirror::Class>> classes =
+ hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
+ GetClassesVisitorArrayArg local_arg;
+ local_arg.classes = &classes;
+ local_arg.success = false;
+ // We size the array assuming classes won't be added to the class table during the visit.
+ // If this assumption fails we iterate again.
+ while (!local_arg.success) {
+ size_t class_table_size;
+ {
+ ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
+ class_table_size = class_table_.size();
+ }
+ mirror::Class* class_type = mirror::Class::GetJavaLangClass();
+ mirror::Class* array_of_class = FindArrayClass(self, &class_type);
+ classes.Assign(
+ mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
+ CHECK(classes.Get() != nullptr); // OOME.
+ local_arg.index = 0;
+ local_arg.success = true;
+ VisitClasses(GetClassesVisitorArray, &local_arg);
+ }
+ for (int32_t i = 0; i < classes->GetLength(); ++i) {
+      // If the class table shrank during creation of the classes array we expect null elements. If
+ // the class table grew then the loop repeats. If classes are created after the loop has
+ // finished then we don't visit.
+ mirror::Class* klass = classes->Get(i);
+ if (klass != nullptr && !visitor(klass, arg)) {
+ return;
+ }
}
}
}
@@ -2309,7 +2367,9 @@ const OatFile::OatMethod ClassLinker::FindOatMethodFor(mirror::ArtMethod* method
size_t end = declaring_class->NumVirtualMethods();
bool found = false;
for (size_t i = 0; i < end; i++) {
- if (declaring_class->GetVirtualMethod(i) == method) {
+ // Check method index instead of identity in case of duplicate method definitions.
+ if (method->GetDexMethodIndex() ==
+ declaring_class->GetVirtualMethod(i)->GetDexMethodIndex()) {
found = true;
break;
}
@@ -2716,6 +2776,8 @@ void ClassLinker::LoadClassMembers(const DexFile& dex_file,
klass->SetVirtualMethods(virtuals);
}
size_t class_def_method_index = 0;
+ uint32_t last_dex_method_index = DexFile::kDexNoIndex;
+ size_t last_class_def_method_index = 0;
for (size_t i = 0; it.HasNextDirectMethod(); i++, it.Next()) {
StackHandleScope<1> hs(self);
Handle<mirror::ArtMethod> method(hs.NewHandle(LoadMethod(self, dex_file, it, klass)));
@@ -2725,7 +2787,15 @@ void ClassLinker::LoadClassMembers(const DexFile& dex_file,
}
klass->SetDirectMethod(i, method.Get());
LinkCode(method, oat_class, dex_file, it.GetMemberIndex(), class_def_method_index);
- method->SetMethodIndex(class_def_method_index);
+ uint32_t it_method_index = it.GetMemberIndex();
+ if (last_dex_method_index == it_method_index) {
+ // duplicate case
+ method->SetMethodIndex(last_class_def_method_index);
+ } else {
+ method->SetMethodIndex(class_def_method_index);
+ last_dex_method_index = it_method_index;
+ last_class_def_method_index = class_def_method_index;
+ }
class_def_method_index++;
}
for (size_t i = 0; it.HasNextVirtualMethod(); i++, it.Next()) {
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index a7a68b7546..7750c8ec4e 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -237,12 +237,14 @@ class ClassLinker {
}
void VisitClasses(ClassVisitor* visitor, void* arg)
- LOCKS_EXCLUDED(dex_lock_)
+ LOCKS_EXCLUDED(Locks::classlinker_classes_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- // Less efficient variant of VisitClasses that doesn't hold the classlinker_classes_lock_
- // when calling the visitor.
+
+  // Less efficient variant of VisitClasses that copies the class_table_ into secondary storage
+  // so that it can visit individual classes without holding the
+  // Locks::classlinker_classes_lock_. As the Locks::classlinker_classes_lock_ isn't held this code
+  // can race with insertion and deletion of classes while the visitor is being called.
void VisitClassesWithoutClassesLock(ClassVisitor* visitor, void* arg)
- LOCKS_EXCLUDED(dex_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void VisitClassRoots(RootCallback* callback, void* arg, VisitRootFlags flags)
@@ -623,29 +625,6 @@ class ClassLinker {
ConstHandle<mirror::ArtMethod> prototype)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- std::vector<const DexFile*> boot_class_path_;
-
- mutable ReaderWriterMutex dex_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
- std::vector<size_t> new_dex_cache_roots_ GUARDED_BY(dex_lock_);;
- std::vector<GcRoot<mirror::DexCache>> dex_caches_ GUARDED_BY(dex_lock_);
- std::vector<const OatFile*> oat_files_ GUARDED_BY(dex_lock_);
-
-
- // multimap from a string hash code of a class descriptor to
- // mirror::Class* instances. Results should be compared for a matching
- // Class::descriptor_ and Class::class_loader_.
- typedef AllocationTrackingMultiMap<size_t, GcRoot<mirror::Class>, kAllocatorTagClassTable> Table;
- // This contains strong roots. To enable concurrent root scanning of
- // the class table, be careful to use a read barrier when accessing this.
- Table class_table_ GUARDED_BY(Locks::classlinker_classes_lock_);
- std::vector<std::pair<size_t, GcRoot<mirror::Class>>> new_class_roots_;
-
- // Do we need to search dex caches to find image classes?
- bool dex_cache_image_class_lookup_required_;
- // Number of times we've searched dex caches for a class. After a certain number of misses we move
- // the classes into the class_table_ to avoid dex cache based searches.
- Atomic<uint32_t> failed_dex_cache_class_lookups_;
-
mirror::Class* LookupClassFromTableLocked(const char* descriptor,
const mirror::ClassLoader* class_loader,
size_t hash)
@@ -656,6 +635,7 @@ class ClassLinker {
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
void MoveImageClassesToClassTable() LOCKS_EXCLUDED(Locks::classlinker_classes_lock_)
+ LOCKS_EXCLUDED(Locks::classlinker_classes_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
mirror::Class* LookupClassFromImage(const char* descriptor)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -672,6 +652,29 @@ class ClassLinker {
void FixupTemporaryDeclaringClass(mirror::Class* temp_class, mirror::Class* new_class)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ std::vector<const DexFile*> boot_class_path_;
+
+ mutable ReaderWriterMutex dex_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
+ std::vector<size_t> new_dex_cache_roots_ GUARDED_BY(dex_lock_);;
+ std::vector<GcRoot<mirror::DexCache>> dex_caches_ GUARDED_BY(dex_lock_);
+ std::vector<const OatFile*> oat_files_ GUARDED_BY(dex_lock_);
+
+
+ // multimap from a string hash code of a class descriptor to
+ // mirror::Class* instances. Results should be compared for a matching
+ // Class::descriptor_ and Class::class_loader_.
+ typedef AllocationTrackingMultiMap<size_t, GcRoot<mirror::Class>, kAllocatorTagClassTable> Table;
+ // This contains strong roots. To enable concurrent root scanning of
+ // the class table, be careful to use a read barrier when accessing this.
+ Table class_table_ GUARDED_BY(Locks::classlinker_classes_lock_);
+ std::vector<std::pair<size_t, GcRoot<mirror::Class>>> new_class_roots_;
+
+ // Do we need to search dex caches to find image classes?
+ bool dex_cache_image_class_lookup_required_;
+ // Number of times we've searched dex caches for a class. After a certain number of misses we move
+ // the classes into the class_table_ to avoid dex cache based searches.
+ Atomic<uint32_t> failed_dex_cache_class_lookups_;
+
// indexes into class_roots_.
// needs to be kept in sync with class_roots_descriptors_.
enum ClassRoot {
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index e1c532ece4..d834d4d115 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -114,11 +114,6 @@ void ThrowStackOverflowError(Thread* self) {
// We don't fail here because SetStackEndForStackOverflow will print better diagnostics.
}
- if (Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()) {
- // Remove extra entry pushed onto second stack during method tracing.
- Runtime::Current()->GetInstrumentation()->PopMethodForUnwind(self, false);
- }
-
self->SetStackEndForStackOverflow(); // Allow space on the stack for constructor to execute.
JNIEnvExt* env = self->GetJniEnv();
std::string msg("stack size ");
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index 7da57dd950..773a9501f2 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -193,21 +193,6 @@ endif
TEST_ART_BROKEN_NO_RELOCATE_TESTS :=
-# Tests that are broken with tracing.
-TEST_ART_BROKEN_TRACE_RUN_TESTS := \
- 004-SignalTest \
- 018-stack-overflow \
- 097-duplicate-method \
- 107-int-math2
-
-ifneq (,$(filter trace,$(TRACE_TYPES)))
- ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(PREBUILD_TYPES), \
- $(COMPILER_TYPES),$(RELOCATE_TYPES),trace,$(GC_TYPES),$(JNI_TYPES), \
- $(IMAGE_TYPES), $(TEST_ART_BROKEN_TRACE_RUN_TESTS), $(ALL_ADDRESS_SIZES))
-endif
-
-TEST_ART_BROKEN_TRACE_RUN_TESTS :=
-
# Tests that are broken with GC stress.
TEST_ART_BROKEN_GCSTRESS_RUN_TESTS := \
004-SignalTest