-rw-r--r--  openjdkjvmti/ti_redefine.cc                 5
-rw-r--r--  runtime/class_linker.cc                    38
-rw-r--r--  runtime/class_linker.h                      2
-rw-r--r--  runtime/class_linker_test.cc                3
-rw-r--r--  runtime/gc/collector/mark_compact-inl.h    39
-rw-r--r--  runtime/gc/collector/mark_compact.cc       59
-rw-r--r--  runtime/gc/collector/mark_compact.h        49
-rw-r--r--  runtime/mirror/array-inl.h                  2
-rw-r--r--  runtime/mirror/class-inl.h                133
-rw-r--r--  runtime/mirror/class-refvisitor-inl.h      24
-rw-r--r--  runtime/mirror/class.cc                   126
-rw-r--r--  runtime/mirror/class.h                     53
-rw-r--r--  runtime/mirror/class_ext.cc                 2
-rw-r--r--  runtime/mirror/class_loader.h               3
-rw-r--r--  runtime/mirror/dex_cache-inl.h              4
-rw-r--r--  runtime/mirror/object-inl.h                86
-rw-r--r--  runtime/mirror/object-refvisitor-inl.h      5
-rw-r--r--  runtime/mirror/object.h                    18
-rw-r--r--  runtime/mirror/reference-inl.h              2
-rw-r--r--  runtime/mirror/string-inl.h                 2
-rw-r--r--  runtime/oat/oat.h                           4
-rw-r--r--  runtime/runtime_image.cc                   12
22 files changed, 405 insertions(+), 266 deletions(-)
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index 3d0c05734c..0ba528a0a7 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -2031,13 +2031,14 @@ uint32_t Redefiner::ClassRedefinition::GetNewClassSize(art::ClassAccessor& acces
}
}
- return art::mirror::Class::ComputeClassSize(/*has_embedded_vtable=*/ false,
- /*num_vtable_entries=*/ 0,
+ return art::mirror::Class::ComputeClassSize(/*has_embedded_vtable=*/false,
+ /*num_vtable_entries=*/0,
num_8bit_static_fields,
num_16bit_static_fields,
num_32bit_static_fields,
num_64bit_static_fields,
num_ref_static_fields,
+ /*num_ref_bitmap_entries=*/0,
art::kRuntimePointerSize);
}
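A minimal sketch of the new `reference_instance_offsets_` encoding used throughout this commit; the decoder struct and names below are illustrative, not part of the change:

#include <cstdint>

constexpr uint32_t kVisitReferencesSlowpathMask = 1u << 31;  // mirrors Class::kVisitReferencesSlowpathMask

// If the most-significant bit is clear, the word is itself a bitmap of up to
// 31 reference-field slots; otherwise the remaining bits hold the number of
// 32-bit bitmap words stored at the end of the class object.
struct DecodedRefOffsets {
  bool overflow;           // true => walk the trailing overflow bitmap
  uint32_t inline_bitmap;  // valid when !overflow
  uint32_t num_words;      // valid when overflow
};

inline DecodedRefOffsets Decode(uint32_t ref_offsets) {
  if ((ref_offsets & kVisitReferencesSlowpathMask) != 0) {
    return {/*overflow=*/true, 0u, ref_offsets & ~kVisitReferencesSlowpathMask};
  }
  return {/*overflow=*/false, ref_offsets, 0u};
}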
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index d0fdcf0c25..dd78be3105 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -3686,13 +3686,14 @@ uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
UNREACHABLE();
}
}
- return mirror::Class::ComputeClassSize(false,
- 0,
+ return mirror::Class::ComputeClassSize(/*has_embedded_vtable=*/false,
+ /*num_vtable_entries=*/0,
num_8,
num_16,
num_32,
num_64,
num_ref,
+ /*num_ref_bitmap_entries=*/0,
image_pointer_size_);
}
@@ -6335,7 +6336,8 @@ bool ClassLinker::LinkClass(Thread* self,
if (!LinkStaticFields(self, klass, &class_size)) {
return false;
}
- CreateReferenceInstanceOffsets(klass);
+ class_size =
+ mirror::Class::AdjustClassSizeForReferenceOffsetBitmapDuringLinking(klass.Get(), class_size);
CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());
ImTable* imt = nullptr;
@@ -6377,6 +6379,7 @@ bool ClassLinker::LinkClass(Thread* self,
if (klass->ShouldHaveEmbeddedVTable()) {
klass->PopulateEmbeddedVTable(image_pointer_size_);
+ klass->PopulateReferenceOffsetBitmap();
}
if (klass->ShouldHaveImt()) {
klass->SetImt(imt, image_pointer_size_);
@@ -9882,35 +9885,6 @@ bool ClassLinker::VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::
return true;
}
-// Set the bitmap of reference instance field offsets.
-void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
- uint32_t reference_offsets = 0;
- ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
- // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
- if (super_class != nullptr) {
- reference_offsets = super_class->GetReferenceInstanceOffsets();
- // Compute reference offsets unless our superclass overflowed.
- if (reference_offsets != mirror::Class::kClassWalkSuper) {
- size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
- if (num_reference_fields != 0u) {
- // All of the fields that contain object references are guaranteed be grouped in memory
- // starting at an appropriately aligned address after super class object data.
- uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
- sizeof(mirror::HeapReference<mirror::Object>));
- uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
- sizeof(mirror::HeapReference<mirror::Object>);
- if (start_bit + num_reference_fields > 32) {
- reference_offsets = mirror::Class::kClassWalkSuper;
- } else {
- reference_offsets |= (0xffffffffu << start_bit) &
- (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
- }
- }
- }
- }
- klass->SetReferenceInstanceOffsets(reference_offsets);
-}
-
ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
ObjPtr<mirror::DexCache> dex_cache) {
StackHandleScope<1> hs(Thread::Current());
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 0fe1aaf7a3..bcc0986d18 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -1309,8 +1309,6 @@ class ClassLinker {
REQUIRES_SHARED(Locks::mutator_lock_);
bool VerifyRecordClass(Handle<mirror::Class> klass, ObjPtr<mirror::Class> super)
REQUIRES_SHARED(Locks::mutator_lock_);
- void CreateReferenceInstanceOffsets(Handle<mirror::Class> klass)
- REQUIRES_SHARED(Locks::mutator_lock_);
void CheckProxyConstructor(ArtMethod* constructor) const
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 932341d895..18f4b297ac 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -395,7 +395,8 @@ class ClassLinkerTest : public CommonRuntimeTest {
k = k->GetSuperClass();
}
EXPECT_GE(total_num_reference_instance_fields, 1U); // Should always have Object's class.
- if (klass->GetReferenceInstanceOffsets() != mirror::Class::kClassWalkSuper) {
+ if ((klass->GetReferenceInstanceOffsets() & mirror::Class::kVisitReferencesSlowpathMask) == 0 &&
+ klass->ShouldHaveEmbeddedVTable()) {
// The reference instance offsets have a bit set for each reference offset.
// +1 for Object's class.
EXPECT_EQ(static_cast<uint32_t>(POPCOUNT(klass->GetReferenceInstanceOffsets())) + 1,
diff --git a/runtime/gc/collector/mark_compact-inl.h b/runtime/gc/collector/mark_compact-inl.h
index 24d47638b1..447dd256d0 100644
--- a/runtime/gc/collector/mark_compact-inl.h
+++ b/runtime/gc/collector/mark_compact-inl.h
@@ -27,40 +27,11 @@ namespace collector {
inline void MarkCompact::UpdateClassAfterObjectMap(mirror::Object* obj) {
mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
- // Track a class if it needs walking super-classes for visiting references or
- // if it's higher in address order than its objects and is in moving space.
- if (UNLIKELY(
- (std::less<mirror::Object*>{}(obj, klass) && HasAddress(klass)) ||
- (klass->GetReferenceInstanceOffsets<kVerifyNone>() == mirror::Class::kClassWalkSuper &&
- walk_super_class_cache_ != klass))) {
- // Since this function gets invoked in the compaction pause as well, it is
- // preferable to store such super class separately rather than updating key
- // as the latter would require traversing the hierarchy for every object of 'klass'.
- auto ret1 = class_after_obj_hash_map_.try_emplace(ObjReference::FromMirrorPtr(klass),
- ObjReference::FromMirrorPtr(obj));
- if (ret1.second) {
- if (klass->GetReferenceInstanceOffsets<kVerifyNone>() == mirror::Class::kClassWalkSuper) {
- // In this case we require traversing through the super class hierarchy
- // and find the super class at the highest address order.
- mirror::Class* highest_klass = HasAddress(klass) ? klass : nullptr;
- for (ObjPtr<mirror::Class> k = klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>();
- k != nullptr;
- k = k->GetSuperClass<kVerifyNone, kWithoutReadBarrier>()) {
- // TODO: Can we break once we encounter a super class outside the moving space?
- if (HasAddress(k.Ptr())) {
- highest_klass = std::max(highest_klass, k.Ptr(), std::less<mirror::Class*>());
- }
- }
- if (highest_klass != nullptr && highest_klass != klass) {
- auto ret2 = super_class_after_class_hash_map_.try_emplace(
- ObjReference::FromMirrorPtr(klass), ObjReference::FromMirrorPtr(highest_klass));
- DCHECK(ret2.second);
- } else {
- walk_super_class_cache_ = klass;
- }
- }
- } else if (std::less<mirror::Object*>{}(obj, ret1.first->second.AsMirrorPtr())) {
- ret1.first->second = ObjReference::FromMirrorPtr(obj);
+ if (UNLIKELY(std::less<mirror::Object*>{}(obj, klass) && HasAddress(klass))) {
+ auto [iter, success] = class_after_obj_map_.try_emplace(ObjReference::FromMirrorPtr(klass),
+ ObjReference::FromMirrorPtr(obj));
+ if (!success && std::less<mirror::Object*>{}(obj, iter->second.AsMirrorPtr())) {
+ iter->second = ObjReference::FromMirrorPtr(obj);
}
}
}
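With the overflow bitmap in place, reference visiting no longer walks the super-class chain, so the super-class bookkeeping removed above becomes unnecessary; only the direct class-after-object case remains. A condensed sketch of the invariant the simplified map maintains (types simplified, names hypothetical):

#include <functional>
#include <map>

using Addr = const void*;
// class -> lowest-address object of that class seen so far
std::map<Addr, Addr, std::less<Addr>> class_after_obj;

void Track(Addr obj, Addr klass) {
  // Only classes at a higher address than one of their objects matter: their
  // from-space pages must survive until that lowest object is compacted.
  if (std::less<Addr>{}(obj, klass)) {
    auto [iter, inserted] = class_after_obj.try_emplace(klass, obj);
    if (!inserted && std::less<Addr>{}(obj, iter->second)) {
      iter->second = obj;  // keep the minimum per class
    }
  }
}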
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 5e5242bac4..2a1bbc4738 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -709,7 +709,6 @@ void MarkCompact::InitializePhase() {
black_allocations_begin_ = bump_pointer_space_->Limit();
CHECK_EQ(moving_space_begin_, bump_pointer_space_->Begin());
moving_space_end_ = bump_pointer_space_->Limit();
- walk_super_class_cache_ = nullptr;
// TODO: Would it suffice to read it once in the constructor, which is called
// in zygote process?
pointer_size_ = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
@@ -1386,16 +1385,14 @@ void MarkCompact::CompactPage(mirror::Object* obj,
}
obj_size = RoundUp(obj_size, kAlignment);
DCHECK_GT(obj_size, offset_within_obj)
- << "obj:" << obj
- << " class:"
- << obj->GetClass<kDefaultVerifyFlags, kWithFromSpaceBarrier>()
+ << "obj:" << obj << " class:" << obj->GetClass<kDefaultVerifyFlags, kWithFromSpaceBarrier>()
<< " to_addr:" << to_ref
<< " black-allocation-begin:" << reinterpret_cast<void*>(black_allocations_begin_)
<< " post-compact-end:" << reinterpret_cast<void*>(post_compact_end_)
- << " offset:" << offset * kAlignment
- << " class-after-obj-iter:"
- << (class_after_obj_iter_ != class_after_obj_ordered_map_.rend() ?
- class_after_obj_iter_->first.AsMirrorPtr() : nullptr)
+ << " offset:" << offset * kAlignment << " class-after-obj-iter:"
+ << (class_after_obj_iter_ != class_after_obj_map_.rend() ?
+ class_after_obj_iter_->first.AsMirrorPtr() :
+ nullptr)
<< " last-reclaimed-page:" << reinterpret_cast<void*>(last_reclaimed_page_)
<< " last-checked-reclaim-page-idx:" << last_checked_reclaim_page_idx_
<< " offset-of-last-idx:"
@@ -1450,16 +1447,15 @@ void MarkCompact::CompactPage(mirror::Object* obj,
obj_size = RoundUp(obj_size, kAlignment);
DCHECK_GT(obj_size, 0u)
<< "from_addr:" << obj
- << " from-space-class:"
- << obj->GetClass<kDefaultVerifyFlags, kWithFromSpaceBarrier>()
+ << " from-space-class:" << obj->GetClass<kDefaultVerifyFlags, kWithFromSpaceBarrier>()
<< " to_addr:" << ref
<< " black-allocation-begin:" << reinterpret_cast<void*>(black_allocations_begin_)
<< " post-compact-end:" << reinterpret_cast<void*>(post_compact_end_)
- << " offset:" << offset * kAlignment
- << " bytes_done:" << bytes_done
+ << " offset:" << offset * kAlignment << " bytes_done:" << bytes_done
<< " class-after-obj-iter:"
- << (class_after_obj_iter_ != class_after_obj_ordered_map_.rend() ?
- class_after_obj_iter_->first.AsMirrorPtr() : nullptr)
+ << (class_after_obj_iter_ != class_after_obj_map_.rend() ?
+ class_after_obj_iter_->first.AsMirrorPtr() :
+ nullptr)
<< " last-reclaimed-page:" << reinterpret_cast<void*>(last_reclaimed_page_)
<< " last-checked-reclaim-page-idx:" << last_checked_reclaim_page_idx_
<< " offset-of-last-idx:"
@@ -1926,7 +1922,7 @@ bool MarkCompact::FreeFromSpacePages(size_t cur_page_idx, int mode, size_t end_i
DCHECK_ALIGNED_PARAM(reclaim_begin, gPageSize);
DCHECK_ALIGNED_PARAM(last_reclaimed_page_, gPageSize);
// Check if the 'class_after_obj_map_' map allows pages to be freed.
- for (; class_after_obj_iter_ != class_after_obj_ordered_map_.rend(); class_after_obj_iter_++) {
+ for (; class_after_obj_iter_ != class_after_obj_map_.rend(); class_after_obj_iter_++) {
mirror::Object* klass = class_after_obj_iter_->first.AsMirrorPtr();
mirror::Class* from_klass = static_cast<mirror::Class*>(GetFromSpaceAddr(klass));
// Check with class' end to ensure that, if required, the entire class survives.
@@ -1934,10 +1930,7 @@ bool MarkCompact::FreeFromSpacePages(size_t cur_page_idx, int mode, size_t end_i
DCHECK_LE(klass_end, last_reclaimed_page_);
if (reinterpret_cast<uint8_t*>(klass_end) >= reclaim_begin) {
// Found a class which is in the reclaim range.
- uint8_t* obj_addr = reinterpret_cast<uint8_t*>(class_after_obj_iter_->second.AsMirrorPtr());
- // NOTE: Don't assert that obj is of 'klass' type as klass could instead
- // be its super-class.
- if (obj_addr < idx_addr) {
+ if (reinterpret_cast<uint8_t*>(class_after_obj_iter_->second.AsMirrorPtr()) < idx_addr) {
// Its lowest-address object is not compacted yet. Reclaim starting from
// the end of this class.
reclaim_begin = AlignUp(klass_end, gPageSize);
@@ -1987,29 +1980,6 @@ bool MarkCompact::FreeFromSpacePages(size_t cur_page_idx, int mode, size_t end_i
return all_mapped;
}
-void MarkCompact::UpdateClassAfterObjMap() {
- CHECK(class_after_obj_ordered_map_.empty());
- for (const auto& pair : class_after_obj_hash_map_) {
- auto super_class_iter = super_class_after_class_hash_map_.find(pair.first);
- ObjReference key = super_class_iter != super_class_after_class_hash_map_.end()
- ? super_class_iter->second
- : pair.first;
- if (std::less<mirror::Object*>{}(pair.second.AsMirrorPtr(), key.AsMirrorPtr()) &&
- HasAddress(key.AsMirrorPtr())) {
- auto [ret_iter, success] = class_after_obj_ordered_map_.try_emplace(key, pair.second);
- // It could fail only if the class 'key' has objects of its own, which are lower in
- // address order, as well of some of its derived class. In this case
- // choose the lowest address object.
- if (!success &&
- std::less<mirror::Object*>{}(pair.second.AsMirrorPtr(), ret_iter->second.AsMirrorPtr())) {
- ret_iter->second = pair.second;
- }
- }
- }
- class_after_obj_hash_map_.clear();
- super_class_after_class_hash_map_.clear();
-}
-
template <int kMode>
void MarkCompact::CompactMovingSpace(uint8_t* page) {
// For every page we have a starting object, which may have started in some
@@ -2030,13 +2000,12 @@ void MarkCompact::CompactMovingSpace(uint8_t* page) {
DCHECK(IsAlignedParam(pre_compact_page, gPageSize));
- UpdateClassAfterObjMap();
// These variables are maintained by FreeFromSpacePages().
last_reclaimed_page_ = pre_compact_page;
last_reclaimable_page_ = last_reclaimed_page_;
cur_reclaimable_page_ = last_reclaimed_page_;
last_checked_reclaim_page_idx_ = idx;
- class_after_obj_iter_ = class_after_obj_ordered_map_.rbegin();
+ class_after_obj_iter_ = class_after_obj_map_.rbegin();
// Allocated-black pages
mirror::Object* next_page_first_obj = nullptr;
while (idx > moving_first_objs_count_) {
@@ -4140,7 +4109,7 @@ void MarkCompact::FinishPhase() {
updated_roots_->clear();
}
}
- class_after_obj_ordered_map_.clear();
+ class_after_obj_map_.clear();
linear_alloc_arenas_.clear();
{
ReaderMutexLock mu(thread_running_gc_, *Locks::mutator_lock_);
diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h
index 6f02877148..0ea8fb56e0 100644
--- a/runtime/gc/collector/mark_compact.h
+++ b/runtime/gc/collector/mark_compact.h
@@ -21,7 +21,6 @@
#include <map>
#include <memory>
-#include <unordered_map>
#include <unordered_set>
#include "barrier.h"
@@ -538,12 +537,6 @@ class MarkCompact final : public GarbageCollector {
ALWAYS_INLINE void UpdateClassAfterObjectMap(mirror::Object* obj)
REQUIRES_SHARED(Locks::mutator_lock_);
- // Updates 'class_after_obj_map_' map by updating the keys (class) with its
- // highest-address super-class (obtained from 'super_class_after_class_map_'),
- // if there is any. This is to ensure we don't free from-space pages before
- // the lowest-address obj is compacted.
- void UpdateClassAfterObjMap();
-
void MarkZygoteLargeObjects() REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::heap_bitmap_lock_);
@@ -647,54 +640,22 @@ class MarkCompact final : public GarbageCollector {
uint8_t* begin_;
uint8_t* end_;
};
-
std::vector<LinearAllocSpaceData> linear_alloc_spaces_data_;
- class ObjReferenceHash {
- public:
- uint32_t operator()(const ObjReference& ref) const {
- return ref.AsVRegValue() >> kObjectAlignmentShift;
- }
- };
-
- class ObjReferenceEqualFn {
- public:
- bool operator()(const ObjReference& a, const ObjReference& b) const {
- return a.AsMirrorPtr() == b.AsMirrorPtr();
- }
- };
-
class LessByObjReference {
public:
bool operator()(const ObjReference& a, const ObjReference& b) const {
return std::less<mirror::Object*>{}(a.AsMirrorPtr(), b.AsMirrorPtr());
}
};
-
- // Data structures used to track objects whose layout information is stored in later
- // allocated classes (at higher addresses). We must be careful not to free the
- // corresponding from-space pages prematurely.
- using ObjObjOrderedMap = std::map<ObjReference, ObjReference, LessByObjReference>;
- using ObjObjUnorderedMap =
- std::unordered_map<ObjReference, ObjReference, ObjReferenceHash, ObjReferenceEqualFn>;
- // Unordered map of <K, S> such that the class K (in moving space) has kClassWalkSuper
- // in reference bitmap and S is its highest address super class.
- ObjObjUnorderedMap super_class_after_class_hash_map_;
- // Unordered map of <K, V> such that the class K (in moving space) is after its objects
- // or would require iterating super-class hierarchy when visiting references. And V is
- // its lowest address object (in moving space).
- ObjObjUnorderedMap class_after_obj_hash_map_;
- // Ordered map constructed before starting compaction using the above two maps. Key is a
- // class (or super-class) which is higher in address order than some of its object(s) and
- // value is the corresponding object with lowest address.
- ObjObjOrderedMap class_after_obj_ordered_map_;
+ using ClassAfterObjectMap = std::map<ObjReference, ObjReference, LessByObjReference>;
+ // Map of <K, V> such that class K (in moving space) lies at a higher address
+ // than some of its objects, and V is the lowest-address such object (in moving space).
+ ClassAfterObjectMap class_after_obj_map_;
// Since the compaction is done in reverse, we use a reverse iterator. It is maintained
// either at the pair whose class is lower than the first page to be freed, or at the
// pair whose object is not yet compacted.
- ObjObjOrderedMap::const_reverse_iterator class_after_obj_iter_;
- // Cached reference to the last class which has kClassWalkSuper in reference
- // bitmap but has all its super classes lower address order than itself.
- mirror::Class* walk_super_class_cache_;
+ ClassAfterObjectMap::const_reverse_iterator class_after_obj_iter_;
// Used by FreeFromSpacePages() for maintaining markers in the moving space for
// how far the pages have been reclaimed (madvised) and checked.
//
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 40c1c2e6b4..1898aca12f 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -34,7 +34,7 @@ namespace mirror {
inline uint32_t Array::ClassSize(PointerSize pointer_size) {
uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, 0, pointer_size);
}
template <VerifyObjectFlags kVerifyFlags>
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index b2c8e52351..482faec5f9 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -657,7 +657,7 @@ inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(PointerSize pointe
if (ShouldHaveEmbeddedVTable<kVerifyFlags>()) {
// Static fields come after the embedded tables.
base = Class::ComputeClassSize(
- true, GetEmbeddedVTableLength<kVerifyFlags>(), 0, 0, 0, 0, 0, pointer_size);
+ true, GetEmbeddedVTableLength<kVerifyFlags>(), 0, 0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
}
@@ -668,8 +668,8 @@ inline MemberOffset Class::GetFirstReferenceStaticFieldOffsetDuringLinking(
uint32_t base = sizeof(Class); // Static fields come after the class.
if (ShouldHaveEmbeddedVTable()) {
// Static fields come after the embedded tables.
- base = Class::ComputeClassSize(true, GetVTableDuringLinking()->GetLength(),
- 0, 0, 0, 0, 0, pointer_size);
+ base = Class::ComputeClassSize(
+ true, GetVTableDuringLinking()->GetLength(), 0, 0, 0, 0, 0, 0, pointer_size);
}
return MemberOffset(base);
}
@@ -757,6 +757,126 @@ inline size_t Class::GetPrimitiveTypeSizeShift() {
return size_shift;
}
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline void Class::VerifyOverflowReferenceBitmap() {
+ // Can't reliably access super-classes during CMC compaction.
+ if (Runtime::Current()->GetHeap()->IsPerformingUffdCompaction()) {
+ return;
+ }
+ CHECK(!IsVariableSize<kVerifyFlags>());
+ ObjPtr<Class> klass;
+ ObjPtr<mirror::Class> super_class;
+ size_t num_bits =
+ (RoundUp(GetObjectSize<kVerifyFlags>(), sizeof(mirror::HeapReference<mirror::Object>)) -
+ mirror::kObjectHeaderSize) /
+ sizeof(mirror::HeapReference<mirror::Object>);
+ std::vector<bool> check_bitmap(num_bits, false);
+ for (klass = this; klass != nullptr; klass = super_class) {
+ super_class = klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>();
+ if (klass->NumReferenceInstanceFields<kVerifyFlags>() != 0) {
+ break;
+ }
+ }
+
+ if (super_class != nullptr) {
+ std::vector<ObjPtr<Class>> klasses;
+ for (; klass != nullptr; klass = super_class) {
+ super_class = klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>();
+ if (super_class != nullptr) {
+ klasses.push_back(klass);
+ }
+ }
+
+ for (auto iter = klasses.rbegin(); iter != klasses.rend(); iter++) {
+ klass = *iter;
+ size_t idx = (klass->GetFirstReferenceInstanceFieldOffset<kVerifyFlags, kReadBarrierOption>()
+ .Uint32Value() -
+ mirror::kObjectHeaderSize) /
+ sizeof(mirror::HeapReference<mirror::Object>);
+ uint32_t num_refs = klass->NumReferenceInstanceFields<kVerifyFlags>();
+ for (uint32_t i = 0; i < num_refs; i++) {
+ check_bitmap[idx++] = true;
+ }
+ CHECK_LE(idx, num_bits) << PrettyClass();
+ }
+ }
+
+ uint32_t ref_offsets =
+ GetField32<kVerifyFlags>(OFFSET_OF_OBJECT_MEMBER(Class, reference_instance_offsets_));
+ CHECK_NE(ref_offsets, 0u) << PrettyClass();
+ CHECK((ref_offsets & kVisitReferencesSlowpathMask) != 0) << PrettyClass();
+ uint32_t bitmap_num_words = ref_offsets & ~kVisitReferencesSlowpathMask;
+ uint32_t* overflow_bitmap = reinterpret_cast<uint32_t*>(
+ reinterpret_cast<uint8_t*>(this) +
+ (GetClassSize<kVerifyFlags>() - bitmap_num_words * sizeof(uint32_t)));
+ for (uint32_t i = 0, field_offset = 0; i < bitmap_num_words; i++, field_offset += 32) {
+ ref_offsets = overflow_bitmap[i];
+ uint32_t check_bitmap_idx = field_offset;
+ // Confirm that all the bits in check_bitmap that ought to be set, are set.
+ while (ref_offsets != 0) {
+ if ((ref_offsets & 1) != 0) {
+ CHECK(check_bitmap[check_bitmap_idx])
+ << PrettyClass() << " i:" << i << " field_offset:" << field_offset
+ << " check_bitmap_idx:" << check_bitmap_idx << " bitmap_word:" << overflow_bitmap[i];
+ check_bitmap[check_bitmap_idx] = false;
+ }
+ ref_offsets >>= 1;
+ check_bitmap_idx++;
+ }
+ }
+ // Confirm that there is no other bit set.
+ std::ostringstream oss;
+ bool found = false;
+ for (size_t i = 0; i < check_bitmap.size(); i++) {
+ if (check_bitmap[i]) {
+ if (!found) {
+ DumpClass(oss, kDumpClassFullDetail);
+ oss << " set-bits:";
+ }
+ found = true;
+ oss << i << ",";
+ }
+ }
+ if (found) {
+ oss << " stored-bitmap:";
+ for (size_t i = 0; i < bitmap_num_words; i++) {
+ oss << overflow_bitmap[i] << ":";
+ }
+ LOG(FATAL) << oss.str();
+ }
+}
+
+inline size_t Class::AdjustClassSizeForReferenceOffsetBitmapDuringLinking(ObjPtr<Class> klass,
+ size_t class_size) {
+ if (klass->IsInstantiable()) {
+ // Find the first class with a non-zero reference instance field count; together
+ // with its super-class' object size, it determines the required bitmap size.
+ for (ObjPtr<Class> k = klass; k != nullptr; k = k->GetSuperClass()) {
+ size_t num_reference_fields = k->NumReferenceInstanceFieldsDuringLinking();
+ if (num_reference_fields != 0) {
+ ObjPtr<Class> super = k->GetSuperClass();
+ // Leave it for mirror::Object (the class field is handled specially).
+ if (super != nullptr) {
+ // All of the fields that contain object references are guaranteed to be grouped in
+ // memory starting at an appropriately aligned address after super class object data.
+ uint32_t start_offset =
+ RoundUp(super->GetObjectSize(), sizeof(mirror::HeapReference<mirror::Object>));
+ uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
+ sizeof(mirror::HeapReference<mirror::Object>);
+ if (start_bit + num_reference_fields > 31) {
+ // Alignment that may be required at the end of static fields smaller than 32-bit.
+ class_size = RoundUp(class_size, sizeof(uint32_t));
+ // 32-bit words required for the overflow bitmap.
+ class_size += RoundUp(start_bit + num_reference_fields, 32) / 32 * sizeof(uint32_t);
+ }
+ }
+ break;
+ }
+ }
+ }
+ return class_size;
+}
+
inline uint32_t Class::ComputeClassSize(bool has_embedded_vtable,
uint32_t num_vtable_entries,
uint32_t num_8bit_static_fields,
@@ -764,6 +884,7 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_vtable,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
uint32_t num_ref_static_fields,
+ uint32_t num_ref_bitmap_entries,
PointerSize pointer_size) {
// Space used by java.lang.Class and its instance fields.
uint32_t size = sizeof(Class);
@@ -799,6 +920,12 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_vtable,
// Space used for primitive static fields.
size += num_8bit_static_fields * sizeof(uint8_t) + num_16bit_static_fields * sizeof(uint16_t) +
num_32bit_static_fields * sizeof(uint32_t) + num_64bit_static_fields * sizeof(uint64_t);
+
+ // Space used by reference-offset bitmap.
+ if (num_ref_bitmap_entries > 0) {
+ size = RoundUp(size, sizeof(uint32_t));
+ size += num_ref_bitmap_entries * sizeof(uint32_t);
+ }
return size;
}
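As a worked example of the size adjustment (a hedged sketch; the 40-field hierarchy is invented): with an 8-byte object header and 4-byte heap references, a class whose hierarchy accumulates 40 reference instance fields starting right after the header gets start_bit = 0 and end_bit = 40 > 31, so RoundUp(40, 32) / 32 = 2 overflow words are appended:

#include <cstddef>
#include <cstdint>

constexpr size_t RoundUpTo(size_t x, size_t n) { return (x + n - 1) / n * n; }

// Mirrors the arithmetic in AdjustClassSizeForReferenceOffsetBitmapDuringLinking().
inline size_t AdjustedClassSize(size_t class_size, uint32_t start_bit,
                                uint32_t num_reference_fields) {
  if (start_bit + num_reference_fields > 31) {
    // Re-align after potentially sub-32-bit static fields ...
    class_size = RoundUpTo(class_size, sizeof(uint32_t));
    // ... then append one 32-bit word per 32 bits of bitmap.
    class_size +=
        RoundUpTo(start_bit + num_reference_fields, 32) / 32 * sizeof(uint32_t);
  }
  return class_size;
}
// AdjustedClassSize(s, /*start_bit=*/0, /*num_reference_fields=*/40) adds 8 bytes.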
diff --git a/runtime/mirror/class-refvisitor-inl.h b/runtime/mirror/class-refvisitor-inl.h
index 21ed1cd636..96c218b175 100644
--- a/runtime/mirror/class-refvisitor-inl.h
+++ b/runtime/mirror/class-refvisitor-inl.h
@@ -25,12 +25,32 @@
namespace art HIDDEN {
namespace mirror {
+// NO_THREAD_SAFETY_ANALYSIS for mutator_lock_ and heap_bitmap_lock_, as
+// requirements for these vary depending on the visitor.
+template <VerifyObjectFlags kVerifyFlags, typename Visitor>
+inline void Class::VisitStaticFieldsReferences(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS {
+ DCHECK(!IsTemp<kVerifyFlags>());
+ const size_t num_reference_fields = NumReferenceStaticFields();
+ if (num_reference_fields > 0u) {
+ // Presumably GC can happen when we are cross compiling, so it should not
+ // cause performance problems to do pointer size logic.
+ MemberOffset field_offset = GetFirstReferenceStaticFieldOffset<kVerifyFlags>(
+ Runtime::Current()->GetClassLinker()->GetImagePointerSize());
+ for (size_t i = 0u; i < num_reference_fields; ++i) {
+ DCHECK_NE(field_offset.Uint32Value(), ClassOffset().Uint32Value());
+ visitor(this, field_offset, /*is_static=*/true);
+ field_offset =
+ MemberOffset(field_offset.Uint32Value() + sizeof(mirror::HeapReference<mirror::Object>));
+ }
+ }
+}
+
template <bool kVisitNativeRoots,
VerifyObjectFlags kVerifyFlags,
ReadBarrierOption kReadBarrierOption,
typename Visitor>
inline void Class::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
- VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass.Ptr(), visitor);
+ VisitInstanceFieldsReferences<kVerifyFlags>(klass.Ptr(), visitor);
// Right after a class is allocated, but not yet loaded
// (ClassStatus::kNotReady, see ClassLinker::LoadClass()), GC may find it
// and scan it. IsTemp() may call Class::GetAccessFlags() but may
@@ -42,7 +62,7 @@ inline void Class::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
// Temp classes don't ever populate imt/vtable or static fields and they are not even
// allocated with the right size for those. Also, unresolved classes don't have fields
// linked yet.
- VisitStaticFieldsReferences<kVerifyFlags, kReadBarrierOption>(this, visitor);
+ VisitStaticFieldsReferences<kVerifyFlags>(visitor);
}
if (kVisitNativeRoots) {
// Since this class is reachable, we must also visit the associated roots when we scan it.
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 12fbc36926..43dbc43115 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -451,15 +451,28 @@ void Class::DumpClass(std::ostream& os, int flags) {
}
void Class::SetReferenceInstanceOffsets(uint32_t new_reference_offsets) {
- if (kIsDebugBuild && new_reference_offsets != kClassWalkSuper) {
+ if (kIsDebugBuild) {
// Check that the number of bits set in the reference offset bitmap
// agrees with the number of references.
uint32_t count = 0;
for (ObjPtr<Class> c = this; c != nullptr; c = c->GetSuperClass()) {
count += c->NumReferenceInstanceFieldsDuringLinking();
}
+ uint32_t pop_cnt;
+ if ((new_reference_offsets & kVisitReferencesSlowpathMask) == 0) {
+ pop_cnt = static_cast<uint32_t>(POPCOUNT(new_reference_offsets));
+ } else {
+ uint32_t bitmap_num_words = new_reference_offsets & ~kVisitReferencesSlowpathMask;
+ uint32_t* overflow_bitmap =
+ reinterpret_cast<uint32_t*>(reinterpret_cast<uint8_t*>(this) +
+ (GetClassSize() - bitmap_num_words * sizeof(uint32_t)));
+ pop_cnt = 0;
+ for (uint32_t i = 0; i < bitmap_num_words; i++) {
+ pop_cnt += static_cast<uint32_t>(POPCOUNT(overflow_bitmap[i]));
+ }
+ }
// +1 for the Class in Object.
- CHECK_EQ(static_cast<uint32_t>(POPCOUNT(new_reference_offsets)) + 1, count);
+ CHECK_EQ(pop_cnt + 1, count);
}
// Not called within a transaction.
SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, reference_instance_offsets_),
@@ -1607,6 +1620,114 @@ void Class::PopulateEmbeddedVTable(PointerSize pointer_size) {
}
}
+// Set the bitmap of reference instance field offsets.
+void Class::PopulateReferenceOffsetBitmap() {
+ size_t num_reference_fields;
+ ObjPtr<mirror::Class> super_class;
+ ObjPtr<Class> klass;
+ // Find the first class with non-zero instance reference fields.
+ for (klass = this; klass != nullptr; klass = super_class) {
+ super_class = klass->GetSuperClass();
+ num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
+ if (num_reference_fields != 0) {
+ break;
+ }
+ }
+
+ uint32_t ref_offsets = 0;
+ // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
+ if (super_class != nullptr) {
+ // All of the reference fields added by this class are guaranteed to be grouped in memory
+ // starting at an appropriately aligned address after super class object data.
+ uint32_t start_offset =
+ RoundUp(super_class->GetObjectSize(), sizeof(mirror::HeapReference<mirror::Object>));
+ uint32_t start_bit =
+ (start_offset - mirror::kObjectHeaderSize) / sizeof(mirror::HeapReference<mirror::Object>);
+ uint32_t end_bit = start_bit + num_reference_fields;
+ bool overflowing = end_bit > 31;
+ uint32_t* overflow_bitmap; // Pointer just past the end of the overflow bitmap; words are written at negative indices, last word first.
+ uint32_t overflow_words_to_write; // Number of overflow bitmap words remaining to write.
+ // Index in 'overflow_bitmap' from where to start writing bitmap words (in reverse order).
+ int32_t overflow_bitmap_word_idx;
+ if (overflowing) {
+ // We will write overflow bitmap in reverse.
+ overflow_bitmap =
+ reinterpret_cast<uint32_t*>(reinterpret_cast<uint8_t*>(this) + GetClassSize());
+ DCHECK_ALIGNED(overflow_bitmap, sizeof(uint32_t));
+ overflow_bitmap_word_idx = 0;
+ overflow_words_to_write = RoundUp(end_bit, 32) / 32;
+ }
+ // TODO: Simplify by copying the bitmap from the super-class and then
+ // appending the reference fields added by this class.
+ while (true) {
+ if (UNLIKELY(overflowing)) {
+ // Write all the bitmap words which got skipped between previous
+ // super-class and the current one.
+ for (uint32_t new_words_to_write = RoundUp(end_bit, 32) / 32;
+ overflow_words_to_write > new_words_to_write;
+ overflow_words_to_write--) {
+ overflow_bitmap[--overflow_bitmap_word_idx] = ref_offsets;
+ ref_offsets = 0;
+ }
+ // Handle the references in the current super-class.
+ if (num_reference_fields != 0u) {
+ uint32_t aligned_end_bit = RoundDown(end_bit, 32);
+ uint32_t aligned_start_bit = RoundUp(start_bit, 32);
+ // Handle the case where a class' references are spanning across multiple 32-bit
+ // words of the overflow bitmap.
+ if (aligned_end_bit >= aligned_start_bit) {
+ // Handle the unaligned end first.
+ if (aligned_end_bit < end_bit) {
+ ref_offsets |= 0xffffffffu >> (32 - (end_bit - aligned_end_bit));
+ overflow_bitmap[--overflow_bitmap_word_idx] = ref_offsets;
+ overflow_words_to_write--;
+ ref_offsets = 0;
+ }
+ // Store all the 32-bit bitmap words in between.
+ for (; aligned_end_bit > aligned_start_bit; aligned_end_bit -= 32) {
+ overflow_bitmap[--overflow_bitmap_word_idx] = 0xffffffffu;
+ overflow_words_to_write--;
+ }
+ CHECK_EQ(ref_offsets, 0u);
+ // Handle the unaligned start now.
+ if (aligned_start_bit > start_bit) {
+ ref_offsets = 0xffffffffu << (32 - (aligned_start_bit - start_bit));
+ }
+ } else {
+ DCHECK_EQ(aligned_start_bit - aligned_end_bit, 32u);
+ ref_offsets |= (0xffffffffu << (32 - (aligned_start_bit - start_bit))) &
+ (0xffffffffu >> (32 - (end_bit - aligned_end_bit)));
+ }
+ }
+ } else if (num_reference_fields != 0u) {
+ ref_offsets |= (0xffffffffu << start_bit) & (0xffffffffu >> (32 - end_bit));
+ }
+
+ klass = super_class;
+ super_class = klass->GetSuperClass();
+ if (super_class == nullptr) {
+ break;
+ }
+ num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
+ start_offset =
+ RoundUp(super_class->GetObjectSize(), sizeof(mirror::HeapReference<mirror::Object>));
+ start_bit = (start_offset - mirror::kObjectHeaderSize) /
+ sizeof(mirror::HeapReference<mirror::Object>);
+ end_bit = start_bit + num_reference_fields;
+ }
+ if (overflowing) {
+ // We should not have more than one word left to write in the overflow bitmap.
+ DCHECK_LE(overflow_words_to_write, 1u)
+ << "overflow_bitmap_word_idx:" << -overflow_bitmap_word_idx;
+ if (overflow_words_to_write > 0) {
+ overflow_bitmap[--overflow_bitmap_word_idx] = ref_offsets;
+ }
+ ref_offsets = -overflow_bitmap_word_idx | kVisitReferencesSlowpathMask;
+ }
+ }
+ SetReferenceInstanceOffsets(ref_offsets);
+}
+
class ReadBarrierOnNativeRootsVisitor {
public:
void operator()([[maybe_unused]] ObjPtr<Object> obj,
@@ -1657,6 +1778,7 @@ class CopyClassVisitor {
h_new_class_obj->PopulateEmbeddedVTable(pointer_size_);
h_new_class_obj->SetImt(imt_, pointer_size_);
h_new_class_obj->SetClassSize(new_length_);
+ h_new_class_obj->PopulateReferenceOffsetBitmap();
// Visit all of the references to make sure there is no from space references in the native
// roots.
h_new_class_obj->Object::VisitReferences(ReadBarrierOnNativeRootsVisitor(), VoidFunctor());
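To make the reverse-order fill in PopulateReferenceOffsetBitmap() concrete, a hedged walk-through with an invented hierarchy Object <- A (30 refs) <- B (10 refs):

#include <cassert>
#include <cstdint>

int main() {
  // A's references occupy bits [0, 30); B's occupy bits [30, 40).
  // end_bit = 40 > 31, so two overflow words are written back-to-front:
  uint32_t word1 = 0xffffffffu >> (32 - (40 - 32));  // B's bits 32..39
  uint32_t word0 = (0xffffffffu << 30)               // B's bits 30..31
                 | (0xffffffffu >> 2);               // A's bits 0..29
  assert(word1 == 0x000000ffu);
  assert(word0 == 0xffffffffu);
  // reference_instance_offsets_ ends up as 2 | kVisitReferencesSlowpathMask.
}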
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 17d45b4312..6384bfabc0 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -83,11 +83,12 @@ class EXPORT MANAGED Class final : public Object {
public:
MIRROR_CLASS("Ljava/lang/Class;");
- // A magic value for reference_instance_offsets_. Ignore the bits and walk the super chain when
- // this is the value.
- // [This is an unlikely "natural" value, since it would be 30 non-ref instance fields followed by
- // 2 ref instance fields.]
- static constexpr uint32_t kClassWalkSuper = 0xC0000000;
+ // 'reference_instance_offsets_' may contain up to 31 reference offsets. If
+ // more bits are required, then we set the most-significant bit and store the
+ // number of 32-bit bitmap entries required in the remaining bits. All the
+ // required bitmap entries are stored after the static fields (at the end of the class).
+ static constexpr uint32_t kVisitReferencesSlowpathShift = 31;
+ static constexpr uint32_t kVisitReferencesSlowpathMask = 1u << kVisitReferencesSlowpathShift;
// Shift primitive type by kPrimitiveTypeSizeShiftShift to get the component type size shift
// Used for computing array size as follows:
@@ -566,6 +567,12 @@ class EXPORT MANAGED Class final : public Object {
void SetClassSize(uint32_t new_class_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ // Adjust class-size during linking in case an overflow bitmap for reference
+ // offsets is required.
+ static size_t AdjustClassSizeForReferenceOffsetBitmapDuringLinking(ObjPtr<Class> klass,
+ size_t class_size)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
 // Compute how many bytes would be used by a class with the given elements.
static uint32_t ComputeClassSize(bool has_embedded_vtable,
uint32_t num_vtable_entries,
@@ -574,18 +581,19 @@ class EXPORT MANAGED Class final : public Object {
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
uint32_t num_ref_static_fields,
+ uint32_t num_ref_bitmap_entries,
PointerSize pointer_size);
// The size of java.lang.Class.class.
static uint32_t ClassClassSize(PointerSize pointer_size) {
// The number of vtable entries in java.lang.Class.
uint32_t vtable_entries = Object::kVTableLength + 83;
- return ComputeClassSize(true, vtable_entries, 0, 0, 4, 1, 0, pointer_size);
+ return ComputeClassSize(true, vtable_entries, 0, 0, 4, 1, 0, 0, pointer_size);
}
// The size of a java.lang.Class representing a primitive such as int.class.
static uint32_t PrimitiveClassSize(PointerSize pointer_size) {
- return ComputeClassSize(false, 0, 0, 0, 0, 0, 0, pointer_size);
+ return ComputeClassSize(false, 0, 0, 0, 0, 0, 0, 0, pointer_size);
}
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -896,6 +904,14 @@ class EXPORT MANAGED Class final : public Object {
void PopulateEmbeddedVTable(PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_);
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+ void VerifyOverflowReferenceBitmap() REQUIRES_SHARED(Locks::mutator_lock_);
+ // If the bitmap in `reference_instance_offsets_` was found to be insufficient
+ // during linking (see AdjustClassSizeForReferenceOffsetBitmapDuringLinking()),
+ // then populate the overflow bitmap, which is at the end of the class object.
+ void PopulateReferenceOffsetBitmap() REQUIRES_SHARED(Locks::mutator_lock_);
+
// Given a method implemented by this class but potentially from a super class, return the
// specific implementation method for this class.
ArtMethod* FindVirtualMethodForVirtual(ArtMethod* method, PointerSize pointer_size)
@@ -1435,6 +1451,9 @@ class EXPORT MANAGED Class final : public Object {
// Check that the pointer size matches the one in the class linker.
ALWAYS_INLINE static void CheckPointerSize(PointerSize pointer_size);
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename Visitor>
+ void VisitStaticFieldsReferences(const Visitor& visitor) HOT_ATTR;
+
template <bool kVisitNativeRoots,
VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
@@ -1549,7 +1568,8 @@ class EXPORT MANAGED Class final : public Object {
// TODO: really 16bits
int32_t dex_type_idx_;
- // Number of instance fields that are object refs.
+ // Number of instance fields that are object refs. Does not count object refs
+ // in any super classes.
uint32_t num_reference_instance_fields_;
// Number of static fields that are object refs,
@@ -1583,19 +1603,20 @@ class EXPORT MANAGED Class final : public Object {
// The offset of the first declared virtual methods in the methods_ array.
uint16_t virtual_methods_offset_;
- // TODO: ?
- // initiating class loader list
- // NOTE: for classes with low serialNumber, these are unused, and the
- // values are kept in a table in gDvm.
- // InitiatingLoaderList initiating_loader_list_;
-
// The following data exist in real class objects.
- // Embedded Imtable, for class object that's not an interface, fixed size.
- // ImTableEntry embedded_imtable_[0];
+ // Embedded Vtable length, for class object that's instantiable, fixed size.
+ // uint32_t vtable_length_;
+ // Embedded Imtable pointer, for class object that's not an interface, fixed size.
+ // ImTableEntry embedded_imtable_;
// Embedded Vtable, for class object that's not an interface, variable size.
// VTableEntry embedded_vtable_[0];
// Static fields, variable size.
// uint32_t fields_[0];
+ // Embedded bitmap of offsets of ifields, for classes that need more than 31
+ // reference-offset bits. 'reference_instance_offsets_' stores the number of
+ // 32-bit entries that hold the entire bitmap. We compute the offset of the
+ // first entry by subtracting this number from class_size_.
+ // uint32_t reference_bitmap_[0];
ART_FRIEND_TEST(DexCacheTest, TestResolvedFieldAccess); // For ResolvedFieldAccessTest
friend struct art::ClassOffsets; // for verifying offset information
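Given that layout, locating the overflow bitmap is a two-step computation; a minimal sketch (raw pointers and parameters stand in for the mirror::Class accessors):

#include <cstdint>

constexpr uint32_t kSlowpathMask = 1u << 31;  // Class::kVisitReferencesSlowpathMask

// The bitmap occupies the last num_words 32-bit slots of the class object.
inline const uint32_t* OverflowBitmap(const uint8_t* klass_addr,
                                      uint32_t class_size,
                                      uint32_t ref_offsets) {
  uint32_t num_words = ref_offsets & ~kSlowpathMask;
  return reinterpret_cast<const uint32_t*>(
      klass_addr + class_size - num_words * sizeof(uint32_t));
}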
diff --git a/runtime/mirror/class_ext.cc b/runtime/mirror/class_ext.cc
index d8cabd37b0..49177aca02 100644
--- a/runtime/mirror/class_ext.cc
+++ b/runtime/mirror/class_ext.cc
@@ -38,7 +38,7 @@ namespace mirror {
uint32_t ClassExt::ClassSize(PointerSize pointer_size) {
uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, 0, pointer_size);
}
void ClassExt::SetObsoleteArrays(ObjPtr<PointerArray> methods,
diff --git a/runtime/mirror/class_loader.h b/runtime/mirror/class_loader.h
index 0e925f4598..d62768f589 100644
--- a/runtime/mirror/class_loader.h
+++ b/runtime/mirror/class_loader.h
@@ -75,8 +75,7 @@ class MANAGED ClassLoader : public Object {
ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
typename Visitor>
void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
- REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(!Locks::classlinker_classes_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::classlinker_classes_lock_);
// Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
HeapReference<String> name_;
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 8f9d624399..4ac5131958 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -128,7 +128,7 @@ inline T* GcRootArray<T>::Get(uint32_t index) {
inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
const uint32_t vtable_entries = Object::kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, 0, pointer_size);
}
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
@@ -296,7 +296,7 @@ template <bool kVisitNativeRoots,
typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
// Visit instance fields first.
- VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
+ VisitInstanceFieldsReferences<kVerifyFlags>(klass, visitor);
// Visit arrays after.
if (kVisitNativeRoots) {
VisitNativeRoots<kVerifyFlags, kReadBarrierOption>(visitor);
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 14b9ca3af0..6a6f50347c 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -46,7 +46,7 @@ namespace mirror {
inline uint32_t Object::ClassSize(PointerSize pointer_size) {
uint32_t vtable_entries = kVTableLength;
- return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
+ return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, 0, pointer_size);
}
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
@@ -345,6 +345,7 @@ inline bool Object::IsPhantomReferenceInstance() {
return GetClass<kVerifyFlags>()->IsPhantomReferenceClass();
}
+// TODO: optimize this by using class_flags_ to determine type
template<VerifyObjectFlags kVerifyFlags>
inline size_t Object::SizeOf() {
// Read barrier is never required for SizeOf since objects sizes are constant. Reading from-space
@@ -858,65 +859,40 @@ inline void Object::UpdateField64ViaAccessor(MemberOffset field_offset,
accessor->Access(addr);
}
-template<bool kIsStatic,
- VerifyObjectFlags kVerifyFlags,
- ReadBarrierOption kReadBarrierOption,
- typename Visitor>
-inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
- if (!kIsStatic && (ref_offsets != mirror::Class::kClassWalkSuper)) {
- // Instance fields and not the slow-path.
- uint32_t field_offset = mirror::kObjectHeaderSize;
- while (ref_offsets != 0) {
- if ((ref_offsets & 1) != 0) {
- visitor(this, MemberOffset(field_offset), kIsStatic);
- }
- ref_offsets >>= 1;
- field_offset += sizeof(mirror::HeapReference<mirror::Object>);
+template <VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
+inline void Object::VisitInstanceFieldsReferences(ObjPtr<Class> klass, const Visitor& visitor) {
+ // Using NO_THREAD_SAFETY_ANALYSIS as heap_bitmap_lock_ and mutator_lock_ are
+ // required in shared/exclusive modes in all possible combinations.
+ auto visit_one_word = [&visitor, this](uint32_t field_offset, uint32_t ref_offsets)
+ NO_THREAD_SAFETY_ANALYSIS {
+ while (ref_offsets != 0) {
+ if ((ref_offsets & 1) != 0) {
+ visitor(this, MemberOffset(field_offset), /*is_static=*/false);
+ }
+ ref_offsets >>= 1;
+ field_offset += sizeof(HeapReference<Object>);
+ }
+ };
+
+ uint32_t ref_offsets = klass->GetReferenceInstanceOffsets<kVerifyFlags>();
+ DCHECK_NE(ref_offsets, 0u) << klass->PrettyDescriptor();
+ if (UNLIKELY((ref_offsets & Class::kVisitReferencesSlowpathMask) != 0)) {
+ if (kIsDebugBuild) {
+ klass->VerifyOverflowReferenceBitmap<kVerifyFlags, kReadBarrierOption>();
}
- } else {
- // There is no reference offset bitmap. In the non-static case, walk up the class
- // inheritance hierarchy and find reference offsets the hard way. In the static case, just
- // consider this class.
- for (ObjPtr<Class> klass = kIsStatic
- ? ObjPtr<Class>::DownCast(this)
- : GetClass<kVerifyFlags, kReadBarrierOption>();
- klass != nullptr;
- klass = kIsStatic ? nullptr : klass->GetSuperClass<kVerifyFlags, kReadBarrierOption>()) {
- const size_t num_reference_fields =
- kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields();
- if (num_reference_fields == 0u) {
- continue;
- }
- // Presumably GC can happen when we are cross compiling, it should not cause performance
- // problems to do pointer size logic.
- MemberOffset field_offset = kIsStatic
- ? klass->GetFirstReferenceStaticFieldOffset<kVerifyFlags>(
- Runtime::Current()->GetClassLinker()->GetImagePointerSize())
- : klass->GetFirstReferenceInstanceFieldOffset<kVerifyFlags, kReadBarrierOption>();
- for (size_t i = 0u; i < num_reference_fields; ++i) {
- // TODO: Do a simpler check?
- if (field_offset.Uint32Value() != ClassOffset().Uint32Value()) {
- visitor(this, field_offset, kIsStatic);
- }
- field_offset = MemberOffset(field_offset.Uint32Value() +
- sizeof(mirror::HeapReference<mirror::Object>));
- }
+ uint32_t bitmap_num_words = ref_offsets & ~Class::kVisitReferencesSlowpathMask;
+ uint32_t* overflow_bitmap = reinterpret_cast<uint32_t*>(
+ reinterpret_cast<uint8_t*>(klass.Ptr()) +
+ (klass->GetClassSize<kVerifyFlags>() - bitmap_num_words * sizeof(uint32_t)));
+ for (uint32_t i = 0; i < bitmap_num_words; i++) {
+ visit_one_word(kObjectHeaderSize + i * sizeof(HeapReference<Object>) * 32,
+ overflow_bitmap[i]);
}
+ } else {
+ visit_one_word(mirror::kObjectHeaderSize, ref_offsets);
}
}
-template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void Object::VisitInstanceFieldsReferences(ObjPtr<Class> klass, const Visitor& visitor) {
- VisitFieldsReferences<false, kVerifyFlags, kReadBarrierOption>(
- klass->GetReferenceInstanceOffsets<kVerifyFlags>(), visitor);
-}
-
-template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption, typename Visitor>
-inline void Object::VisitStaticFieldsReferences(ObjPtr<Class> klass, const Visitor& visitor) {
- DCHECK(!klass->IsTemp<kVerifyFlags>());
- klass->VisitFieldsReferences<true, kVerifyFlags, kReadBarrierOption>(0, visitor);
-}
-
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsClassLoader() {
return GetClass<kVerifyFlags, kReadBarrierOption>()->template IsClassLoaderClass<kVerifyFlags>();
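The per-word walk in visit_one_word maps each set bit to one 4-byte heap-reference slot after the 8-byte object header; a standalone sketch (the bitmap value is invented):

#include <cstdint>
#include <cstdio>

int main() {
  uint32_t ref_offsets = 0b1011;  // hypothetical inline bitmap
  uint32_t field_offset = 8;      // mirror::kObjectHeaderSize
  while (ref_offsets != 0) {
    if ((ref_offsets & 1) != 0) {
      std::printf("reference at offset %u\n", field_offset);
    }
    ref_offsets >>= 1;
    field_offset += 4;  // sizeof(HeapReference<mirror::Object>)
  }
  // Prints offsets 8, 12 and 20.
}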
diff --git a/runtime/mirror/object-refvisitor-inl.h b/runtime/mirror/object-refvisitor-inl.h
index 72aff4b686..140ef9f93b 100644
--- a/runtime/mirror/object-refvisitor-inl.h
+++ b/runtime/mirror/object-refvisitor-inl.h
@@ -72,6 +72,7 @@ inline void Object::VisitReferences(const Visitor& visitor,
const uint32_t class_flags = klass->GetClassFlags<kVerifyNone>();
if (LIKELY(class_flags == kClassFlagNormal) || class_flags == kClassFlagRecord) {
CheckNormalClass<kVerifyFlags>(klass);
+ DCHECK(klass->IsInstantiableNonArray()) << klass->PrettyDescriptor();
VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
return;
}
@@ -84,6 +85,7 @@ inline void Object::VisitReferences(const Visitor& visitor,
DCHECK(!klass->IsStringClass<kVerifyFlags>());
if (class_flags == kClassFlagClass) {
DCHECK(klass->IsClassClass<kVerifyFlags>());
+ DCHECK(klass->IsInstantiableNonArray()) << klass->PrettyDescriptor();
ObjPtr<Class> as_klass = AsClass<kVerifyNone>();
as_klass->VisitReferences<kVisitNativeRoots, kVerifyFlags, kReadBarrierOption>(klass, visitor);
return;
@@ -96,12 +98,14 @@ inline void Object::VisitReferences(const Visitor& visitor,
}
if ((class_flags & kClassFlagReference) != 0) {
+ DCHECK(klass->IsInstantiableNonArray()) << klass->PrettyDescriptor();
VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
ref_visitor(klass, AsReference<kVerifyFlags, kReadBarrierOption>());
return;
}
if (class_flags == kClassFlagDexCache) {
+ DCHECK(klass->IsInstantiableNonArray()) << klass->PrettyDescriptor();
DCHECK(klass->IsDexCacheClass<kVerifyFlags>());
ObjPtr<mirror::DexCache> const dex_cache = AsDexCache<kVerifyFlags, kReadBarrierOption>();
dex_cache->VisitReferences<kVisitNativeRoots,
@@ -111,6 +115,7 @@ inline void Object::VisitReferences(const Visitor& visitor,
}
if (class_flags == kClassFlagClassLoader) {
+ DCHECK(klass->IsInstantiableNonArray()) << klass->PrettyDescriptor();
DCHECK(klass->IsClassLoaderClass<kVerifyFlags>());
ObjPtr<mirror::ClassLoader> const class_loader =
AsClassLoader<kVerifyFlags, kReadBarrierOption>();
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index debac07aba..97e218b4de 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -714,23 +714,11 @@ class EXPORT MANAGED LOCKABLE Object {
}
}
- // TODO: Fixme when anotatalysis works with visitors.
- template<bool kIsStatic,
- VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
- ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
- typename Visitor>
- void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
- NO_THREAD_SAFETY_ANALYSIS;
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
- ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
- typename Visitor>
+ template <VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+ ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+ typename Visitor>
void VisitInstanceFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
REQUIRES_SHARED(Locks::mutator_lock_);
- template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
- ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
- typename Visitor>
- void VisitStaticFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
- REQUIRES_SHARED(Locks::mutator_lock_);
private:
template <bool kAllowInflation>
diff --git a/runtime/mirror/reference-inl.h b/runtime/mirror/reference-inl.h
index c8a0957a0a..ac3d7c424b 100644
--- a/runtime/mirror/reference-inl.h
+++ b/runtime/mirror/reference-inl.h
@@ -27,7 +27,7 @@ namespace mirror {
inline uint32_t Reference::ClassSize(PointerSize pointer_size) {
uint32_t vtable_entries = Object::kVTableLength + 4;
- return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0, 0, 0, pointer_size);
+ return Class::ComputeClassSize(false, vtable_entries, 2, 0, 0, 0, 0, 0, pointer_size);
}
template<bool kTransactionActive>
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index c76ef69b1d..97f1fb8fc6 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -39,7 +39,7 @@ inline uint32_t String::ClassSize(PointerSize pointer_size) {
#else
uint32_t vtable_entries = Object::kVTableLength + 72;
#endif
- return Class::ComputeClassSize(true, vtable_entries, 3, 0, 0, 1, 3, pointer_size);
+ return Class::ComputeClassSize(true, vtable_entries, 3, 0, 0, 1, 3, 0, pointer_size);
}
inline uint16_t String::CharAt(int32_t index) {
diff --git a/runtime/oat/oat.h b/runtime/oat/oat.h
index cd7df60ca5..10e47f5b31 100644
--- a/runtime/oat/oat.h
+++ b/runtime/oat/oat.h
@@ -44,8 +44,8 @@ std::ostream& operator<<(std::ostream& stream, StubType stub_type);
class EXPORT PACKED(4) OatHeader {
public:
static constexpr std::array<uint8_t, 4> kOatMagic { { 'o', 'a', 't', '\n' } };
- // Last oat version changed reason: Adding new entrypoints for on demand tracing.
- static constexpr std::array<uint8_t, 4> kOatVersion{{'2', '4', '5', '\0'}};
+ // Last oat version changed reason: Implement variable sized ref-offset bitmap in mirror::Class.
+ static constexpr std::array<uint8_t, 4> kOatVersion{{'2', '4', '6', '\0'}};
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
static constexpr const char* kDebuggableKey = "debuggable";
diff --git a/runtime/runtime_image.cc b/runtime/runtime_image.cc
index 997ea2fde6..fdba92e67f 100644
--- a/runtime/runtime_image.cc
+++ b/runtime/runtime_image.cc
@@ -1610,9 +1610,15 @@ class RuntimeImageHelper {
// Clear static field values.
auto clear_class = [&] () REQUIRES_SHARED(Locks::mutator_lock_) {
MemberOffset static_offset = cls->GetFirstReferenceStaticFieldOffset(kRuntimePointerSize);
- memset(objects_.data() + offset + static_offset.Uint32Value(),
- 0,
- cls->GetClassSize() - static_offset.Uint32Value());
+ uint32_t ref_offsets = cls->GetReferenceInstanceOffsets();
+ size_t size = cls->GetClassSize() - static_offset.Uint32Value();
+ // Adjust for overflow instance-offset bitmap, which is after the static
+ // fields.
+ if ((ref_offsets & mirror::Class::kVisitReferencesSlowpathMask) != 0) {
+ ref_offsets &= ~mirror::Class::kVisitReferencesSlowpathMask;
+ size -= ref_offsets * sizeof(uint32_t);
+ }
+ memset(objects_.data() + offset + static_offset.Uint32Value(), 0, size);
};
clear_class();
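For the memset adjustment above, a hedged numeric check (all sizes invented): if GetClassSize() is 200, the first static-field offset is 160, and the encoded value reports two overflow words, only 200 - 160 - 2*4 = 32 bytes of static fields are cleared, leaving the trailing bitmap intact:

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kSlowpathMask = 1u << 31;
  uint32_t class_size = 200;                  // hypothetical
  uint32_t static_offset = 160;               // hypothetical
  uint32_t ref_offsets = 2u | kSlowpathMask;  // two overflow words
  size_t size = class_size - static_offset;
  if ((ref_offsets & kSlowpathMask) != 0) {
    size -= (ref_offsets & ~kSlowpathMask) * sizeof(uint32_t);
  }
  assert(size == 32u);  // bitmap words at the end are preserved
}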