Reduced memory usage of primitive fields smaller than 4 bytes
Reduced memory used by byte and boolean fields from 4 bytes down to a
single byte, and shorts and chars down to two bytes. Fields are now
laid out as references first, followed by primitive fields in
decreasing size order, with smaller fields shuffled forward as needed
to fill alignment gaps.
Bug: 8135266
Change-Id: I65eaf31ed27e5bd5ba0c7d4606454b720b074752
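Reviewer note: the gap-filling layout described above can be illustrated with a small
standalone sketch. This is an approximation only; ComputeStaticFieldsSize, the choice of
a 4-byte compressed reference, and the starting offset are illustrative assumptions, not
ART's actual API or constants.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the size of a compressed heap reference.
constexpr uint32_t kReferenceSize = sizeof(uint32_t);

// Sketch of the approach: starting at the end of the reference block, reserve the
// padding needed to 8-byte align any 64-bit fields, then fill that gap with 32-bit,
// 16-bit, and finally 8-bit fields shuffled forward.
uint32_t ComputeStaticFieldsSize(uint32_t size,
                                 uint32_t num_8bit,
                                 uint32_t num_16bit,
                                 uint32_t num_32bit,
                                 uint32_t num_64bit) {
  if ((size % 8) != 0 && num_64bit > 0) {
    uint32_t gap = 8 - (size % 8);
    size += gap;  // Filled by the fields shuffled below, or left as padding.
    while (gap >= sizeof(uint32_t) && num_32bit != 0) {
      --num_32bit;
      gap -= sizeof(uint32_t);
    }
    while (gap >= sizeof(uint16_t) && num_16bit != 0) {
      --num_16bit;
      gap -= sizeof(uint16_t);
    }
    while (gap >= sizeof(uint8_t) && num_8bit != 0) {
      --num_8bit;
      gap -= sizeof(uint8_t);
    }
  }
  // Space for the remaining fields; 8-byte alignment is already satisfied.
  size += num_8bit * sizeof(uint8_t) +
          num_16bit * sizeof(uint16_t) +
          num_32bit * sizeof(uint32_t) +
          num_64bit * sizeof(uint64_t);
  // Keep the end of the object 4-byte aligned for whatever follows it.
  return (size + 3u) & ~3u;
}

int main() {
  const uint32_t start = 3 * kReferenceSize;  // Illustrative end-of-references offset.
  // Two booleans and a char: previously 3 * 4 = 12 bytes of statics, now
  // 2 * 1 + 2 = 4 bytes, so the total is 16 instead of 24.
  std::printf("%u\n", ComputeStaticFieldsSize(start, /*num_8bit=*/2, /*num_16bit=*/1,
                                              /*num_32bit=*/0, /*num_64bit=*/0));  // 16
  // A long plus a boolean: the boolean moves forward into the 4-byte gap left by
  // 8-byte aligning the long instead of being appended after it.
  std::printf("%u\n", ComputeStaticFieldsSize(start, /*num_8bit=*/1, /*num_16bit=*/0,
                                              /*num_32bit=*/0, /*num_64bit=*/1));  // 24
  return 0;
}

The diff below applies the same shuffling inside Class::ComputeClassSize and switches the
slow-path flag accessors from 32-bit to single-byte loads and stores.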
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index b0ff7ea..52dd0ee 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -556,6 +556,8 @@
inline uint32_t Class::ComputeClassSize(bool has_embedded_tables,
uint32_t num_vtable_entries,
+ uint32_t num_8bit_static_fields,
+ uint32_t num_16bit_static_fields,
uint32_t num_32bit_static_fields,
uint32_t num_64bit_static_fields,
uint32_t num_ref_static_fields) {
@@ -569,19 +571,39 @@
sizeof(int32_t) /* vtable len */ +
embedded_vtable_size;
}
+
// Space used by reference statics.
size += num_ref_static_fields * sizeof(HeapReference<Object>);
- // Possible pad for alignment.
- if (((size & 7) != 0) && (num_64bit_static_fields > 0)) {
- size += sizeof(uint32_t);
- if (num_32bit_static_fields != 0) {
- // Shuffle one 32 bit static field forward.
- num_32bit_static_fields--;
+ if (!IsAligned<8>(size) && num_64bit_static_fields > 0) {
+ uint32_t gap = 8 - (size & 0x7);
+ size += gap; // Gap is filled by the smaller fields shuffled below, or left as padding.
+ // Shuffle 4-byte fields forward.
+ while (gap >= sizeof(uint32_t) && num_32bit_static_fields != 0) {
+ --num_32bit_static_fields;
+ gap -= sizeof(uint32_t);
+ }
+ // Shuffle 2-byte fields forward.
+ while (gap >= sizeof(uint16_t) && num_16bit_static_fields != 0) {
+ --num_16bit_static_fields;
+ gap -= sizeof(uint16_t);
+ }
+ // Shuffle byte fields forward.
+ while (gap >= sizeof(uint8_t) && num_8bit_static_fields != 0) {
+ --num_8bit_static_fields;
+ gap -= sizeof(uint8_t);
}
}
+ // Guaranteed to be at least 4-byte aligned. No need for further alignment.
// Space used for primitive static fields.
- size += (num_32bit_static_fields * sizeof(uint32_t)) +
+ size += (num_8bit_static_fields * sizeof(uint8_t)) +
+ (num_16bit_static_fields * sizeof(uint16_t)) +
+ (num_32bit_static_fields * sizeof(uint32_t)) +
(num_64bit_static_fields * sizeof(uint64_t));
+ // For now, the start of the subclass is expected to be 4-byte aligned, so pad the end of
+ // the object to ensure alignment.
+ if (!IsAligned<4>(size)) {
+ size = RoundUp(size, 4);
+ }
return size;
}
@@ -705,11 +727,11 @@
}
inline bool Class::GetSlowPathEnabled() {
- return GetField32(GetSlowPathFlagOffset());
+ return GetFieldBoolean(GetSlowPathFlagOffset());
}
inline void Class::SetSlowPath(bool enabled) {
- SetField32<false>(GetSlowPathFlagOffset(), enabled);
+ SetFieldBoolean<false>(GetSlowPathFlagOffset(), enabled);
}
inline void Class::InitializeClassVisitor::operator()(