-rw-r--r--  build/Android.gtest.mk                  |   1
-rw-r--r--  compiler/optimizing/code_generator.cc   |  48
-rw-r--r--  compiler/optimizing/stack_map_stream.h  |  77
-rw-r--r--  compiler/optimizing/stack_map_test.cc   | 143
-rw-r--r--  oatdump/oatdump.cc                      |  45
-rw-r--r--  runtime/check_reference_map_visitor.h   |  27
-rw-r--r--  runtime/memory_region.h                 |  51
-rw-r--r--  runtime/memory_region_test.cc           |  57
-rw-r--r--  runtime/stack.cc                        |  39
-rw-r--r--  runtime/stack_map.h                     | 439
-rw-r--r--  runtime/utils.h                         |  18
11 files changed, 733 insertions, 212 deletions
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 6967808b8e..09790fe8e5 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -155,6 +155,7 @@ RUNTIME_GTEST_COMMON_SRC_FILES := \
runtime/java_vm_ext_test.cc \
runtime/leb128_test.cc \
runtime/mem_map_test.cc \
+ runtime/memory_region_test.cc \
runtime/mirror/dex_cache_test.cc \
runtime/mirror/object_test.cc \
runtime/monitor_pool_test.cc \
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index ed3f949afe..7d256ae4aa 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -610,7 +610,7 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction, uint32_t dex_pc) {
for (size_t i = 0; i < environment_size; ++i) {
HInstruction* current = environment->GetInstructionAt(i);
if (current == nullptr) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kNone, 0);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kNone, 0);
continue;
}
@@ -620,37 +620,43 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction, uint32_t dex_pc) {
DCHECK_EQ(current, location.GetConstant());
if (current->IsLongConstant()) {
int64_t value = current->AsLongConstant()->GetValue();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, Low32Bits(value));
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, High32Bits(value));
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
+ Low32Bits(value));
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
+ High32Bits(value));
++i;
DCHECK_LT(i, environment_size);
} else if (current->IsDoubleConstant()) {
int64_t value = bit_cast<double, int64_t>(current->AsDoubleConstant()->GetValue());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, Low32Bits(value));
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, High32Bits(value));
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
+ Low32Bits(value));
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
+ High32Bits(value));
++i;
DCHECK_LT(i, environment_size);
} else if (current->IsIntConstant()) {
int32_t value = current->AsIntConstant()->GetValue();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, value);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
} else if (current->IsNullConstant()) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, 0);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, 0);
} else {
DCHECK(current->IsFloatConstant());
int32_t value = bit_cast<float, int32_t>(current->AsFloatConstant()->GetValue());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kConstant, value);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
}
break;
}
case Location::kStackSlot: {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInStack, location.GetStackIndex());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
+ location.GetStackIndex());
break;
}
case Location::kDoubleStackSlot: {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInStack, location.GetStackIndex());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInStack,
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
+ location.GetStackIndex());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
location.GetHighStackIndex(kVRegSize));
++i;
DCHECK_LT(i, environment_size);
@@ -659,9 +665,9 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction, uint32_t dex_pc) {
case Location::kRegister : {
int id = location.reg();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
if (current->GetType() == Primitive::kPrimLong) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
++i;
DCHECK_LT(i, environment_size);
}
@@ -670,9 +676,9 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction, uint32_t dex_pc) {
case Location::kFpuRegister : {
int id = location.reg();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInFpuRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
if (current->GetType() == Primitive::kPrimDouble) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInFpuRegister, id);
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
++i;
DCHECK_LT(i, environment_size);
}
@@ -680,16 +686,20 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction, uint32_t dex_pc) {
}
case Location::kFpuRegisterPair : {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInFpuRegister, location.low());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInFpuRegister, location.high());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister,
+ location.low());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister,
+ location.high());
++i;
DCHECK_LT(i, environment_size);
break;
}
case Location::kRegisterPair : {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInRegister, location.low());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterMap::kInRegister, location.high());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister,
+ location.low());
+ stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister,
+ location.high());
++i;
DCHECK_LT(i, environment_size);
break;
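
In the long and double constant cases above, a single 64-bit value spans two Dex registers, so it is recorded as two consecutive kConstant entries. A minimal standalone sketch of that splitting, outside this commit, where Low32Bits/High32Bits are local stand-ins mirroring the helpers in runtime/utils.h:

#include <cstdint>
#include <cstring>
#include <iostream>

// Local stand-ins mirroring art::Low32Bits / art::High32Bits (runtime/utils.h).
static uint32_t Low32Bits(uint64_t value) { return static_cast<uint32_t>(value); }
static uint32_t High32Bits(uint64_t value) { return static_cast<uint32_t>(value >> 32); }

int main() {
  // A long constant occupies two Dex registers: the low 32 bits are added
  // first, then the high 32 bits, matching the order used in RecordPcInfo.
  int64_t long_value = INT64_C(0x123456789ABCDEF0);
  std::cout << std::hex
            << "low:  0x" << Low32Bits(long_value) << "\n"
            << "high: 0x" << High32Bits(long_value) << "\n";
  // A double constant is first reinterpreted as a 64-bit bit pattern (the
  // compiler code uses bit_cast; memcpy is the portable standalone equivalent).
  double d = 2.5;
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  std::cout << "double low:  0x" << Low32Bits(bits) << "\n"
            << "double high: 0x" << High32Bits(bits) << "\n";
  return 0;
}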
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 5283d5dcca..79bebd2e64 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -56,11 +56,6 @@ class StackMapStream : public ValueObject {
size_t inline_infos_start_index;
};
- struct DexRegisterEntry {
- DexRegisterMap::LocationKind kind;
- int32_t value;
- };
-
struct InlineInfoEntry {
uint32_t method_index;
};
@@ -90,11 +85,11 @@ class StackMapStream : public ValueObject {
}
}
- void AddDexRegisterEntry(DexRegisterMap::LocationKind kind, int32_t value) {
- DexRegisterEntry entry;
- entry.kind = kind;
- entry.value = value;
- dex_register_maps_.Add(entry);
+ void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
+ // Ensure we only use non-compressed location kind at this stage.
+ DCHECK(DexRegisterLocation::IsShortLocationKind(kind))
+ << DexRegisterLocation::PrettyDescriptor(kind);
+ dex_register_maps_.Add(DexRegisterLocation(kind, value));
}
void AddInlineInfoEntry(uint32_t method_index) {
@@ -106,7 +101,7 @@ class StackMapStream : public ValueObject {
size_t ComputeNeededSize() const {
return CodeInfo::kFixedSize
+ ComputeStackMapSize()
- + ComputeDexRegisterMapSize()
+ + ComputeDexRegisterMapsSize()
+ ComputeInlineInfoSize();
}
@@ -114,27 +109,44 @@ class StackMapStream : public ValueObject {
return stack_maps_.Size() * StackMap::ComputeAlignedStackMapSize(stack_mask_max_);
}
- size_t ComputeDexRegisterMapSize() const {
- // We currently encode all dex register information per stack map.
- return stack_maps_.Size() * DexRegisterMap::kFixedSize
- // For each dex register entry.
- + (dex_register_maps_.Size() * DexRegisterMap::SingleEntrySize());
+ // Compute the size of the Dex register map of `entry`.
+ size_t ComputeDexRegisterMapSize(const StackMapEntry& entry) const {
+ size_t size = DexRegisterMap::kFixedSize;
+ for (size_t j = 0; j < entry.num_dex_registers; ++j) {
+ DexRegisterLocation dex_register_location =
+ dex_register_maps_.Get(entry.dex_register_maps_start_index + j);
+ size += DexRegisterMap::EntrySize(dex_register_location);
+ }
+ return size;
+ }
+
+ // Compute the size of all the Dex register maps.
+ size_t ComputeDexRegisterMapsSize() const {
+ size_t size = stack_maps_.Size() * DexRegisterMap::kFixedSize;
+ // The size of each register location depends on the type of
+ // the entry.
+ for (size_t i = 0, e = dex_register_maps_.Size(); i < e; ++i) {
+ DexRegisterLocation entry = dex_register_maps_.Get(i);
+ size += DexRegisterMap::EntrySize(entry);
+ }
+ return size;
}
+ // Compute the size of all the inline information pieces.
size_t ComputeInlineInfoSize() const {
return inline_infos_.Size() * InlineInfo::SingleEntrySize()
// For encoding the depth.
+ (number_of_stack_maps_with_inline_info_ * InlineInfo::kFixedSize);
}
- size_t ComputeInlineInfoStart() const {
- return ComputeDexRegisterMapStart() + ComputeDexRegisterMapSize();
- }
-
size_t ComputeDexRegisterMapStart() const {
return CodeInfo::kFixedSize + ComputeStackMapSize();
}
+ size_t ComputeInlineInfoStart() const {
+ return ComputeDexRegisterMapStart() + ComputeDexRegisterMapsSize();
+ }
+
void FillIn(MemoryRegion region) {
CodeInfo code_info(region);
code_info.SetOverallSize(region.size());
@@ -144,7 +156,7 @@ class StackMapStream : public ValueObject {
MemoryRegion dex_register_maps_region = region.Subregion(
ComputeDexRegisterMapStart(),
- ComputeDexRegisterMapSize());
+ ComputeDexRegisterMapsSize());
MemoryRegion inline_infos_region = region.Subregion(
ComputeInlineInfoStart(),
@@ -167,20 +179,25 @@ class StackMapStream : public ValueObject {
}
if (entry.num_dex_registers != 0) {
- // Set the register map.
- MemoryRegion register_region = dex_register_maps_region.Subregion(
- next_dex_register_map_offset,
- DexRegisterMap::kFixedSize
- + entry.num_dex_registers * DexRegisterMap::SingleEntrySize());
+ // Set the Dex register map.
+ MemoryRegion register_region =
+ dex_register_maps_region.Subregion(
+ next_dex_register_map_offset,
+ ComputeDexRegisterMapSize(entry));
next_dex_register_map_offset += register_region.size();
DexRegisterMap dex_register_map(register_region);
stack_map.SetDexRegisterMapOffset(register_region.start() - memory_start);
+ // Offset in `dex_register_map` where to store the next register entry.
+ size_t offset = DexRegisterMap::kFixedSize;
for (size_t j = 0; j < entry.num_dex_registers; ++j) {
- DexRegisterEntry register_entry =
- dex_register_maps_.Get(j + entry.dex_register_maps_start_index);
- dex_register_map.SetRegisterInfo(j, register_entry.kind, register_entry.value);
+ DexRegisterLocation dex_register_location =
+ dex_register_maps_.Get(entry.dex_register_maps_start_index + j);
+ dex_register_map.SetRegisterInfo(offset, dex_register_location);
+ offset += DexRegisterMap::EntrySize(dex_register_location);
}
+ // Ensure we reached the end of the Dex registers region.
+ DCHECK_EQ(offset, register_region.size());
} else {
stack_map.SetDexRegisterMapOffset(StackMap::kNoDexRegisterMap);
}
@@ -208,7 +225,7 @@ class StackMapStream : public ValueObject {
private:
GrowableArray<StackMapEntry> stack_maps_;
- GrowableArray<DexRegisterEntry> dex_register_maps_;
+ GrowableArray<DexRegisterLocation> dex_register_maps_;
GrowableArray<InlineInfoEntry> inline_infos_;
int stack_mask_max_;
size_t number_of_stack_maps_with_inline_info_;
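
Because a Dex register entry now takes either 1 byte (short encoding) or 5 bytes (large encoding), the stream can no longer multiply the entry count by a fixed SingleEntrySize(); ComputeDexRegisterMapSize and ComputeDexRegisterMapsSize above sum a per-entry size instead. A standalone sketch of that sizing rule, using a hypothetical Location stand-in and hard-coded entry sizes for illustration only:

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical stand-in for DexRegisterLocation: only what the size
// computation needs, i.e. whether the entry fits the 1-byte encoding.
struct Location {
  bool is_short;
};

// Mirrors the shape of StackMapStream::ComputeDexRegisterMapsSize: a fixed
// per-map header plus a per-entry size that depends on the entry itself.
static size_t ComputeDexRegisterMapsSize(size_t num_stack_maps,
                                         const std::vector<Location>& entries) {
  const size_t kFixedSize = 0;           // DexRegisterMap::kFixedSize in this diff.
  const size_t kShortEntrySize = 1;      // 3-bit kind + 5-bit value in one byte.
  const size_t kLargeEntrySize = 1 + 4;  // 1 byte of kind + 4 bytes of value.
  size_t size = num_stack_maps * kFixedSize;
  for (const Location& entry : entries) {
    size += entry.is_short ? kShortEntrySize : kLargeEntrySize;
  }
  return size;
}

int main() {
  // One stack map with two entries, as in stack_map_test.cc's Test1: a short
  // in-stack location plus a large constant (-2 does not fit the unsigned
  // 5-bit value field), hence the 6-byte map checked by the test.
  std::vector<Location> entries = {{true}, {false}};
  std::cout << ComputeDexRegisterMapsSize(1u, entries) << "\n";  // Prints 6.
  return 0;
}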
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 5b025106ac..3a5f80686d 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -22,7 +22,7 @@
namespace art {
-bool SameBits(MemoryRegion region, const BitVector& bit_vector) {
+static bool SameBits(MemoryRegion region, const BitVector& bit_vector) {
for (size_t i = 0; i < region.size_in_bits(); ++i) {
if (region.LoadBit(i) != bit_vector.IsBitSet(i)) {
return false;
@@ -31,9 +31,9 @@ bool SameBits(MemoryRegion region, const BitVector& bit_vector) {
return true;
}
-size_t ComputeDexRegisterMapSize(size_t number_of_dex_registers) {
- return DexRegisterMap::kFixedSize
- + number_of_dex_registers * DexRegisterMap::SingleEntrySize();
+static size_t ComputeDexRegisterMapSize(const DexRegisterMap& dex_registers,
+ size_t number_of_dex_registers) {
+ return dex_registers.FindLocationOffset(number_of_dex_registers);
}
TEST(StackMapTest, Test1) {
@@ -44,8 +44,8 @@ TEST(StackMapTest, Test1) {
ArenaBitVector sp_mask(&arena, 0, false);
size_t number_of_dex_registers = 2;
stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(DexRegisterMap::kInStack, 0);
- stream.AddDexRegisterEntry(DexRegisterMap::kConstant, -2);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, 0);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, -2);
size_t size = stream.ComputeNeededSize();
void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -67,14 +67,17 @@ TEST(StackMapTest, Test1) {
ASSERT_TRUE(SameBits(stack_mask, sp_mask));
ASSERT_TRUE(stack_map.HasDexRegisterMap());
- DexRegisterMap dex_registers =
- code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(16u, dex_registers.Size());
- ASSERT_EQ(16u, ComputeDexRegisterMapSize(number_of_dex_registers));
- ASSERT_EQ(DexRegisterMap::kInStack, dex_registers.GetLocationKind(0));
- ASSERT_EQ(DexRegisterMap::kConstant, dex_registers.GetLocationKind(1));
- ASSERT_EQ(0, dex_registers.GetValue(0));
- ASSERT_EQ(-2, dex_registers.GetValue(1));
+ DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+ ASSERT_EQ(6u, dex_registers.Size());
+ ASSERT_EQ(6u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
+ DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
+ DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kConstantLargeValue, location1.GetInternalKind());
+ ASSERT_EQ(0, location0.GetValue());
+ ASSERT_EQ(-2, location1.GetValue());
ASSERT_FALSE(stack_map.HasInlineInfo());
}
@@ -89,8 +92,8 @@ TEST(StackMapTest, Test2) {
sp_mask1.SetBit(4);
size_t number_of_dex_registers = 2;
stream.AddStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
- stream.AddDexRegisterEntry(DexRegisterMap::kInStack, 0);
- stream.AddDexRegisterEntry(DexRegisterMap::kConstant, -2);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, 0);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, -2);
stream.AddInlineInfoEntry(42);
stream.AddInlineInfoEntry(82);
@@ -98,8 +101,8 @@ TEST(StackMapTest, Test2) {
sp_mask2.SetBit(3);
sp_mask1.SetBit(8);
stream.AddStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
- stream.AddDexRegisterEntry(DexRegisterMap::kInRegister, 18);
- stream.AddDexRegisterEntry(DexRegisterMap::kInFpuRegister, 3);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, 18);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, 3);
size_t size = stream.ComputeNeededSize();
void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -111,54 +114,66 @@ TEST(StackMapTest, Test2) {
ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
// First stack map.
- StackMap stack_map = code_info.GetStackMapAt(0);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
- ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset());
- ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
-
- MemoryRegion stack_mask = stack_map.GetStackMask();
- ASSERT_TRUE(SameBits(stack_mask, sp_mask1));
-
- ASSERT_TRUE(stack_map.HasDexRegisterMap());
- DexRegisterMap dex_registers =
- code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(16u, dex_registers.Size());
- ASSERT_EQ(16u, ComputeDexRegisterMapSize(number_of_dex_registers));
- ASSERT_EQ(DexRegisterMap::kInStack, dex_registers.GetLocationKind(0));
- ASSERT_EQ(DexRegisterMap::kConstant, dex_registers.GetLocationKind(1));
- ASSERT_EQ(0, dex_registers.GetValue(0));
- ASSERT_EQ(-2, dex_registers.GetValue(1));
-
- ASSERT_TRUE(stack_map.HasInlineInfo());
- InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
- ASSERT_EQ(2u, inline_info.GetDepth());
- ASSERT_EQ(42u, inline_info.GetMethodReferenceIndexAtDepth(0));
- ASSERT_EQ(82u, inline_info.GetMethodReferenceIndexAtDepth(1));
+ {
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_EQ(0u, stack_map.GetDexPc());
+ ASSERT_EQ(64u, stack_map.GetNativePcOffset());
+ ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
+
+ MemoryRegion stack_mask = stack_map.GetStackMask();
+ ASSERT_TRUE(SameBits(stack_mask, sp_mask1));
+
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
+ DexRegisterMap dex_registers =
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+ ASSERT_EQ(6u, dex_registers.Size());
+ ASSERT_EQ(6u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
+ DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
+ DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kConstantLargeValue, location1.GetInternalKind());
+ ASSERT_EQ(0, location0.GetValue());
+ ASSERT_EQ(-2, location1.GetValue());
+
+ ASSERT_TRUE(stack_map.HasInlineInfo());
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ ASSERT_EQ(2u, inline_info.GetDepth());
+ ASSERT_EQ(42u, inline_info.GetMethodReferenceIndexAtDepth(0));
+ ASSERT_EQ(82u, inline_info.GetMethodReferenceIndexAtDepth(1));
+ }
// Second stack map.
- stack_map = code_info.GetStackMapAt(1);
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
- ASSERT_EQ(1u, stack_map.GetDexPc());
- ASSERT_EQ(128u, stack_map.GetNativePcOffset());
- ASSERT_EQ(0xFFu, stack_map.GetRegisterMask());
-
- stack_mask = stack_map.GetStackMask();
- ASSERT_TRUE(SameBits(stack_mask, sp_mask2));
-
- ASSERT_TRUE(stack_map.HasDexRegisterMap());
- dex_registers =
- code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(16u, dex_registers.Size());
- ASSERT_EQ(16u, ComputeDexRegisterMapSize(number_of_dex_registers));
- ASSERT_EQ(DexRegisterMap::kInRegister, dex_registers.GetLocationKind(0));
- ASSERT_EQ(DexRegisterMap::kInFpuRegister, dex_registers.GetLocationKind(1));
- ASSERT_EQ(18, dex_registers.GetValue(0));
- ASSERT_EQ(3, dex_registers.GetValue(1));
-
- ASSERT_FALSE(stack_map.HasInlineInfo());
+ {
+ StackMap stack_map = code_info.GetStackMapAt(1);
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
+ ASSERT_EQ(1u, stack_map.GetDexPc());
+ ASSERT_EQ(128u, stack_map.GetNativePcOffset());
+ ASSERT_EQ(0xFFu, stack_map.GetRegisterMask());
+
+ MemoryRegion stack_mask = stack_map.GetStackMask();
+ ASSERT_TRUE(SameBits(stack_mask, sp_mask2));
+
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
+ DexRegisterMap dex_registers =
+ code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+ ASSERT_EQ(2u, dex_registers.Size());
+ ASSERT_EQ(2u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
+ DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
+ DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kInFpuRegister, location1.GetKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetInternalKind());
+ ASSERT_EQ(DexRegisterLocation::Kind::kInFpuRegister, location1.GetInternalKind());
+ ASSERT_EQ(18, location0.GetValue());
+ ASSERT_EQ(3, location1.GetValue());
+
+ ASSERT_FALSE(stack_map.HasInlineInfo());
+ }
}
} // namespace art
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index aab4f8bc0c..9ae3b79f62 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1039,6 +1039,33 @@ class OatDumper {
}
}
+ void DumpRegisterMapping(std::ostream& os,
+ size_t dex_register_num,
+ DexRegisterLocation::Kind kind,
+ int32_t value,
+ const std::string& prefix = "v",
+ const std::string& suffix = "") {
+ os << " " << prefix << dex_register_num << ": "
+ << DexRegisterLocation::PrettyDescriptor(kind)
+ << " (" << value << ")" << suffix << '\n';
+ }
+
+ void DumpStackMapHeader(std::ostream& os, const CodeInfo& code_info, size_t stack_map_num) {
+ StackMap stack_map = code_info.GetStackMapAt(stack_map_num);
+ os << " StackMap " << stack_map_num
+ << std::hex
+ << " (dex_pc=0x" << stack_map.GetDexPc()
+ << ", native_pc_offset=0x" << stack_map.GetNativePcOffset()
+ << ", register_mask=0x" << stack_map.GetRegisterMask()
+ << std::dec
+ << ", stack_mask=0b";
+ MemoryRegion stack_mask = stack_map.GetStackMask();
+ for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
+ os << stack_mask.LoadBit(e - i - 1);
+ }
+ os << ")\n";
+ };
+
// Display a CodeInfo object emitted by the optimizing compiler.
void DumpCodeInfo(std::ostream& os,
const CodeInfo& code_info,
@@ -1049,27 +1076,21 @@ class OatDumper {
os << " Optimized CodeInfo (size=" << code_info_size
<< ", number_of_dex_registers=" << number_of_dex_registers
<< ", number_of_stack_maps=" << number_of_stack_maps << ")\n";
+
+ // Display stack maps along with Dex register maps.
for (size_t i = 0; i < number_of_stack_maps; ++i) {
StackMap stack_map = code_info.GetStackMapAt(i);
- // TODO: Display stack_mask value.
- os << " StackMap " << i
- << std::hex
- << " (dex_pc=0x" << stack_map.GetDexPc()
- << ", native_pc_offset=0x" << stack_map.GetNativePcOffset()
- << ", register_mask=0x" << stack_map.GetRegisterMask()
- << std::dec
- << ")\n";
+ DumpStackMapHeader(os, code_info, i);
if (stack_map.HasDexRegisterMap()) {
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
for (size_t j = 0; j < number_of_dex_registers; ++j) {
- os << " v" << j << ": "
- << DexRegisterMap::PrettyDescriptor(dex_register_map.GetLocationKind(j))
- << " (" << dex_register_map.GetValue(j) << ")\n";
+ DexRegisterLocation location = dex_register_map.GetLocationKindAndValue(j);
+ DumpRegisterMapping(os, j, location.GetInternalKind(), location.GetValue());
}
}
- // TODO: Display more information from code_info.
}
+ // TODO: Dump the stack map's inline information.
}
// Display a vmap table.
diff --git a/runtime/check_reference_map_visitor.h b/runtime/check_reference_map_visitor.h
index 93062a7c4b..893ab11bad 100644
--- a/runtime/check_reference_map_visitor.h
+++ b/runtime/check_reference_map_visitor.h
@@ -66,31 +66,36 @@ class CheckReferenceMapVisitor : public StackVisitor {
mirror::ArtMethod* m = GetMethod();
CodeInfo code_info = m->GetOptimizedCodeInfo();
StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
- DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map, m->GetCodeItem()->registers_size_);
+ DexRegisterMap dex_register_map =
+ code_info.GetDexRegisterMapOf(stack_map, m->GetCodeItem()->registers_size_);
MemoryRegion stack_mask = stack_map.GetStackMask();
uint32_t register_mask = stack_map.GetRegisterMask();
for (int i = 0; i < number_of_references; ++i) {
int reg = registers[i];
CHECK(reg < m->GetCodeItem()->registers_size_);
- DexRegisterMap::LocationKind location = dex_register_map.GetLocationKind(reg);
- switch (location) {
- case DexRegisterMap::kNone:
+ DexRegisterLocation location = dex_register_map.GetLocationKindAndValue(reg);
+ switch (location.GetKind()) {
+ case DexRegisterLocation::Kind::kNone:
// Not set, should not be a reference.
CHECK(false);
break;
- case DexRegisterMap::kInStack:
- CHECK(stack_mask.LoadBit(dex_register_map.GetValue(reg) >> 2));
+ case DexRegisterLocation::Kind::kInStack:
+ DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
+ CHECK(stack_mask.LoadBit(location.GetValue() / kFrameSlotSize));
break;
- case DexRegisterMap::kInRegister:
- CHECK_NE(register_mask & (1 << dex_register_map.GetValue(reg)), 0u);
+ case DexRegisterLocation::Kind::kInRegister:
+ CHECK_NE(register_mask & (1 << location.GetValue()), 0u);
break;
- case DexRegisterMap::kInFpuRegister:
+ case DexRegisterLocation::Kind::kInFpuRegister:
// In Fpu register, should not be a reference.
CHECK(false);
break;
- case DexRegisterMap::kConstant:
- CHECK_EQ(dex_register_map.GetValue(reg), 0);
+ case DexRegisterLocation::Kind::kConstant:
+ CHECK_EQ(location.GetValue(), 0);
break;
+ default:
+ LOG(FATAL) << "Unexpected location kind"
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
}
}
}
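
The in-stack and in-register branches above tie a reference's location back to the GC maps: a stack location's byte offset divided by the 4-byte frame slot gives the bit index in the stack mask, and a core register number indexes register_mask. A standalone sketch of those two checks, with the stack mask modeled as a plain integer rather than ART's MemoryRegion-backed bits:

#include <cassert>
#include <cstdint>

static constexpr int32_t kFrameSlotSize = 4;  // Same value as in runtime/stack_map.h.

// In-stack reference: the byte offset is slot-aligned and its slot's bit
// must be set in the GC stack mask.
static bool IsStackSlotLive(uint64_t stack_mask_bits, int32_t byte_offset) {
  assert(byte_offset % kFrameSlotSize == 0);
  return ((stack_mask_bits >> (byte_offset / kFrameSlotSize)) & 1u) != 0u;
}

// In-register reference: the core register must be flagged in register_mask.
static bool IsRegisterLive(uint32_t register_mask, int reg) {
  return (register_mask & (1u << reg)) != 0u;
}

int main() {
  uint64_t stack_mask = 0x5;          // Slots 0 and 2 hold references.
  uint32_t register_mask = 1u << 5;   // Register 5 holds a reference.
  assert(IsStackSlotLive(stack_mask, 8));    // Byte offset 8 is slot 2.
  assert(!IsStackSlotLive(stack_mask, 4));   // Slot 1 holds no reference.
  assert(IsRegisterLive(register_mask, 5));
  return 0;
}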
diff --git a/runtime/memory_region.h b/runtime/memory_region.h
index b3820be26c..939a1a9212 100644
--- a/runtime/memory_region.h
+++ b/runtime/memory_region.h
@@ -23,6 +23,7 @@
#include "base/macros.h"
#include "base/value_object.h"
#include "globals.h"
+#include "utils.h"
namespace art {
@@ -45,14 +46,64 @@ class MemoryRegion FINAL : public ValueObject {
uint8_t* start() const { return reinterpret_cast<uint8_t*>(pointer_); }
uint8_t* end() const { return start() + size_; }
+ // Load value of type `T` at `offset`. The memory address corresponding
+ // to `offset` should be word-aligned.
template<typename T> T Load(uintptr_t offset) const {
+ // TODO: DCHECK that the address is word-aligned.
return *ComputeInternalPointer<T>(offset);
}
+ // Store `value` (of type `T`) at `offset`. The memory address
+ // corresponding to `offset` should be word-aligned.
template<typename T> void Store(uintptr_t offset, T value) const {
+ // TODO: DCHECK that the address is word-aligned.
*ComputeInternalPointer<T>(offset) = value;
}
+ // TODO: Local hack to prevent name clashes between two conflicting
+ // implementations of bit_cast:
+ // - art::bit_cast<Destination, Source> runtime/base/casts.h, and
+ // - art::bit_cast<Source, Destination> from runtime/utils.h.
+ // Remove this when these routines have been merged.
+ template<typename Source, typename Destination>
+ static Destination local_bit_cast(Source in) {
+ static_assert(sizeof(Source) <= sizeof(Destination),
+ "Size of Source not <= size of Destination");
+ union {
+ Source u;
+ Destination v;
+ } tmp;
+ tmp.u = in;
+ return tmp.v;
+ }
+
+ // Load value of type `T` at `offset`. The memory address corresponding
+ // to `offset` does not need to be word-aligned.
+ template<typename T> T LoadUnaligned(uintptr_t offset) const {
+ // Equivalent unsigned integer type corresponding to T.
+ typedef typename UnsignedIntegerType<sizeof(T)>::type U;
+ U equivalent_unsigned_integer_value = 0;
+ // Read the value byte by byte in a little-endian fashion.
+ for (size_t i = 0; i < sizeof(U); ++i) {
+ equivalent_unsigned_integer_value +=
+ *ComputeInternalPointer<uint8_t>(offset + i) << (i * kBitsPerByte);
+ }
+ return local_bit_cast<U, T>(equivalent_unsigned_integer_value);
+ }
+
+ // Store `value` (of type `T`) at `offset`. The memory address
+ // corresponding to `offset` does not need to be word-aligned.
+ template<typename T> void StoreUnaligned(uintptr_t offset, T value) const {
+ // Equivalent unsigned integer type corresponding to T.
+ typedef typename UnsignedIntegerType<sizeof(T)>::type U;
+ U equivalent_unsigned_integer_value = local_bit_cast<T, U>(value);
+ // Write the value byte by byte in a little-endian fashion.
+ for (size_t i = 0; i < sizeof(U); ++i) {
+ *ComputeInternalPointer<uint8_t>(offset + i) =
+ (equivalent_unsigned_integer_value >> (i * kBitsPerByte)) & 0xFF;
+ }
+ }
+
template<typename T> T* PointerTo(uintptr_t offset) const {
return ComputeInternalPointer<T>(offset);
}
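
LoadUnaligned and StoreUnaligned above avoid unaligned word accesses (which can fault on ARM) by going through individual bytes in little-endian order. A self-contained sketch of the same technique on a raw byte buffer, independent of MemoryRegion:

#include <cassert>
#include <cstddef>
#include <cstdint>

static constexpr size_t kBitsPerByte = 8;

// Assemble the value one byte at a time, so the source address never needs
// to be aligned; this matters for the 1- and 5-byte Dex register entries.
template <typename U>
static U LoadUnalignedLE(const uint8_t* bytes) {
  U value = 0;
  for (size_t i = 0; i < sizeof(U); ++i) {
    value += static_cast<U>(bytes[i]) << (i * kBitsPerByte);
  }
  return value;
}

// The symmetric byte-by-byte store.
template <typename U>
static void StoreUnalignedLE(uint8_t* bytes, U value) {
  for (size_t i = 0; i < sizeof(U); ++i) {
    bytes[i] = static_cast<uint8_t>((value >> (i * kBitsPerByte)) & 0xFF);
  }
}

int main() {
  uint8_t buffer[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  // The 32-bit load starts at offset 1, which is not 4-byte aligned.
  uint32_t loaded = LoadUnalignedLE<uint32_t>(buffer + 1);
  assert(loaded == (1u | (2u << 8) | (3u << 16) | (4u << 24)));
  StoreUnalignedLE<uint32_t>(buffer + 3, 0xAABBCCDDu);
  assert(buffer[3] == 0xDD && buffer[6] == 0xAA);
  return 0;
}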
diff --git a/runtime/memory_region_test.cc b/runtime/memory_region_test.cc
new file mode 100644
index 0000000000..50575dd21a
--- /dev/null
+++ b/runtime/memory_region_test.cc
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "memory_region.h"
+
+#include "gtest/gtest.h"
+
+namespace art {
+
+TEST(MemoryRegion, LoadUnaligned) {
+ const size_t n = 8;
+ uint8_t data[n] = { 0, 1, 2, 3, 4, 5, 6, 7 };
+ MemoryRegion region(&data, n);
+
+ ASSERT_EQ(0, region.LoadUnaligned<char>(0));
+ ASSERT_EQ(1u
+ + (2u << kBitsPerByte)
+ + (3u << 2 * kBitsPerByte)
+ + (4u << 3 * kBitsPerByte),
+ region.LoadUnaligned<uint32_t>(1));
+ ASSERT_EQ(5 + (6 << kBitsPerByte), region.LoadUnaligned<int16_t>(5));
+ ASSERT_EQ(7u, region.LoadUnaligned<unsigned char>(7));
+}
+
+TEST(MemoryRegion, StoreUnaligned) {
+ const size_t n = 8;
+ uint8_t data[n] = { 0, 0, 0, 0, 0, 0, 0, 0 };
+ MemoryRegion region(&data, n);
+
+ region.StoreUnaligned<unsigned char>(0u, 7);
+ region.StoreUnaligned<int16_t>(1, 6 + (5 << kBitsPerByte));
+ region.StoreUnaligned<uint32_t>(3,
+ 4u
+ + (3u << kBitsPerByte)
+ + (2u << 2 * kBitsPerByte)
+ + (1u << 3 * kBitsPerByte));
+ region.StoreUnaligned<char>(7, 0);
+
+ uint8_t expected[n] = { 7, 6, 5, 4, 3, 2, 1, 0 };
+ for (size_t i = 0; i < n; ++i) {
+ ASSERT_EQ(expected[i], data[i]);
+ }
+}
+}
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 48becf688f..e420c57346 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -204,29 +204,32 @@ bool StackVisitor::GetVRegFromOptimizedCode(mirror::ArtMethod* m, uint16_t vreg,
DCHECK(code_item != nullptr) << PrettyMethod(m); // Can't be NULL or how would we compile
// its instructions?
DCHECK_LT(vreg, code_item->registers_size_);
- DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map,
- code_item->registers_size_);
- DexRegisterMap::LocationKind location_kind = dex_register_map.GetLocationKind(vreg);
+ DexRegisterMap dex_register_map =
+ code_info.GetDexRegisterMapOf(stack_map, code_item->registers_size_);
+ DexRegisterLocation::Kind location_kind = dex_register_map.GetLocationKind(vreg);
switch (location_kind) {
- case DexRegisterMap::kInStack: {
+ case DexRegisterLocation::Kind::kInStack: {
const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg);
const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
*val = *reinterpret_cast<const uint32_t*>(addr);
return true;
}
- case DexRegisterMap::kInRegister:
- case DexRegisterMap::kInFpuRegister: {
+ case DexRegisterLocation::Kind::kInRegister:
+ case DexRegisterLocation::Kind::kInFpuRegister: {
uint32_t reg = dex_register_map.GetMachineRegister(vreg);
return GetRegisterIfAccessible(reg, kind, val);
}
- case DexRegisterMap::kConstant:
+ case DexRegisterLocation::Kind::kConstant:
*val = dex_register_map.GetConstant(vreg);
return true;
- case DexRegisterMap::kNone:
+ case DexRegisterLocation::Kind::kNone:
return false;
+ default:
+ LOG(FATAL)
+ << "Unexpected location kind"
+ << DexRegisterLocation::PrettyDescriptor(dex_register_map.GetLocationInternalKind(vreg));
+ UNREACHABLE();
}
- UNREACHABLE();
- return false;
}
bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
@@ -386,29 +389,29 @@ bool StackVisitor::SetVRegFromOptimizedCode(mirror::ArtMethod* m, uint16_t vreg,
DCHECK(code_item != nullptr) << PrettyMethod(m); // Can't be NULL or how would we compile
// its instructions?
DCHECK_LT(vreg, code_item->registers_size_);
- DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(stack_map,
- code_item->registers_size_);
- DexRegisterMap::LocationKind location_kind = dex_register_map.GetLocationKind(vreg);
+ DexRegisterMap dex_register_map =
+ code_info.GetDexRegisterMapOf(stack_map, code_item->registers_size_);
+ DexRegisterLocation::Kind location_kind = dex_register_map.GetLocationKind(vreg);
uint32_t dex_pc = m->ToDexPc(cur_quick_frame_pc_, false);
switch (location_kind) {
- case DexRegisterMap::kInStack: {
+ case DexRegisterLocation::Kind::kInStack: {
const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg);
uint8_t* addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + offset;
*reinterpret_cast<uint32_t*>(addr) = new_value;
return true;
}
- case DexRegisterMap::kInRegister:
- case DexRegisterMap::kInFpuRegister: {
+ case DexRegisterLocation::Kind::kInRegister:
+ case DexRegisterLocation::Kind::kInFpuRegister: {
uint32_t reg = dex_register_map.GetMachineRegister(vreg);
return SetRegisterIfAccessible(reg, new_value, kind);
}
- case DexRegisterMap::kConstant:
+ case DexRegisterLocation::Kind::kConstant:
LOG(ERROR) << StringPrintf("Cannot change value of DEX register v%u used as a constant at "
"DEX pc 0x%x (native pc 0x%x) of method %s",
vreg, dex_pc, native_pc_offset,
PrettyMethod(cur_quick_frame_->AsMirrorPtr()).c_str());
return false;
- case DexRegisterMap::kNone:
+ case DexRegisterLocation::Kind::kNone:
LOG(ERROR) << StringPrintf("No location for DEX register v%u at DEX pc 0x%x "
"(native pc 0x%x) of method %s",
vreg, dex_pc, native_pc_offset,
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 6d996722b4..c98162306c 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -23,6 +23,11 @@
namespace art {
+// Size of a frame slot, in bytes. This constant is a signed value,
+// to please the compiler in arithmetic operations involving int32_t
+// (signed) values.
+static ssize_t constexpr kFrameSlotSize = 4;
+
/**
* Classes in the following file are wrapper on stack map information backed
* by a MemoryRegion. As such they read and write to the region, they don't have
@@ -58,6 +63,8 @@ class InlineInfo {
}
private:
+ // TODO: Instead of plain types such as "uint8_t", introduce
+ // typedefs (and document the memory layout of InlineInfo).
static constexpr int kDepthOffset = 0;
static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);
@@ -68,82 +75,327 @@ class InlineInfo {
friend class StackMapStream;
};
+// Dex register location container used by DexRegisterMap and StackMapStream.
+class DexRegisterLocation {
+ public:
+ /*
+ * The location kind used to populate the Dex register information in a
+ * StackMapStream can either be:
+ * - kNone: the register has no location yet, meaning it has not been set;
+ * - kConstant: value holds the constant;
+ * - kStack: value holds the stack offset;
+ * - kRegister: value holds the physical register number;
+ * - kFpuRegister: value holds the physical register number.
+ *
+ * In addition, DexRegisterMap also uses these values:
+ * - kInStackLargeOffset: value holds a "large" stack offset (greater than
+ * 128 bytes);
+ * - kConstantLargeValue: value holds a "large" constant (lower than or
+ * equal to -16, or greater than 16).
+ */
+ enum class Kind : uint8_t {
+ // Short location kinds, for entries fitting on one byte (3 bits
+ // for the kind, 5 bits for the value) in a DexRegisterMap.
+ kNone = 0, // 0b000
+ kInStack = 1, // 0b001
+ kInRegister = 2, // 0b010
+ kInFpuRegister = 3, // 0b011
+ kConstant = 4, // 0b100
+
+ // Large location kinds, requiring a 5-byte encoding (1 byte for the
+ // kind, 4 bytes for the value).
+
+ // Stack location at a large offset, meaning that the offset value
+ // divided by the stack frame slot size (4 bytes) cannot fit on a
+ // 5-bit unsigned integer (i.e., this offset value is greater than
+ // or equal to 2^5 * 4 = 128 bytes).
+ kInStackLargeOffset = 5, // 0b101
+
+ // Large constant, that cannot fit on a 5-bit signed integer (i.e.,
+ // lower than -2^(5-1) = -16, or greater than or equal to
+ // 2^(5-1) - 1 = 15).
+ kConstantLargeValue = 6, // 0b110
+
+ kLastLocationKind = kConstantLargeValue
+ };
+
+ static_assert(
+ sizeof(Kind) == 1u,
+ "art::DexRegisterLocation::Kind has a size different from one byte.");
+
+ static const char* PrettyDescriptor(Kind kind) {
+ switch (kind) {
+ case Kind::kNone:
+ return "none";
+ case Kind::kInStack:
+ return "in stack";
+ case Kind::kInRegister:
+ return "in register";
+ case Kind::kInFpuRegister:
+ return "in fpu register";
+ case Kind::kConstant:
+ return "as constant";
+ case Kind::kInStackLargeOffset:
+ return "in stack (large offset)";
+ case Kind::kConstantLargeValue:
+ return "as constant (large value)";
+ default:
+ UNREACHABLE();
+ }
+ }
+
+ static bool IsShortLocationKind(Kind kind) {
+ switch (kind) {
+ case Kind::kNone:
+ case Kind::kInStack:
+ case Kind::kInRegister:
+ case Kind::kInFpuRegister:
+ case Kind::kConstant:
+ return true;
+
+ case Kind::kInStackLargeOffset:
+ case Kind::kConstantLargeValue:
+ return false;
+
+ default:
+ UNREACHABLE();
+ }
+ }
+
+ // Convert `kind` to a "surface" kind, i.e. one that doesn't include
+ // any value with a "large" qualifier.
+ // TODO: Introduce another enum type for the surface kind?
+ static Kind ConvertToSurfaceKind(Kind kind) {
+ switch (kind) {
+ case Kind::kNone:
+ case Kind::kInStack:
+ case Kind::kInRegister:
+ case Kind::kInFpuRegister:
+ case Kind::kConstant:
+ return kind;
+
+ case Kind::kInStackLargeOffset:
+ return Kind::kInStack;
+
+ case Kind::kConstantLargeValue:
+ return Kind::kConstant;
+
+ default:
+ UNREACHABLE();
+ }
+ }
+
+ DexRegisterLocation(Kind kind, int32_t value)
+ : kind_(kind), value_(value) {}
+
+ // Get the "surface" kind of the location, i.e., the one that doesn't
+ // include any value with a "large" qualifier.
+ Kind GetKind() const {
+ return ConvertToSurfaceKind(kind_);
+ }
+
+ // Get the value of the location.
+ int32_t GetValue() const { return value_; }
+
+ // Get the actual kind of the location.
+ Kind GetInternalKind() const { return kind_; }
+
+ private:
+ Kind kind_;
+ int32_t value_;
+};
+
/**
* Information on dex register values for a specific PC. The information is
* of the form:
* [location_kind, register_value]+.
- *
- * The location_kind for a Dex register can either be:
- * - kConstant: register_value holds the constant,
- * - kStack: register_value holds the stack offset,
- * - kRegister: register_value holds the physical register number.
- * - kFpuRegister: register_value holds the physical register number.
- * - kNone: the register has no location yet, meaning it has not been set.
+ * either on 1 or 5 bytes (see art::DexRegisterLocation::Kind).
*/
class DexRegisterMap {
public:
explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
- enum LocationKind {
- kNone,
- kInStack,
- kInRegister,
- kInFpuRegister,
- kConstant
- };
+ // Short (compressed) location, fitting on one byte.
+ typedef uint8_t ShortLocation;
+
+ void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
+ DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
+ int32_t value = dex_register_location.GetValue();
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Compress the kind and the value as a single byte.
+ if (kind == DexRegisterLocation::Kind::kInStack) {
+ // Instead of storing stack offsets expressed in bytes for
+ // short stack locations, store slot offsets. A stack offset
+ // is a multiple of 4 (kFrameSlotSize). This means that by
+ // dividing it by 4, we can fit values from the [0, 128)
+ // interval in a short stack location, and not just values
+ // from the [0, 32) interval.
+ DCHECK_EQ(value % kFrameSlotSize, 0);
+ value /= kFrameSlotSize;
+ }
+ DCHECK(IsUint<kValueBits>(value)) << value;
+ region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
+ } else {
+ // Large location. Write the location on one byte and the value
+ // on 4 bytes.
+ DCHECK(!IsUint<kValueBits>(value)) << value;
+ if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
+ // Also divide large stack offsets by 4 for the sake of consistency.
+ DCHECK_EQ(value % kFrameSlotSize, 0);
+ value /= kFrameSlotSize;
+ }
+ // Data can be unaligned as the written Dex register locations can
+ // either be 1-byte or 5-byte wide. Use
+ // art::MemoryRegion::StoreUnaligned instead of
+      // art::MemoryRegion::Store to prevent unaligned word accesses on ARM.
+ region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
+ region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
+ }
+ }
- static const char* PrettyDescriptor(LocationKind kind) {
- switch (kind) {
- case kNone:
- return "none";
- case kInStack:
- return "in stack";
- case kInRegister:
- return "in register";
- case kInFpuRegister:
- return "in fpu register";
- case kConstant:
- return "as constant";
+ // Find the offset of the Dex register location number `dex_register_index`.
+ size_t FindLocationOffset(uint16_t dex_register_index) const {
+ size_t offset = kFixedSize;
+    // Skip the first `dex_register_index` entries.
+ for (uint16_t i = 0; i < dex_register_index; ++i) {
+ // Read the first next byte and inspect its first 3 bits to decide
+ // whether it is a short or a large location.
+ DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Skip the current byte.
+ offset += SingleShortEntrySize();
+ } else {
+ // Large location. Skip the 5 next bytes.
+ offset += SingleLargeEntrySize();
+ }
}
- UNREACHABLE();
- return nullptr;
+ return offset;
}
- LocationKind GetLocationKind(uint16_t register_index) const {
- return region_.Load<LocationKind>(
- kFixedSize + register_index * SingleEntrySize());
+ // Get the surface kind.
+ DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index) const {
+ return DexRegisterLocation::ConvertToSurfaceKind(GetLocationInternalKind(dex_register_index));
}
- void SetRegisterInfo(uint16_t register_index, LocationKind kind, int32_t value) {
- size_t entry = kFixedSize + register_index * SingleEntrySize();
- region_.Store<LocationKind>(entry, kind);
- region_.Store<int32_t>(entry + sizeof(LocationKind), value);
+ // Get the internal kind.
+ DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index) const {
+ size_t offset = FindLocationOffset(dex_register_index);
+ return ExtractKindAtOffset(offset);
}
- int32_t GetValue(uint16_t register_index) const {
- return region_.Load<int32_t>(
- kFixedSize + sizeof(LocationKind) + register_index * SingleEntrySize());
+ // TODO: Rename as GetDexRegisterLocation?
+ DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index) const {
+ size_t offset = FindLocationOffset(dex_register_index);
+ // Read the first byte and inspect its first 3 bits to get the location.
+ ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
+ DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Extract the value from the remaining 5 bits.
+ int32_t value = ExtractValueFromShortLocation(first_byte);
+ if (kind == DexRegisterLocation::Kind::kInStack) {
+ // Convert the stack slot (short) offset to a byte offset value.
+ value *= kFrameSlotSize;
+ }
+ return DexRegisterLocation(kind, value);
+ } else {
+ // Large location. Read the four next bytes to get the value.
+ int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
+ if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
+ // Convert the stack slot (large) offset to a byte offset value.
+ value *= kFrameSlotSize;
+ }
+ return DexRegisterLocation(kind, value);
+ }
}
- int32_t GetStackOffsetInBytes(uint16_t register_index) const {
- DCHECK(GetLocationKind(register_index) == kInStack);
- // We currently encode the offset in bytes.
- return GetValue(register_index);
+ int32_t GetStackOffsetInBytes(uint16_t dex_register_index) const {
+ DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
+ DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
+ // GetLocationKindAndValue returns the offset in bytes.
+ return location.GetValue();
}
- int32_t GetConstant(uint16_t register_index) const {
- DCHECK(GetLocationKind(register_index) == kConstant);
- return GetValue(register_index);
+ int32_t GetConstant(uint16_t dex_register_index) const {
+ DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
+ DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
+ return location.GetValue();
}
- int32_t GetMachineRegister(uint16_t register_index) const {
- DCHECK(GetLocationKind(register_index) == kInRegister
- || GetLocationKind(register_index) == kInFpuRegister);
- return GetValue(register_index);
+ int32_t GetMachineRegister(uint16_t dex_register_index) const {
+ DexRegisterLocation location = GetLocationKindAndValue(dex_register_index);
+ DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
+ || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
+ return location.GetValue();
}
- static size_t SingleEntrySize() {
- return sizeof(LocationKind) + sizeof(int32_t);
+ // Compute the compressed kind of `location`.
+ static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
+ switch (location.GetInternalKind()) {
+ case DexRegisterLocation::Kind::kNone:
+ DCHECK_EQ(location.GetValue(), 0);
+ return DexRegisterLocation::Kind::kNone;
+
+ case DexRegisterLocation::Kind::kInRegister:
+ DCHECK_GE(location.GetValue(), 0);
+ DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+ return DexRegisterLocation::Kind::kInRegister;
+
+ case DexRegisterLocation::Kind::kInFpuRegister:
+ DCHECK_GE(location.GetValue(), 0);
+ DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+ return DexRegisterLocation::Kind::kInFpuRegister;
+
+ case DexRegisterLocation::Kind::kInStack:
+ DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
+ return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
+ ? DexRegisterLocation::Kind::kInStack
+ : DexRegisterLocation::Kind::kInStackLargeOffset;
+
+ case DexRegisterLocation::Kind::kConstant:
+ return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
+ ? DexRegisterLocation::Kind::kConstant
+ : DexRegisterLocation::Kind::kConstantLargeValue;
+
+ default:
+ LOG(FATAL) << "Unexpected location kind"
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
+ UNREACHABLE();
+ }
+ }
+
+ // Can `location` be turned into a short location?
+ static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
+ switch (location.GetInternalKind()) {
+ case DexRegisterLocation::Kind::kNone:
+ case DexRegisterLocation::Kind::kInRegister:
+ case DexRegisterLocation::Kind::kInFpuRegister:
+ return true;
+
+ case DexRegisterLocation::Kind::kInStack:
+ DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
+ return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);
+
+ case DexRegisterLocation::Kind::kConstant:
+ return IsUint<kValueBits>(location.GetValue());
+
+ default:
+ UNREACHABLE();
+ }
+ }
+
+ static size_t EntrySize(const DexRegisterLocation& location) {
+ return CanBeEncodedAsShortLocation(location)
+ ? DexRegisterMap::SingleShortEntrySize()
+ : DexRegisterMap::SingleLargeEntrySize();
+ }
+
+ static size_t SingleShortEntrySize() {
+ return sizeof(ShortLocation);
+ }
+
+ static size_t SingleLargeEntrySize() {
+ return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
}
size_t Size() const {
@@ -153,7 +405,43 @@ class DexRegisterMap {
static constexpr int kFixedSize = 0;
private:
+ // Width of the kind "field" in a short location, in bits.
+ static constexpr size_t kKindBits = 3;
+ // Width of the value "field" in a short location, in bits.
+ static constexpr size_t kValueBits = 5;
+
+ static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
+ static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
+ static constexpr size_t kKindOffset = 0;
+ static constexpr size_t kValueOffset = kKindBits;
+
+ static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
+ DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
+ DCHECK(IsUint<kValueBits>(value)) << value;
+ return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
+ | (value & kValueMask) << kValueOffset;
+ }
+
+ static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
+ uint8_t kind = (location >> kKindOffset) & kKindMask;
+ DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
+ return static_cast<DexRegisterLocation::Kind>(kind);
+ }
+
+ static int32_t ExtractValueFromShortLocation(ShortLocation location) {
+ return (location >> kValueOffset) & kValueMask;
+ }
+
+ // Extract a location kind from the byte at position `offset`.
+ DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
+ ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
+ return ExtractKindFromShortLocation(first_byte);
+ }
+
MemoryRegion region_;
+
+ friend class CodeInfo;
+ friend class StackMapStream;
};
/**
@@ -187,7 +475,7 @@ class StackMap {
}
void SetNativePcOffset(uint32_t native_pc_offset) {
- return region_.Store<uint32_t>(kNativePcOffsetOffset, native_pc_offset);
+ region_.Store<uint32_t>(kNativePcOffsetOffset, native_pc_offset);
}
uint32_t GetDexRegisterMapOffset() const {
@@ -195,7 +483,7 @@ class StackMap {
}
void SetDexRegisterMapOffset(uint32_t offset) {
- return region_.Store<uint32_t>(kDexRegisterMapOffsetOffset, offset);
+ region_.Store<uint32_t>(kDexRegisterMapOffsetOffset, offset);
}
uint32_t GetInlineDescriptorOffset() const {
@@ -203,7 +491,7 @@ class StackMap {
}
void SetInlineDescriptorOffset(uint32_t offset) {
- return region_.Store<uint32_t>(kInlineDescriptorOffsetOffset, offset);
+ region_.Store<uint32_t>(kInlineDescriptorOffsetOffset, offset);
}
uint32_t GetRegisterMask() const {
@@ -238,9 +526,9 @@ class StackMap {
&& region_.size() == other.region_.size();
}
- static size_t ComputeAlignedStackMapSize(size_t stack_mask_size) {
+ static size_t ComputeAlignedStackMapSize(size_t stack_map_size) {
// On ARM, the stack maps must be 4-byte aligned.
- return RoundUp(StackMap::kFixedSize + stack_mask_size, 4);
+ return RoundUp(StackMap::kFixedSize + stack_map_size, 4);
}
// Special (invalid) offset for the DexRegisterMapOffset field meaning
@@ -252,6 +540,8 @@ class StackMap {
static constexpr uint32_t kNoInlineInfo = -1;
private:
+ // TODO: Instead of plain types such as "uint32_t", introduce
+ // typedefs (and document the memory layout of StackMap).
static constexpr int kDexPcOffset = 0;
static constexpr int kNativePcOffsetOffset = kDexPcOffset + sizeof(uint32_t);
static constexpr int kDexRegisterMapOffsetOffset = kNativePcOffsetOffset + sizeof(uint32_t);
@@ -317,11 +607,15 @@ class CodeInfo {
return StackMap::ComputeAlignedStackMapSize(GetStackMaskSize());
}
+ uint32_t GetStackMapsOffset() const {
+ return kFixedSize;
+ }
+
DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
DCHECK(stack_map.HasDexRegisterMap());
uint32_t offset = stack_map.GetDexRegisterMapOffset();
- return DexRegisterMap(region_.Subregion(offset,
- DexRegisterMap::kFixedSize + number_of_dex_registers * DexRegisterMap::SingleEntrySize()));
+ size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
+ return DexRegisterMap(region_.Subregion(offset, size));
}
InlineInfo GetInlineInfoOf(StackMap stack_map) const {
@@ -356,6 +650,8 @@ class CodeInfo {
}
private:
+ // TODO: Instead of plain types such as "uint32_t", introduce
+ // typedefs (and document the memory layout of CodeInfo).
static constexpr int kOverallSizeOffset = 0;
static constexpr int kNumberOfStackMapsOffset = kOverallSizeOffset + sizeof(uint32_t);
static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
@@ -367,6 +663,33 @@ class CodeInfo {
: region_.Subregion(kFixedSize, StackMapSize() * GetNumberOfStackMaps());
}
+ // Compute the size of a Dex register map starting at offset `origin` in
+ // `region_` and containing `number_of_dex_registers` locations.
+ size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
+ // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
+ // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
+ // yet built. Try to factor common code.
+ size_t offset = origin + DexRegisterMap::kFixedSize;
+    // Skip the first `number_of_dex_registers` entries.
+ for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
+ // Read the first next byte and inspect its first 3 bits to decide
+ // whether it is a short or a large location.
+ DexRegisterMap::ShortLocation first_byte =
+ region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
+ DexRegisterLocation::Kind kind =
+ DexRegisterMap::ExtractKindFromShortLocation(first_byte);
+ if (DexRegisterLocation::IsShortLocationKind(kind)) {
+ // Short location. Skip the current byte.
+ offset += DexRegisterMap::SingleShortEntrySize();
+ } else {
+ // Large location. Skip the 5 next bytes.
+ offset += DexRegisterMap::SingleLargeEntrySize();
+ }
+ }
+ size_t size = offset - origin;
+ return size;
+ }
+
MemoryRegion region_;
friend class StackMapStream;
};
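
To make the short/large split concrete: a short location packs a 3-bit kind and a 5-bit value into one byte, and in-stack values are stored as slot indices (byte offset divided by 4) so the one-byte form covers offsets in [0, 128). A standalone round-trip sketch of that packing, with a simplified Kind enum and constants mirroring the private ones in DexRegisterMap (an illustration, not the class itself):

#include <cassert>
#include <cstdint>

static constexpr int kKindBits = 3;
static constexpr int kValueBits = 5;
static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
static constexpr uint8_t kValueMask = (1 << kValueBits) - 1;
static constexpr int32_t kFrameSlotSize = 4;

// Short location kinds only; the large kinds fall back to 1 + 4 bytes.
enum class Kind : uint8_t {
  kNone = 0, kInStack = 1, kInRegister = 2, kInFpuRegister = 3, kConstant = 4
};

// Kind in bits 0-2, value in bits 3-7, as in DexRegisterMap::MakeShortLocation.
static uint8_t MakeShortLocation(Kind kind, int32_t value) {
  return static_cast<uint8_t>((static_cast<uint8_t>(kind) & kKindMask)
                              | ((value & kValueMask) << kKindBits));
}

static Kind KindOf(uint8_t location) {
  return static_cast<Kind>(location & kKindMask);
}

static int32_t ValueOf(uint8_t location) {
  return (location >> kKindBits) & kValueMask;
}

int main() {
  // A stack location at byte offset 116 is stored as slot 29, which fits the
  // 5-bit value field, so one byte is enough.
  uint8_t encoded = MakeShortLocation(Kind::kInStack, 116 / kFrameSlotSize);
  assert(KindOf(encoded) == Kind::kInStack);
  assert(ValueOf(encoded) * kFrameSlotSize == 116);
  // Byte offset 128 would need slot 32, which does not fit 5 bits; the map
  // then uses the 5-byte kInStackLargeOffset encoding instead.
  return 0;
}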
diff --git a/runtime/utils.h b/runtime/utils.h
index d294f4b1a1..0c11610932 100644
--- a/runtime/utils.h
+++ b/runtime/utils.h
@@ -173,6 +173,24 @@ static inline uint32_t High32Bits(uint64_t value) {
return static_cast<uint32_t>(value >> 32);
}
+// Traits class providing an unsigned integer type of (byte) size `n`.
+template <size_t n>
+struct UnsignedIntegerType {
+ // No defined `type`.
+};
+
+template <>
+struct UnsignedIntegerType<1> { typedef uint8_t type; };
+
+template <>
+struct UnsignedIntegerType<2> { typedef uint16_t type; };
+
+template <>
+struct UnsignedIntegerType<4> { typedef uint32_t type; };
+
+template <>
+struct UnsignedIntegerType<8> { typedef uint64_t type; };
+
// Type identity.
template <typename T>
struct TypeIdentity {
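
The UnsignedIntegerType traits above exist so MemoryRegion::LoadUnaligned and StoreUnaligned can pick the unsigned integer type whose size matches sizeof(T). A small standalone usage sketch (the traits are duplicated here only to keep the example self-contained):

#include <cstddef>
#include <cstdint>
#include <type_traits>

template <std::size_t n>
struct UnsignedIntegerType {
  // No `type` member for unsupported sizes, so misuse fails to compile.
};

template <> struct UnsignedIntegerType<1> { typedef uint8_t type; };
template <> struct UnsignedIntegerType<2> { typedef uint16_t type; };
template <> struct UnsignedIntegerType<4> { typedef uint32_t type; };
template <> struct UnsignedIntegerType<8> { typedef uint64_t type; };

// The unaligned accessors use UnsignedIntegerType<sizeof(T)>::type as the
// byte-assembly type; these checks show which type gets selected.
static_assert(std::is_same<UnsignedIntegerType<sizeof(int16_t)>::type, uint16_t>::value,
              "2-byte types map to uint16_t");
static_assert(std::is_same<UnsignedIntegerType<sizeof(float)>::type, uint32_t>::value,
              "4-byte types map to uint32_t");
static_assert(std::is_same<UnsignedIntegerType<sizeof(double)>::type, uint64_t>::value,
              "8-byte types map to uint64_t");

int main() { return 0; }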