[optimizing] Don't record None locations in the stack maps.
- moved environment recording from code generator to stack map stream
- added creation/loading factory methods for the DexRegisterMap (hiding
internal details)
- added new tests
Change-Id: Ic8b6d044f0d8255c6759c19a41df332ef37876fe
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index a6ab208..9ebf887 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -577,142 +577,42 @@
pc_info.native_pc = GetAssembler()->CodeSize();
pc_infos_.Add(pc_info);
- // Populate stack map information.
-
+ uint32_t inlining_depth = 0;
if (instruction == nullptr) {
// For stack overflow checks.
- stack_map_stream_.AddStackMapEntry(dex_pc, pc_info.native_pc, 0, 0, 0, 0);
- return;
- }
+ stack_map_stream_.RecordEnvironment(
+ /* environment */ nullptr,
+ /* environment_size */ 0,
+ /* locations */ nullptr,
+ dex_pc,
+ pc_info.native_pc,
+ /* register_mask */ 0,
+ inlining_depth);
+ } else {
+ LocationSummary* locations = instruction->GetLocations();
+ HEnvironment* environment = instruction->GetEnvironment();
+ size_t environment_size = instruction->EnvironmentSize();
- LocationSummary* locations = instruction->GetLocations();
- HEnvironment* environment = instruction->GetEnvironment();
-
- size_t environment_size = instruction->EnvironmentSize();
-
- size_t inlining_depth = 0;
- uint32_t register_mask = locations->GetRegisterMask();
- if (locations->OnlyCallsOnSlowPath()) {
- // In case of slow path, we currently set the location of caller-save registers
- // to register (instead of their stack location when pushed before the slow-path
- // call). Therefore register_mask contains both callee-save and caller-save
- // registers that hold objects. We must remove the caller-save from the mask, since
- // they will be overwritten by the callee.
- register_mask &= core_callee_save_mask_;
- }
- // The register mask must be a subset of callee-save registers.
- DCHECK_EQ(register_mask & core_callee_save_mask_, register_mask);
- stack_map_stream_.AddStackMapEntry(
- dex_pc, pc_info.native_pc, register_mask,
- locations->GetStackMask(), environment_size, inlining_depth);
-
- // Walk over the environment, and record the location of dex registers.
- for (size_t i = 0; i < environment_size; ++i) {
- HInstruction* current = environment->GetInstructionAt(i);
- if (current == nullptr) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kNone, 0);
- continue;
+ uint32_t register_mask = locations->GetRegisterMask();
+ if (locations->OnlyCallsOnSlowPath()) {
+ // In case of slow path, we currently set the location of caller-save registers
+ // to register (instead of their stack location when pushed before the slow-path
+ // call). Therefore register_mask contains both callee-save and caller-save
+ // registers that hold objects. We must remove the caller-save from the mask, since
+ // they will be overwritten by the callee.
+ register_mask &= core_callee_save_mask_;
}
+ // The register mask must be a subset of callee-save registers.
+ DCHECK_EQ(register_mask & core_callee_save_mask_, register_mask);
- Location location = locations->GetEnvironmentAt(i);
- switch (location.GetKind()) {
- case Location::kConstant: {
- DCHECK_EQ(current, location.GetConstant());
- if (current->IsLongConstant()) {
- int64_t value = current->AsLongConstant()->GetValue();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
- Low32Bits(value));
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
- High32Bits(value));
- ++i;
- DCHECK_LT(i, environment_size);
- } else if (current->IsDoubleConstant()) {
- int64_t value = bit_cast<double, int64_t>(current->AsDoubleConstant()->GetValue());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
- Low32Bits(value));
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant,
- High32Bits(value));
- ++i;
- DCHECK_LT(i, environment_size);
- } else if (current->IsIntConstant()) {
- int32_t value = current->AsIntConstant()->GetValue();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
- } else if (current->IsNullConstant()) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, 0);
- } else {
- DCHECK(current->IsFloatConstant());
- int32_t value = bit_cast<float, int32_t>(current->AsFloatConstant()->GetValue());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
- }
- break;
- }
-
- case Location::kStackSlot: {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
- location.GetStackIndex());
- break;
- }
-
- case Location::kDoubleStackSlot: {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
- location.GetStackIndex());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
- location.GetHighStackIndex(kVRegSize));
- ++i;
- DCHECK_LT(i, environment_size);
- break;
- }
-
- case Location::kRegister : {
- int id = location.reg();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
- if (current->GetType() == Primitive::kPrimLong) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
- ++i;
- DCHECK_LT(i, environment_size);
- }
- break;
- }
-
- case Location::kFpuRegister : {
- int id = location.reg();
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
- if (current->GetType() == Primitive::kPrimDouble) {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
- ++i;
- DCHECK_LT(i, environment_size);
- }
- break;
- }
-
- case Location::kFpuRegisterPair : {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister,
- location.low());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister,
- location.high());
- ++i;
- DCHECK_LT(i, environment_size);
- break;
- }
-
- case Location::kRegisterPair : {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister,
- location.low());
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister,
- location.high());
- ++i;
- DCHECK_LT(i, environment_size);
- break;
- }
-
- case Location::kInvalid: {
- stack_map_stream_.AddDexRegisterEntry(DexRegisterLocation::Kind::kNone, 0);
- break;
- }
-
- default:
- LOG(FATAL) << "Unexpected kind " << location.GetKind();
- }
+ // Populate stack map information.
+ stack_map_stream_.RecordEnvironment(environment,
+ environment_size,
+ locations,
+ dex_pc,
+ pc_info.native_pc,
+ register_mask,
+ inlining_depth);
}
}
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index b8f4572..b574148 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -29,8 +29,6 @@
namespace art {
-static size_t constexpr kVRegSize = 4;
-
// Binary encoding of 2^32 for type double.
static int64_t constexpr k2Pow32EncodingForDouble = INT64_C(0x41F0000000000000);
// Binary encoding of 2^31 for type double.
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 863bab2..3168801 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -20,6 +20,7 @@
#include "base/bit_vector.h"
#include "base/value_object.h"
#include "memory_region.h"
+#include "nodes.h"
#include "stack_map.h"
#include "utils/growable_array.h"
@@ -32,8 +33,9 @@
class StackMapStream : public ValueObject {
public:
explicit StackMapStream(ArenaAllocator* allocator)
- : stack_maps_(allocator, 10),
- dex_register_maps_(allocator, 10 * 4),
+ : allocator_(allocator),
+ stack_maps_(allocator, 10),
+ dex_register_locations_(allocator, 10 * 4),
inline_infos_(allocator, 2),
stack_mask_max_(-1),
number_of_stack_maps_with_inline_info_(0) {}
@@ -52,8 +54,9 @@
BitVector* sp_mask;
uint32_t num_dex_registers;
uint8_t inlining_depth;
- size_t dex_register_maps_start_index;
+ size_t dex_register_locations_start_index;
size_t inline_infos_start_index;
+ BitVector* live_dex_registers_mask;
};
struct InlineInfoEntry {
@@ -65,7 +68,8 @@
uint32_t register_mask,
BitVector* sp_mask,
uint32_t num_dex_registers,
- uint8_t inlining_depth) {
+ uint8_t inlining_depth,
+ BitVector* live_dex_registers_mask) {
StackMapEntry entry;
entry.dex_pc = dex_pc;
entry.native_pc_offset = native_pc_offset;
@@ -73,8 +77,9 @@
entry.sp_mask = sp_mask;
entry.num_dex_registers = num_dex_registers;
entry.inlining_depth = inlining_depth;
- entry.dex_register_maps_start_index = dex_register_maps_.Size();
+ entry.dex_register_locations_start_index = dex_register_locations_.Size();
entry.inline_infos_start_index = inline_infos_.Size();
+ entry.live_dex_registers_mask = live_dex_registers_mask;
stack_maps_.Add(entry);
if (sp_mask != nullptr) {
@@ -85,11 +90,146 @@
}
}
- void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
- // Ensure we only use non-compressed location kind at this stage.
- DCHECK(DexRegisterLocation::IsShortLocationKind(kind))
- << DexRegisterLocation::PrettyDescriptor(kind);
- dex_register_maps_.Add(DexRegisterLocation(kind, value));
+ void RecordEnvironment(HEnvironment* environment,
+ size_t environment_size,
+ LocationSummary* locations,
+ uint32_t dex_pc,
+ uint32_t native_pc,
+ uint32_t register_mask,
+ uint32_t inlining_depth) {
+ if (environment == nullptr) {
+ // For stack overflow checks.
+ AddStackMapEntry(dex_pc, native_pc, 0, 0, 0, inlining_depth, nullptr);
+ return;
+ }
+
+ BitVector* live_dex_registers_mask = new (allocator_) ArenaBitVector(allocator_, 0, true);
+
+ AddStackMapEntry(
+ dex_pc, native_pc, register_mask,
+ locations->GetStackMask(), environment_size, inlining_depth, live_dex_registers_mask);
+
+ // Walk over the environment, and record the location of dex registers.
+ for (size_t i = 0; i < environment_size; ++i) {
+ HInstruction* current = environment->GetInstructionAt(i);
+ if (current == nullptr) {
+ // No need to store anything, the `live_dex_registers_mask` will hold the
+ // information that this register is not live.
+ continue;
+ }
+
+ Location location = locations->GetEnvironmentAt(i);
+ switch (location.GetKind()) {
+ case Location::kConstant: {
+ DCHECK_EQ(current, location.GetConstant());
+ if (current->IsLongConstant()) {
+ // TODO: Consider moving setting the bit in AddDexRegisterEntry to avoid
+ // doing it manually here.
+ live_dex_registers_mask->SetBit(i);
+ live_dex_registers_mask->SetBit(i + 1);
+ int64_t value = current->AsLongConstant()->GetValue();
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, Low32Bits(value));
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ ++i;
+ DCHECK_LT(i, environment_size);
+ } else if (current->IsDoubleConstant()) {
+ live_dex_registers_mask->SetBit(i);
+ live_dex_registers_mask->SetBit(i + 1);
+ int64_t value = bit_cast<double, int64_t>(current->AsDoubleConstant()->GetValue());
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, Low32Bits(value));
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, High32Bits(value));
+ ++i;
+ DCHECK_LT(i, environment_size);
+ } else if (current->IsIntConstant()) {
+ live_dex_registers_mask->SetBit(i);
+ int32_t value = current->AsIntConstant()->GetValue();
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
+ } else if (current->IsNullConstant()) {
+ live_dex_registers_mask->SetBit(i);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, 0);
+ } else {
+ DCHECK(current->IsFloatConstant()) << current->DebugName();
+ live_dex_registers_mask->SetBit(i);
+ int32_t value = bit_cast<float, int32_t>(current->AsFloatConstant()->GetValue());
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, value);
+ }
+ break;
+ }
+
+ case Location::kStackSlot: {
+ live_dex_registers_mask->SetBit(i);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
+ location.GetStackIndex());
+ break;
+ }
+
+ case Location::kDoubleStackSlot: {
+ live_dex_registers_mask->SetBit(i);
+ live_dex_registers_mask->SetBit(i + 1);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, location.GetStackIndex());
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack,
+ location.GetHighStackIndex(kVRegSize));
+ ++i;
+ DCHECK_LT(i, environment_size);
+ break;
+ }
+
+ case Location::kRegister : {
+ live_dex_registers_mask->SetBit(i);
+ int id = location.reg();
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
+ if (current->GetType() == Primitive::kPrimLong) {
+ live_dex_registers_mask->SetBit(i + 1);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, id);
+ ++i;
+ DCHECK_LT(i, environment_size);
+ }
+ break;
+ }
+
+ case Location::kFpuRegister : {
+ live_dex_registers_mask->SetBit(i);
+ int id = location.reg();
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
+ if (current->GetType() == Primitive::kPrimDouble) {
+ live_dex_registers_mask->SetBit(i + 1);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, id);
+ ++i;
+ DCHECK_LT(i, environment_size);
+ }
+ break;
+ }
+
+ case Location::kFpuRegisterPair : {
+ live_dex_registers_mask->SetBit(i);
+ live_dex_registers_mask->SetBit(i + 1);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, location.low());
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, location.high());
+ ++i;
+ DCHECK_LT(i, environment_size);
+ break;
+ }
+
+ case Location::kRegisterPair : {
+ live_dex_registers_mask->SetBit(i);
+ live_dex_registers_mask->SetBit(i + 1);
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, location.low());
+ AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, location.high());
+ ++i;
+ DCHECK_LT(i, environment_size);
+ break;
+ }
+
+ case Location::kInvalid: {
+ // No need to store anything, the `live_dex_registers_mask` will hold the
+ // information that this register is not live.
+ break;
+ }
+
+ default:
+ LOG(FATAL) << "Unexpected kind " << location.GetKind();
+ }
+ }
}
void AddInlineInfoEntry(uint32_t method_index) {
@@ -118,22 +258,26 @@
// Compute the size of the Dex register map of `entry`.
size_t ComputeDexRegisterMapSize(const StackMapEntry& entry) const {
size_t size = DexRegisterMap::kFixedSize;
- for (size_t j = 0; j < entry.num_dex_registers; ++j) {
- DexRegisterLocation dex_register_location =
- dex_register_maps_.Get(entry.dex_register_maps_start_index + j);
- size += DexRegisterMap::EntrySize(dex_register_location);
+ // Add the bit mask for the dex register liveness.
+ size += DexRegisterMap::LiveBitMaskSize(entry.num_dex_registers);
+ for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
+ dex_register_number < entry.num_dex_registers;
+ ++dex_register_number) {
+ if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
+ DexRegisterLocation dex_register_location = dex_register_locations_.Get(
+ entry.dex_register_locations_start_index + index_in_dex_register_locations);
+ size += DexRegisterMap::EntrySize(dex_register_location);
+ index_in_dex_register_locations++;
+ }
}
return size;
}
// Compute the size of all the Dex register maps.
size_t ComputeDexRegisterMapsSize() const {
- size_t size = stack_maps_.Size() * DexRegisterMap::kFixedSize;
- // The size of each register location depends on the type of
- // the entry.
- for (size_t i = 0, e = dex_register_maps_.Size(); i < e; ++i) {
- DexRegisterLocation entry = dex_register_maps_.Get(i);
- size += DexRegisterMap::EntrySize(entry);
+ size_t size = 0;
+ for (size_t i = 0; i < stack_maps_.Size(); ++i) {
+ size += ComputeDexRegisterMapSize(stack_maps_.Get(i));
}
return size;
}
@@ -161,7 +305,7 @@
size_t stack_mask_size = ComputeStackMaskSize();
uint8_t* memory_start = region.start();
- MemoryRegion dex_register_maps_region = region.Subregion(
+ MemoryRegion dex_register_locations_region = region.Subregion(
ComputeDexRegisterMapsStart(),
ComputeDexRegisterMapsSize());
@@ -189,7 +333,7 @@
if (entry.num_dex_registers != 0) {
// Set the Dex register map.
MemoryRegion register_region =
- dex_register_maps_region.Subregion(
+ dex_register_locations_region.Subregion(
next_dex_register_map_offset,
ComputeDexRegisterMapSize(entry));
next_dex_register_map_offset += register_region.size();
@@ -198,11 +342,20 @@
// Offset in `dex_register_map` where to store the next register entry.
size_t offset = DexRegisterMap::kFixedSize;
- for (size_t j = 0; j < entry.num_dex_registers; ++j) {
- DexRegisterLocation dex_register_location =
- dex_register_maps_.Get(entry.dex_register_maps_start_index + j);
- dex_register_map.SetRegisterInfo(offset, dex_register_location);
- offset += DexRegisterMap::EntrySize(dex_register_location);
+ dex_register_map.SetLiveBitMask(offset,
+ entry.num_dex_registers,
+ *entry.live_dex_registers_mask);
+ offset += DexRegisterMap::LiveBitMaskSize(entry.num_dex_registers);
+ for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
+ dex_register_number < entry.num_dex_registers;
+ ++dex_register_number) {
+ if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
+ DexRegisterLocation dex_register_location = dex_register_locations_.Get(
+ entry.dex_register_locations_start_index + index_in_dex_register_locations);
+ dex_register_map.SetRegisterInfo(offset, dex_register_location);
+ offset += DexRegisterMap::EntrySize(dex_register_location);
+ ++index_in_dex_register_locations;
+ }
}
// Ensure we reached the end of the Dex registers region.
DCHECK_EQ(offset, register_region.size());
@@ -232,12 +385,24 @@
}
private:
+ void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
+ // Ensure we only use non-compressed location kind at this stage.
+ DCHECK(DexRegisterLocation::IsShortLocationKind(kind))
+ << DexRegisterLocation::PrettyDescriptor(kind);
+ dex_register_locations_.Add(DexRegisterLocation(kind, value));
+ }
+
+ ArenaAllocator* allocator_;
GrowableArray<StackMapEntry> stack_maps_;
- GrowableArray<DexRegisterLocation> dex_register_maps_;
+ GrowableArray<DexRegisterLocation> dex_register_locations_;
GrowableArray<InlineInfoEntry> inline_infos_;
int stack_mask_max_;
size_t number_of_stack_maps_with_inline_info_;
+ ART_FRIEND_TEST(StackMapTest, Test1);
+ ART_FRIEND_TEST(StackMapTest, Test2);
+ ART_FRIEND_TEST(StackMapTest, TestNonLiveDexRegisters);
+
DISALLOW_COPY_AND_ASSIGN(StackMapStream);
};
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 3a5f806..4606bd6 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -31,19 +31,17 @@
return true;
}
-static size_t ComputeDexRegisterMapSize(const DexRegisterMap& dex_registers,
- size_t number_of_dex_registers) {
- return dex_registers.FindLocationOffset(number_of_dex_registers);
-}
-
TEST(StackMapTest, Test1) {
ArenaPool pool;
ArenaAllocator arena(&pool);
StackMapStream stream(&arena);
ArenaBitVector sp_mask(&arena, 0, false);
+ ArenaBitVector live_registers_mask(&arena, 0, true);
+ live_registers_mask.SetBit(0);
+ live_registers_mask.SetBit(1);
size_t number_of_dex_registers = 2;
- stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0, &live_registers_mask);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, 0);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, -2);
@@ -68,10 +66,9 @@
ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(6u, dex_registers.Size());
- ASSERT_EQ(6u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
- DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
- DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(7u, dex_registers.Size());
+ DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
+ DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
@@ -91,7 +88,10 @@
sp_mask1.SetBit(2);
sp_mask1.SetBit(4);
size_t number_of_dex_registers = 2;
- stream.AddStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
+ ArenaBitVector live_registers_mask1(&arena, 0, true);
+ live_registers_mask1.SetBit(0);
+ live_registers_mask1.SetBit(1);
+ stream.AddStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2, &live_registers_mask1);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, 0);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, -2);
stream.AddInlineInfoEntry(42);
@@ -100,7 +100,10 @@
ArenaBitVector sp_mask2(&arena, 0, true);
sp_mask2.SetBit(3);
sp_mask1.SetBit(8);
- stream.AddStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
+ ArenaBitVector live_registers_mask2(&arena, 0, true);
+ live_registers_mask2.SetBit(0);
+ live_registers_mask2.SetBit(1);
+ stream.AddStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0, &live_registers_mask2);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInRegister, 18);
stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInFpuRegister, 3);
@@ -128,10 +131,11 @@
ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_registers =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(6u, dex_registers.Size());
- ASSERT_EQ(6u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
- DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
- DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(7u, dex_registers.Size());
+ DexRegisterLocation location0 =
+ dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
+ DexRegisterLocation location1 =
+ dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
@@ -161,10 +165,11 @@
ASSERT_TRUE(stack_map.HasDexRegisterMap());
DexRegisterMap dex_registers =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- ASSERT_EQ(2u, dex_registers.Size());
- ASSERT_EQ(2u, ComputeDexRegisterMapSize(dex_registers, number_of_dex_registers));
- DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0);
- DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1);
+ ASSERT_EQ(3u, dex_registers.Size());
+ DexRegisterLocation location0 =
+ dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
+ DexRegisterLocation location1 =
+ dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kInFpuRegister, location1.GetKind());
ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetInternalKind());
@@ -176,4 +181,33 @@
}
}
+TEST(StackMapTest, TestNonLiveDexRegisters) {
+ ArenaPool pool;
+ ArenaAllocator arena(&pool);
+ StackMapStream stream(&arena);
+
+ ArenaBitVector sp_mask(&arena, 0, false);
+ ArenaBitVector live_registers_mask(&arena, 0, true);
+ live_registers_mask.SetBit(1);
+ uint32_t number_of_dex_registers = 2;
+ stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0, &live_registers_mask);
+ stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kConstant, -2);
+
+ size_t size = stream.ComputeNeededSize();
+ void* memory = arena.Alloc(size, kArenaAllocMisc);
+ MemoryRegion region(memory, size);
+ stream.FillIn(region);
+
+ CodeInfo code_info(region);
+ StackMap stack_map = code_info.GetStackMapAt(0);
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
+ DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, 2);
+ ASSERT_EQ(DexRegisterLocation::Kind::kNone,
+ dex_registers.GetLocationKind(0, number_of_dex_registers));
+ ASSERT_EQ(DexRegisterLocation::Kind::kConstant,
+ dex_registers.GetLocationKind(1, number_of_dex_registers));
+ ASSERT_EQ(-2, dex_registers.GetConstant(1, number_of_dex_registers));
+ ASSERT_FALSE(stack_map.HasInlineInfo());
+}
+
} // namespace art