summaryrefslogtreecommitdiff
path: root/compiler
diff options
context:
space:
mode:
Diffstat (limited to 'compiler')
-rw-r--r--compiler/driver/compiler_driver.cc2
-rw-r--r--compiler/driver/compiler_driver_test.cc14
-rw-r--r--compiler/exception_test.cc3
-rw-r--r--compiler/optimizing/code_generator.cc1
-rw-r--r--compiler/optimizing/intrinsics.cc5
-rw-r--r--compiler/optimizing/stack_map_stream.cc582
-rw-r--r--compiler/optimizing/stack_map_stream.h220
-rw-r--r--compiler/optimizing/stack_map_test.cc340
-rw-r--r--compiler/verifier_deps_test.cc46
9 files changed, 398 insertions, 815 deletions
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 16f2d0f2cc..653e9edb45 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -391,7 +391,7 @@ static optimizer::DexToDexCompiler::CompilationLevel GetDexToDexCompilationLevel
DCHECK(driver.GetCompilerOptions().IsQuickeningCompilationEnabled());
const char* descriptor = dex_file.GetClassDescriptor(class_def);
ClassLinker* class_linker = runtime->GetClassLinker();
- mirror::Class* klass = class_linker->FindClass(self, descriptor, class_loader);
+ ObjPtr<mirror::Class> klass = class_linker->FindClass(self, descriptor, class_loader);
if (klass == nullptr) {
CHECK(self->IsExceptionPending());
self->ClearException();
diff --git a/compiler/driver/compiler_driver_test.cc b/compiler/driver/compiler_driver_test.cc
index 856cb36266..491e61f9b5 100644
--- a/compiler/driver/compiler_driver_test.cc
+++ b/compiler/driver/compiler_driver_test.cc
@@ -88,7 +88,7 @@ class CompilerDriverTest : public CommonCompilerTest {
StackHandleScope<1> hs(soa.Self());
Handle<mirror::ClassLoader> loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
- mirror::Class* c = class_linker->FindClass(soa.Self(), descriptor, loader);
+ ObjPtr<mirror::Class> c = class_linker->FindClass(soa.Self(), descriptor, loader);
CHECK(c != nullptr);
const auto pointer_size = class_linker->GetImagePointerSize();
for (auto& m : c->GetMethods(pointer_size)) {
@@ -115,14 +115,14 @@ TEST_F(CompilerDriverTest, DISABLED_LARGE_CompileDexLibCore) {
ObjPtr<mirror::DexCache> dex_cache = class_linker_->FindDexCache(soa.Self(), dex);
EXPECT_EQ(dex.NumStringIds(), dex_cache->NumStrings());
for (size_t i = 0; i < dex_cache->NumStrings(); i++) {
- const mirror::String* string = dex_cache->GetResolvedString(dex::StringIndex(i));
+ const ObjPtr<mirror::String> string = dex_cache->GetResolvedString(dex::StringIndex(i));
EXPECT_TRUE(string != nullptr) << "string_idx=" << i;
}
EXPECT_EQ(dex.NumTypeIds(), dex_cache->NumResolvedTypes());
for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
- mirror::Class* type = dex_cache->GetResolvedType(dex::TypeIndex(i));
- EXPECT_TRUE(type != nullptr) << "type_idx=" << i
- << " " << dex.GetTypeDescriptor(dex.GetTypeId(dex::TypeIndex(i)));
+ const ObjPtr<mirror::Class> type = dex_cache->GetResolvedType(dex::TypeIndex(i));
+ EXPECT_TRUE(type != nullptr)
+ << "type_idx=" << i << " " << dex.GetTypeDescriptor(dex.GetTypeId(dex::TypeIndex(i)));
}
EXPECT_TRUE(dex_cache->StaticMethodSize() == dex_cache->NumResolvedMethods()
|| dex.NumMethodIds() == dex_cache->NumResolvedMethods());
@@ -228,7 +228,7 @@ class CompilerDriverProfileTest : public CompilerDriverTest {
StackHandleScope<1> hs(self);
Handle<mirror::ClassLoader> h_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
- mirror::Class* klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
+ ObjPtr<mirror::Class> klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
ASSERT_NE(klass, nullptr);
const auto pointer_size = class_linker->GetImagePointerSize();
@@ -289,7 +289,7 @@ class CompilerDriverVerifyTest : public CompilerDriverTest {
StackHandleScope<1> hs(self);
Handle<mirror::ClassLoader> h_loader(
hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader)));
- mirror::Class* klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
+ ObjPtr<mirror::Class> klass = class_linker->FindClass(self, clazz.c_str(), h_loader);
ASSERT_NE(klass, nullptr);
EXPECT_TRUE(klass->IsVerified());
diff --git a/compiler/exception_test.cc b/compiler/exception_test.cc
index da1db4593b..15c07870a1 100644
--- a/compiler/exception_test.cc
+++ b/compiler/exception_test.cc
@@ -34,6 +34,7 @@
#include "mirror/object_array-inl.h"
#include "mirror/stack_trace_element.h"
#include "oat_quick_method_header.h"
+#include "obj_ptr-inl.h"
#include "optimizing/stack_map_stream.h"
#include "runtime-inl.h"
#include "scoped_thread_state_change-inl.h"
@@ -122,7 +123,7 @@ class ExceptionTest : public CommonRuntimeTest {
ArtMethod* method_g_;
private:
- mirror::Class* my_klass_;
+ ObjPtr<mirror::Class> my_klass_;
};
TEST_F(ExceptionTest, FindCatchHandler) {
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index b358bfabe0..4791fa3fba 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -63,6 +63,7 @@
#include "parallel_move_resolver.h"
#include "scoped_thread_state_change-inl.h"
#include "ssa_liveness_analysis.h"
+#include "stack_map.h"
#include "stack_map_stream.h"
#include "thread-current-inl.h"
#include "utils/assembler.h"
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index dfe6d791c6..056f533398 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -272,7 +272,8 @@ IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo
ClassLinker* class_linker = runtime->GetClassLinker();
gc::Heap* heap = runtime->GetHeap();
IntegerValueOfInfo info;
- info.integer_cache = class_linker->FindSystemClass(self, "Ljava/lang/Integer$IntegerCache;");
+ info.integer_cache =
+ class_linker->FindSystemClass(self, "Ljava/lang/Integer$IntegerCache;").Ptr();
if (info.integer_cache == nullptr) {
self->ClearException();
return info;
@@ -281,7 +282,7 @@ IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo
// Optimization only works if the class is initialized and in the boot image.
return info;
}
- info.integer = class_linker->FindSystemClass(self, "Ljava/lang/Integer;");
+ info.integer = class_linker->FindSystemClass(self, "Ljava/lang/Integer;").Ptr();
if (info.integer == nullptr) {
self->ClearException();
return info;
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index b1dcb68415..fad0d7be1b 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -22,6 +22,7 @@
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
+#include "stack_map.h"
namespace art {
@@ -36,404 +37,234 @@ void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offs
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
uint32_t native_pc_offset,
uint32_t register_mask,
- BitVector* sp_mask,
+ BitVector* stack_mask,
uint32_t num_dex_registers,
- uint8_t inlining_depth) {
- DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
- current_entry_.dex_pc = dex_pc;
- current_entry_.packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_);
- current_entry_.register_mask = register_mask;
- current_entry_.sp_mask = sp_mask;
- current_entry_.inlining_depth = inlining_depth;
- current_entry_.inline_infos_start_index = inline_infos_.size();
- current_entry_.stack_mask_index = 0;
- current_entry_.dex_method_index = dex::kDexNoIndex;
- current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
- current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
- current_entry_.dex_register_entry.live_dex_registers_mask = nullptr;
- if (num_dex_registers != 0u) {
- current_entry_.dex_register_entry.live_dex_registers_mask =
- ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
- current_entry_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
+ uint8_t inlining_depth ATTRIBUTE_UNUSED) {
+ DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
+ in_stack_map_ = true;
+
+ current_stack_map_ = StackMapEntry {
+ .packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
+ .dex_pc = dex_pc,
+ .register_mask_index = kNoValue,
+ .stack_mask_index = kNoValue,
+ .inline_info_index = kNoValue,
+ .dex_register_mask_index = kNoValue,
+ .dex_register_map_index = kNoValue,
+ };
+ if (register_mask != 0) {
+ uint32_t shift = LeastSignificantBit(register_mask);
+ RegisterMaskEntry entry = { register_mask >> shift, shift };
+ current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);
+ }
+ // The compiler assumes the bit vector will be read during PrepareForFillIn(),
+ // and it might modify the data before that. Therefore, just store the pointer.
+ // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
+ lazy_stack_masks_.push_back(stack_mask);
+ current_inline_infos_ = 0;
+ current_dex_registers_.clear();
+ expected_num_dex_registers_ = num_dex_registers;
+
+ if (kIsDebugBuild) {
+ dcheck_num_dex_registers_.push_back(num_dex_registers);
}
- current_dex_register_ = 0;
}
void StackMapStream::EndStackMapEntry() {
- current_entry_.dex_register_map_index = AddDexRegisterMapEntry(current_entry_.dex_register_entry);
- stack_maps_.push_back(current_entry_);
- current_entry_ = StackMapEntry();
+ DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
+ in_stack_map_ = false;
+ DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+
+ // Mark the last inline info as last in the list for the stack map.
+ if (current_inline_infos_ > 0) {
+ inline_infos_[inline_infos_.size() - 1].is_last = InlineInfo::kLast;
+ }
+
+ stack_maps_.Add(current_stack_map_);
}
void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
- if (kind != DexRegisterLocation::Kind::kNone) {
- // Ensure we only use non-compressed location kind at this stage.
- DCHECK(DexRegisterLocation::IsShortLocationKind(kind)) << kind;
- DexRegisterLocation location(kind, value);
-
- // Look for Dex register `location` in the location catalog (using the
- // companion hash map of locations to indices). Use its index if it
- // is already in the location catalog. If not, insert it (in the
- // location catalog and the hash map) and use the newly created index.
- auto it = location_catalog_entries_indices_.Find(location);
- if (it != location_catalog_entries_indices_.end()) {
- // Retrieve the index from the hash map.
- dex_register_locations_.push_back(it->second);
- } else {
- // Create a new entry in the location catalog and the hash map.
- size_t index = location_catalog_entries_.size();
- location_catalog_entries_.push_back(location);
- dex_register_locations_.push_back(index);
- location_catalog_entries_indices_.Insert(std::make_pair(location, index));
- }
- DexRegisterMapEntry* const entry = in_inline_frame_
- ? &current_inline_info_.dex_register_entry
- : &current_entry_.dex_register_entry;
- DCHECK_LT(current_dex_register_, entry->num_dex_registers);
- entry->live_dex_registers_mask->SetBit(current_dex_register_);
- entry->hash += (1 <<
- (current_dex_register_ % (sizeof(DexRegisterMapEntry::hash) * kBitsPerByte)));
- entry->hash += static_cast<uint32_t>(value);
- entry->hash += static_cast<uint32_t>(kind);
+ current_dex_registers_.push_back(DexRegisterLocation(kind, value));
+
+ // We have collected all the dex registers for StackMap/InlineInfo - create the map.
+ if (current_dex_registers_.size() == expected_num_dex_registers_) {
+ CreateDexRegisterMap();
}
- current_dex_register_++;
}
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
- current_entry_.invoke_type = invoke_type;
- current_entry_.dex_method_index = dex_method_index;
+ uint32_t packed_native_pc = current_stack_map_.packed_native_pc;
+ invoke_infos_.Add(InvokeInfoEntry {
+ .packed_native_pc = packed_native_pc,
+ .invoke_type = invoke_type,
+ .method_info_index = method_infos_.Dedup(&dex_method_index),
+ });
}
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
uint32_t dex_pc,
uint32_t num_dex_registers,
const DexFile* outer_dex_file) {
- DCHECK(!in_inline_frame_);
- in_inline_frame_ = true;
+ DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
+ in_inline_info_ = true;
+ DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+
+ InlineInfoEntry entry = {
+ .is_last = InlineInfo::kMore,
+ .dex_pc = dex_pc,
+ .method_info_index = kNoValue,
+ .art_method_hi = kNoValue,
+ .art_method_lo = kNoValue,
+ .dex_register_mask_index = kNoValue,
+ .dex_register_map_index = kNoValue,
+ };
if (EncodeArtMethodInInlineInfo(method)) {
- current_inline_info_.method = method;
+ entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
+ entry.art_method_lo = Low32Bits(reinterpret_cast<uintptr_t>(method));
} else {
if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
ScopedObjectAccess soa(Thread::Current());
DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
}
- current_inline_info_.method_index = method->GetDexMethodIndexUnchecked();
+ uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
+ entry.method_info_index = method_infos_.Dedup(&dex_method_index);
}
- current_inline_info_.dex_pc = dex_pc;
- current_inline_info_.dex_register_entry.num_dex_registers = num_dex_registers;
- current_inline_info_.dex_register_entry.locations_start_index = dex_register_locations_.size();
- current_inline_info_.dex_register_entry.live_dex_registers_mask = nullptr;
- if (num_dex_registers != 0) {
- current_inline_info_.dex_register_entry.live_dex_registers_mask =
- ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
- current_inline_info_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
+ if (current_inline_infos_++ == 0) {
+ current_stack_map_.inline_info_index = inline_infos_.size();
+ }
+ inline_infos_.Add(entry);
+
+ current_dex_registers_.clear();
+ expected_num_dex_registers_ = num_dex_registers;
+
+ if (kIsDebugBuild) {
+ dcheck_num_dex_registers_.push_back(num_dex_registers);
}
- current_dex_register_ = 0;
}
void StackMapStream::EndInlineInfoEntry() {
- current_inline_info_.dex_register_map_index =
- AddDexRegisterMapEntry(current_inline_info_.dex_register_entry);
- DCHECK(in_inline_frame_);
- DCHECK_EQ(current_dex_register_, current_inline_info_.dex_register_entry.num_dex_registers)
- << "Inline information contains less registers than expected";
- in_inline_frame_ = false;
- inline_infos_.push_back(current_inline_info_);
- current_inline_info_ = InlineInfoEntry();
+ DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
+ in_inline_info_ = false;
+ DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}
-size_t StackMapStream::ComputeDexRegisterLocationCatalogSize() const {
- size_t size = DexRegisterLocationCatalog::kFixedSize;
- for (const DexRegisterLocation& dex_register_location : location_catalog_entries_) {
- size += DexRegisterLocationCatalog::EntrySize(dex_register_location);
+// Create dex register map (bitmap + indices + catalogue entries)
+// based on the currently accumulated list of DexRegisterLocations.
+void StackMapStream::CreateDexRegisterMap() {
+ // Create mask and map based on current registers.
+ temp_dex_register_mask_.ClearAllBits();
+ temp_dex_register_map_.clear();
+ for (size_t i = 0; i < current_dex_registers_.size(); i++) {
+ DexRegisterLocation reg = current_dex_registers_[i];
+ if (reg.IsLive()) {
+ DexRegisterEntry entry = DexRegisterEntry {
+ .kind = static_cast<uint32_t>(reg.GetKind()),
+ .packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
+ };
+ temp_dex_register_mask_.SetBit(i);
+ temp_dex_register_map_.push_back(dex_register_catalog_.Dedup(&entry));
+ }
}
- return size;
-}
-size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) const {
- // For num_dex_registers == 0u live_dex_registers_mask may be null.
- if (num_dex_registers == 0u) {
- return 0u; // No register map will be emitted.
+ // Set the mask and map for the current StackMap/InlineInfo.
+ uint32_t mask_index = StackMap::kNoValue; // Represents mask with all zero bits.
+ if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
+ mask_index = dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
+ temp_dex_register_mask_.GetNumberOfBits());
}
- size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
- if (live_dex_registers_mask->NumSetBits() == 0) {
- return 0u; // No register map will be emitted.
+ uint32_t map_index = dex_register_maps_.Dedup(temp_dex_register_map_.data(),
+ temp_dex_register_map_.size());
+ if (current_inline_infos_ > 0) {
+ inline_infos_[inline_infos_.size() - 1].dex_register_mask_index = mask_index;
+ inline_infos_[inline_infos_.size() - 1].dex_register_map_index = map_index;
+ } else {
+ current_stack_map_.dex_register_mask_index = mask_index;
+ current_stack_map_.dex_register_map_index = map_index;
}
- DCHECK(live_dex_registers_mask != nullptr);
-
- // Size of the map in bytes.
- size_t size = DexRegisterMap::kFixedSize;
- // Add the live bit mask for the Dex register liveness.
- size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
- // Compute the size of the set of live Dex register entries.
- size_t map_entries_size_in_bits =
- DexRegisterMap::SingleEntrySizeInBits(catalog_size) * number_of_live_dex_registers;
- size_t map_entries_size_in_bytes =
- RoundUp(map_entries_size_in_bits, kBitsPerByte) / kBitsPerByte;
- size += map_entries_size_in_bytes;
- return size;
}
void StackMapStream::FillInMethodInfo(MemoryRegion region) {
{
- MethodInfo info(region.begin(), method_indices_.size());
- for (size_t i = 0; i < method_indices_.size(); ++i) {
- info.SetMethodIndex(i, method_indices_[i]);
+ MethodInfo info(region.begin(), method_infos_.size());
+ for (size_t i = 0; i < method_infos_.size(); ++i) {
+ info.SetMethodIndex(i, method_infos_[i]);
}
}
if (kIsDebugBuild) {
// Check the data matches.
MethodInfo info(region.begin());
const size_t count = info.NumMethodIndices();
- DCHECK_EQ(count, method_indices_.size());
+ DCHECK_EQ(count, method_infos_.size());
for (size_t i = 0; i < count; ++i) {
- DCHECK_EQ(info.GetMethodIndex(i), method_indices_[i]);
+ DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i]);
}
}
}
-template<typename Vector>
-static MemoryRegion EncodeMemoryRegion(Vector* out, size_t* bit_offset, uint32_t bit_length) {
- uint32_t byte_length = BitsToBytesRoundUp(bit_length);
- EncodeVarintBits(out, bit_offset, byte_length);
- *bit_offset = RoundUp(*bit_offset, kBitsPerByte);
- out->resize(out->size() + byte_length);
- MemoryRegion region(out->data() + *bit_offset / kBitsPerByte, byte_length);
- *bit_offset += kBitsPerByte * byte_length;
- return region;
-}
-
size_t StackMapStream::PrepareForFillIn() {
- size_t bit_offset = 0;
- out_.clear();
-
- // Decide the offsets of dex register map entries, but do not write them out yet.
- // Needs to be done first as it modifies the stack map entry.
- size_t dex_register_map_bytes = 0;
- for (DexRegisterMapEntry& entry : dex_register_entries_) {
- size_t size = entry.ComputeSize(location_catalog_entries_.size());
- entry.offset = size == 0 ? DexRegisterMapEntry::kOffsetUnassigned : dex_register_map_bytes;
- dex_register_map_bytes += size;
- }
-
- // Must be done before calling ComputeInlineInfoEncoding since ComputeInlineInfoEncoding requires
- // dex_method_index_idx to be filled in.
- PrepareMethodIndices();
-
- // Dedup stack masks. Needs to be done first as it modifies the stack map entry.
- BitmapTableBuilder stack_mask_builder(allocator_);
- for (StackMapEntry& stack_map : stack_maps_) {
- BitVector* mask = stack_map.sp_mask;
- size_t num_bits = (mask != nullptr) ? mask->GetNumberOfBits() : 0;
- if (num_bits != 0) {
- stack_map.stack_mask_index = stack_mask_builder.Dedup(mask->GetRawStorage(), num_bits);
- } else {
- stack_map.stack_mask_index = StackMap::kNoValue;
- }
- }
-
- // Dedup register masks. Needs to be done first as it modifies the stack map entry.
- BitTableBuilder<std::array<uint32_t, RegisterMask::kCount>> register_mask_builder(allocator_);
- for (StackMapEntry& stack_map : stack_maps_) {
- uint32_t register_mask = stack_map.register_mask;
- if (register_mask != 0) {
- uint32_t shift = LeastSignificantBit(register_mask);
- std::array<uint32_t, RegisterMask::kCount> entry = {
- register_mask >> shift,
- shift,
- };
- stack_map.register_mask_index = register_mask_builder.Dedup(&entry);
- } else {
- stack_map.register_mask_index = StackMap::kNoValue;
+ static_assert(sizeof(StackMapEntry) == StackMap::kCount * sizeof(uint32_t), "Layout");
+ static_assert(sizeof(InvokeInfoEntry) == InvokeInfo::kCount * sizeof(uint32_t), "Layout");
+ static_assert(sizeof(InlineInfoEntry) == InlineInfo::kCount * sizeof(uint32_t), "Layout");
+ static_assert(sizeof(DexRegisterEntry) == DexRegisterInfo::kCount * sizeof(uint32_t), "Layout");
+ DCHECK_EQ(out_.size(), 0u);
+
+ // Read the stack masks now. The compiler might have updated them.
+ for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
+ BitVector* stack_mask = lazy_stack_masks_[i];
+ if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
+ stack_maps_[i].stack_mask_index =
+ stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
}
}
- // Allocate space for dex register maps.
- EncodeMemoryRegion(&out_, &bit_offset, dex_register_map_bytes * kBitsPerByte);
-
- // Write dex register catalog.
- EncodeVarintBits(&out_, &bit_offset, location_catalog_entries_.size());
- size_t location_catalog_bytes = ComputeDexRegisterLocationCatalogSize();
- MemoryRegion dex_register_location_catalog_region =
- EncodeMemoryRegion(&out_, &bit_offset, location_catalog_bytes * kBitsPerByte);
- DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
- // Offset in `dex_register_location_catalog` where to store the next
- // register location.
- size_t location_catalog_offset = DexRegisterLocationCatalog::kFixedSize;
- for (DexRegisterLocation dex_register_location : location_catalog_entries_) {
- dex_register_location_catalog.SetRegisterInfo(location_catalog_offset, dex_register_location);
- location_catalog_offset += DexRegisterLocationCatalog::EntrySize(dex_register_location);
- }
- // Ensure we reached the end of the Dex registers location_catalog.
- DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());
-
- // Write stack maps.
- BitTableBuilder<std::array<uint32_t, StackMap::kCount>> stack_map_builder(allocator_);
- BitTableBuilder<std::array<uint32_t, InvokeInfo::kCount>> invoke_info_builder(allocator_);
- BitTableBuilder<std::array<uint32_t, InlineInfo::kCount>> inline_info_builder(allocator_);
- for (const StackMapEntry& entry : stack_maps_) {
- if (entry.dex_method_index != dex::kDexNoIndex) {
- std::array<uint32_t, InvokeInfo::kCount> invoke_info_entry {
- entry.packed_native_pc,
- entry.invoke_type,
- entry.dex_method_index_idx
- };
- invoke_info_builder.Add(invoke_info_entry);
- }
-
- // Set the inlining info.
- uint32_t inline_info_index = inline_info_builder.size();
- DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
- for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
- InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
- uint32_t method_index_idx = inline_entry.dex_method_index_idx;
- uint32_t extra_data = 1;
- if (inline_entry.method != nullptr) {
- method_index_idx = High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
- extra_data = Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
- }
- std::array<uint32_t, InlineInfo::kCount> inline_info_entry {
- (depth == entry.inlining_depth - 1) ? InlineInfo::kLast : InlineInfo::kMore,
- method_index_idx,
- inline_entry.dex_pc,
- extra_data,
- dex_register_entries_[inline_entry.dex_register_map_index].offset,
- };
- inline_info_builder.Add(inline_info_entry);
- }
- std::array<uint32_t, StackMap::kCount> stack_map_entry {
- entry.packed_native_pc,
- entry.dex_pc,
- dex_register_entries_[entry.dex_register_map_index].offset,
- entry.inlining_depth != 0 ? inline_info_index : InlineInfo::kNoValue,
- entry.register_mask_index,
- entry.stack_mask_index,
- };
- stack_map_builder.Add(stack_map_entry);
- }
- stack_map_builder.Encode(&out_, &bit_offset);
- invoke_info_builder.Encode(&out_, &bit_offset);
- inline_info_builder.Encode(&out_, &bit_offset);
- register_mask_builder.Encode(&out_, &bit_offset);
- stack_mask_builder.Encode(&out_, &bit_offset);
+ size_t bit_offset = 0;
+ stack_maps_.Encode(&out_, &bit_offset);
+ register_masks_.Encode(&out_, &bit_offset);
+ stack_masks_.Encode(&out_, &bit_offset);
+ invoke_infos_.Encode(&out_, &bit_offset);
+ inline_infos_.Encode(&out_, &bit_offset);
+ dex_register_masks_.Encode(&out_, &bit_offset);
+ dex_register_maps_.Encode(&out_, &bit_offset);
+ dex_register_catalog_.Encode(&out_, &bit_offset);
return UnsignedLeb128Size(out_.size()) + out_.size();
}
void StackMapStream::FillInCodeInfo(MemoryRegion region) {
- DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
+ DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
+ DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn";
DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size());
uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size());
region.CopyFromVector(ptr - region.begin(), out_);
- // Write dex register maps.
- CodeInfo code_info(region);
- for (DexRegisterMapEntry& entry : dex_register_entries_) {
- size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
- if (entry_size != 0) {
- DexRegisterMap dex_register_map(
- code_info.dex_register_maps_.Subregion(entry.offset, entry_size),
- entry.num_dex_registers,
- code_info);
- FillInDexRegisterMap(dex_register_map,
- entry.num_dex_registers,
- *entry.live_dex_registers_mask,
- entry.locations_start_index);
- }
- }
-
// Verify all written data in debug build.
if (kIsDebugBuild) {
CheckCodeInfo(region);
}
}
-void StackMapStream::FillInDexRegisterMap(DexRegisterMap dex_register_map,
- uint32_t num_dex_registers,
- const BitVector& live_dex_registers_mask,
- uint32_t start_index_in_dex_register_locations) const {
- dex_register_map.SetLiveBitMask(num_dex_registers, live_dex_registers_mask);
- // Set the dex register location mapping data.
- size_t number_of_live_dex_registers = live_dex_registers_mask.NumSetBits();
- DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
- DCHECK_LE(start_index_in_dex_register_locations,
- dex_register_locations_.size() - number_of_live_dex_registers);
- for (size_t index_in_dex_register_locations = 0;
- index_in_dex_register_locations != number_of_live_dex_registers;
- ++index_in_dex_register_locations) {
- size_t location_catalog_entry_index = dex_register_locations_[
- start_index_in_dex_register_locations + index_in_dex_register_locations];
- dex_register_map.SetLocationCatalogEntryIndex(
- index_in_dex_register_locations,
- location_catalog_entry_index,
- location_catalog_entries_.size());
- }
-}
-
-size_t StackMapStream::AddDexRegisterMapEntry(const DexRegisterMapEntry& entry) {
- const size_t current_entry_index = dex_register_entries_.size();
- auto entries_it = dex_map_hash_to_stack_map_indices_.find(entry.hash);
- if (entries_it == dex_map_hash_to_stack_map_indices_.end()) {
- // We don't have a perfect hash functions so we need a list to collect all stack maps
- // which might have the same dex register map.
- ScopedArenaVector<uint32_t> stack_map_indices(allocator_->Adapter(kArenaAllocStackMapStream));
- stack_map_indices.push_back(current_entry_index);
- dex_map_hash_to_stack_map_indices_.Put(entry.hash, std::move(stack_map_indices));
- } else {
- // We might have collisions, so we need to check whether or not we really have a match.
- for (uint32_t test_entry_index : entries_it->second) {
- if (DexRegisterMapEntryEquals(dex_register_entries_[test_entry_index], entry)) {
- return test_entry_index;
- }
- }
- entries_it->second.push_back(current_entry_index);
- }
- dex_register_entries_.push_back(entry);
- return current_entry_index;
-}
-
-bool StackMapStream::DexRegisterMapEntryEquals(const DexRegisterMapEntry& a,
- const DexRegisterMapEntry& b) const {
- if ((a.live_dex_registers_mask == nullptr) != (b.live_dex_registers_mask == nullptr)) {
- return false;
- }
- if (a.num_dex_registers != b.num_dex_registers) {
- return false;
- }
- if (a.num_dex_registers != 0u) {
- DCHECK(a.live_dex_registers_mask != nullptr);
- DCHECK(b.live_dex_registers_mask != nullptr);
- if (!a.live_dex_registers_mask->Equal(b.live_dex_registers_mask)) {
- return false;
- }
- size_t number_of_live_dex_registers = a.live_dex_registers_mask->NumSetBits();
- DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
- DCHECK_LE(a.locations_start_index,
- dex_register_locations_.size() - number_of_live_dex_registers);
- DCHECK_LE(b.locations_start_index,
- dex_register_locations_.size() - number_of_live_dex_registers);
- auto a_begin = dex_register_locations_.begin() + a.locations_start_index;
- auto b_begin = dex_register_locations_.begin() + b.locations_start_index;
- if (!std::equal(a_begin, a_begin + number_of_live_dex_registers, b_begin)) {
- return false;
- }
- }
- return true;
-}
-
// Helper for CheckCodeInfo - check that register map has the expected content.
void StackMapStream::CheckDexRegisterMap(const DexRegisterMap& dex_register_map,
- size_t num_dex_registers,
- BitVector* live_dex_registers_mask,
- size_t dex_register_locations_index) const {
- for (size_t reg = 0; reg < num_dex_registers; reg++) {
+ size_t dex_register_mask_index,
+ size_t dex_register_map_index) const {
+ if (dex_register_map_index == kNoValue) {
+ DCHECK(!dex_register_map.IsValid());
+ return;
+ }
+ BitMemoryRegion live_dex_registers_mask = (dex_register_mask_index == kNoValue)
+ ? BitMemoryRegion()
+ : BitMemoryRegion(dex_register_masks_[dex_register_mask_index]);
+ for (size_t reg = 0; reg < dex_register_map.size(); reg++) {
// Find the location we tried to encode.
DexRegisterLocation expected = DexRegisterLocation::None();
- if (live_dex_registers_mask->IsBitSet(reg)) {
- size_t catalog_index = dex_register_locations_[dex_register_locations_index++];
- expected = location_catalog_entries_[catalog_index];
+ if (reg < live_dex_registers_mask.size_in_bits() && live_dex_registers_mask.LoadBit(reg)) {
+ size_t catalog_index = dex_register_maps_[dex_register_map_index++];
+ DexRegisterLocation::Kind kind =
+ static_cast<DexRegisterLocation::Kind>(dex_register_catalog_[catalog_index].kind);
+ uint32_t packed_value = dex_register_catalog_[catalog_index].packed_value;
+ expected = DexRegisterLocation(kind, DexRegisterInfo::UnpackValue(kind, packed_value));
}
// Compare to the seen location.
if (expected.GetKind() == DexRegisterLocation::Kind::kNone) {
@@ -446,108 +277,75 @@ void StackMapStream::CheckDexRegisterMap(const DexRegisterMap& dex_register_map,
DCHECK_EQ(expected.GetValue(), seen.GetValue());
}
}
- if (num_dex_registers == 0) {
- DCHECK(!dex_register_map.IsValid());
- }
-}
-
-void StackMapStream::PrepareMethodIndices() {
- CHECK(method_indices_.empty());
- method_indices_.resize(stack_maps_.size() + inline_infos_.size());
- ScopedArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
- for (StackMapEntry& stack_map : stack_maps_) {
- const size_t index = dedupe.size();
- const uint32_t method_index = stack_map.dex_method_index;
- if (method_index != dex::kDexNoIndex) {
- stack_map.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
- method_indices_[index] = method_index;
- }
- }
- for (InlineInfoEntry& inline_info : inline_infos_) {
- const size_t index = dedupe.size();
- const uint32_t method_index = inline_info.method_index;
- CHECK_NE(method_index, dex::kDexNoIndex);
- inline_info.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
- method_indices_[index] = method_index;
- }
- method_indices_.resize(dedupe.size());
}
// Check that all StackMapStream inputs are correctly encoded by trying to read them back.
void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
CodeInfo code_info(region);
DCHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
- DCHECK_EQ(code_info.GetNumberOfLocationCatalogEntries(), location_catalog_entries_.size());
- size_t invoke_info_index = 0;
+ const uint32_t* num_dex_registers = dcheck_num_dex_registers_.data();
for (size_t s = 0; s < stack_maps_.size(); ++s) {
const StackMap stack_map = code_info.GetStackMapAt(s);
- StackMapEntry entry = stack_maps_[s];
+ const StackMapEntry& entry = stack_maps_[s];
// Check main stack map fields.
DCHECK_EQ(stack_map.GetNativePcOffset(instruction_set_),
StackMap::UnpackNativePc(entry.packed_native_pc, instruction_set_));
DCHECK_EQ(stack_map.GetDexPc(), entry.dex_pc);
DCHECK_EQ(stack_map.GetRegisterMaskIndex(), entry.register_mask_index);
- DCHECK_EQ(code_info.GetRegisterMaskOf(stack_map), entry.register_mask);
+ RegisterMaskEntry expected_register_mask = (entry.register_mask_index == kNoValue)
+ ? RegisterMaskEntry{}
+ : register_masks_[entry.register_mask_index];
+ DCHECK_EQ(code_info.GetRegisterMaskOf(stack_map),
+ expected_register_mask.value << expected_register_mask.shift);
DCHECK_EQ(stack_map.GetStackMaskIndex(), entry.stack_mask_index);
+ BitMemoryRegion expected_stack_mask = (entry.stack_mask_index == kNoValue)
+ ? BitMemoryRegion()
+ : BitMemoryRegion(stack_masks_[entry.stack_mask_index]);
BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
- if (entry.sp_mask != nullptr) {
- DCHECK_GE(stack_mask.size_in_bits(), entry.sp_mask->GetNumberOfBits());
- for (size_t b = 0; b < stack_mask.size_in_bits(); b++) {
- DCHECK_EQ(stack_mask.LoadBit(b), entry.sp_mask->IsBitSet(b)) << b;
- }
- } else {
- DCHECK_EQ(stack_mask.size_in_bits(), 0u);
+ for (size_t b = 0; b < expected_stack_mask.size_in_bits(); b++) {
+ bool seen = b < stack_mask.size_in_bits() && stack_mask.LoadBit(b);
+ DCHECK_EQ(expected_stack_mask.LoadBit(b), seen);
}
- if (entry.dex_method_index != dex::kDexNoIndex) {
- InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
- DCHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
- StackMap::UnpackNativePc(entry.packed_native_pc, instruction_set_));
- DCHECK_EQ(invoke_info.GetInvokeType(), entry.invoke_type);
- DCHECK_EQ(invoke_info.GetMethodIndexIdx(), entry.dex_method_index_idx);
- invoke_info_index++;
- }
- CheckDexRegisterMap(code_info.GetDexRegisterMapOf(
- stack_map, entry.dex_register_entry.num_dex_registers),
- entry.dex_register_entry.num_dex_registers,
- entry.dex_register_entry.live_dex_registers_mask,
- entry.dex_register_entry.locations_start_index);
+ CheckDexRegisterMap(code_info.GetDexRegisterMapOf(stack_map, *(num_dex_registers++)),
+ entry.dex_register_mask_index,
+ entry.dex_register_map_index);
// Check inline info.
- DCHECK_EQ(stack_map.HasInlineInfo(), (entry.inlining_depth != 0));
- if (entry.inlining_depth != 0) {
+ DCHECK_EQ(stack_map.HasInlineInfo(), (entry.inline_info_index != kNoValue));
+ if (stack_map.HasInlineInfo()) {
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
- DCHECK_EQ(inline_info.GetDepth(), entry.inlining_depth);
- for (size_t d = 0; d < entry.inlining_depth; ++d) {
- size_t inline_info_index = entry.inline_infos_start_index + d;
+ size_t inlining_depth = inline_info.GetDepth();
+ for (size_t d = 0; d < inlining_depth; ++d) {
+ size_t inline_info_index = entry.inline_info_index + d;
DCHECK_LT(inline_info_index, inline_infos_.size());
- InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
+ const InlineInfoEntry& inline_entry = inline_infos_[inline_info_index];
DCHECK_EQ(inline_info.GetDexPcAtDepth(d), inline_entry.dex_pc);
- if (inline_info.EncodesArtMethodAtDepth(d)) {
- DCHECK_EQ(inline_info.GetArtMethodAtDepth(d),
- inline_entry.method);
- } else {
+ if (!inline_info.EncodesArtMethodAtDepth(d)) {
const size_t method_index_idx =
inline_info.GetMethodIndexIdxAtDepth(d);
- DCHECK_EQ(method_index_idx, inline_entry.dex_method_index_idx);
- DCHECK_EQ(method_indices_[method_index_idx], inline_entry.method_index);
+ DCHECK_EQ(method_index_idx, inline_entry.method_info_index);
}
-
CheckDexRegisterMap(code_info.GetDexRegisterMapAtDepth(
- d,
- inline_info,
- inline_entry.dex_register_entry.num_dex_registers),
- inline_entry.dex_register_entry.num_dex_registers,
- inline_entry.dex_register_entry.live_dex_registers_mask,
- inline_entry.dex_register_entry.locations_start_index);
+ d, inline_info, *(num_dex_registers++)),
+ inline_entry.dex_register_mask_index,
+ inline_entry.dex_register_map_index);
}
}
}
+ for (size_t i = 0; i < invoke_infos_.size(); i++) {
+ InvokeInfo invoke_info = code_info.GetInvokeInfo(i);
+ const InvokeInfoEntry& entry = invoke_infos_[i];
+ DCHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
+ StackMap::UnpackNativePc(entry.packed_native_pc, instruction_set_));
+ DCHECK_EQ(invoke_info.GetInvokeType(), entry.invoke_type);
+ DCHECK_EQ(invoke_info.GetMethodIndexIdx(), entry.method_info_index);
+ }
}
size_t StackMapStream::ComputeMethodInfoSize() const {
DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
- return MethodInfo::ComputeSize(method_indices_.size());
+ return MethodInfo::ComputeSize(method_infos_.size());
}
} // namespace art
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 6d505b95db..cefe165a67 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -17,42 +17,20 @@
#ifndef ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
#define ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
+#include "base/allocator.h"
+#include "base/arena_bit_vector.h"
+#include "base/bit_table.h"
#include "base/bit_vector-inl.h"
-#include "base/hash_map.h"
#include "base/memory_region.h"
#include "base/scoped_arena_containers.h"
#include "base/value_object.h"
+#include "dex_register_location.h"
#include "method_info.h"
#include "nodes.h"
-#include "stack_map.h"
namespace art {
-// Helper to build art::StackMapStream::LocationCatalogEntriesIndices.
-class LocationCatalogEntriesIndicesEmptyFn {
- public:
- void MakeEmpty(std::pair<DexRegisterLocation, size_t>& item) const {
- item.first = DexRegisterLocation::None();
- }
- bool IsEmpty(const std::pair<DexRegisterLocation, size_t>& item) const {
- return item.first == DexRegisterLocation::None();
- }
-};
-
-// Hash function for art::StackMapStream::LocationCatalogEntriesIndices.
-// This hash function does not create collisions.
-class DexRegisterLocationHashFn {
- public:
- size_t operator()(DexRegisterLocation key) const {
- // Concatenate `key`s fields to create a 64-bit value to be hashed.
- int64_t kind_and_value =
- (static_cast<int64_t>(key.kind_) << 32) | static_cast<int64_t>(key.value_);
- return inner_hash_fn_(kind_and_value);
- }
- private:
- std::hash<int64_t> inner_hash_fn_;
-};
-
+class DexRegisterMap;
/**
* Collects and builds stack maps for a method. All the stack maps
@@ -61,71 +39,26 @@ class DexRegisterLocationHashFn {
class StackMapStream : public ValueObject {
public:
explicit StackMapStream(ScopedArenaAllocator* allocator, InstructionSet instruction_set)
- : allocator_(allocator),
- instruction_set_(instruction_set),
- stack_maps_(allocator->Adapter(kArenaAllocStackMapStream)),
- location_catalog_entries_(allocator->Adapter(kArenaAllocStackMapStream)),
- location_catalog_entries_indices_(allocator->Adapter(kArenaAllocStackMapStream)),
- dex_register_locations_(allocator->Adapter(kArenaAllocStackMapStream)),
- inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
- method_indices_(allocator->Adapter(kArenaAllocStackMapStream)),
- dex_register_entries_(allocator->Adapter(kArenaAllocStackMapStream)),
+ : instruction_set_(instruction_set),
+ stack_maps_(allocator),
+ register_masks_(allocator),
+ stack_masks_(allocator),
+ invoke_infos_(allocator),
+ inline_infos_(allocator),
+ dex_register_masks_(allocator),
+ dex_register_maps_(allocator),
+ dex_register_catalog_(allocator),
out_(allocator->Adapter(kArenaAllocStackMapStream)),
- dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(),
- allocator->Adapter(kArenaAllocStackMapStream)),
- current_entry_(),
- current_inline_info_(),
- current_dex_register_(0),
- in_inline_frame_(false) {
- stack_maps_.reserve(10);
- out_.reserve(64);
- location_catalog_entries_.reserve(4);
- dex_register_locations_.reserve(10 * 4);
- inline_infos_.reserve(2);
+ method_infos_(allocator),
+ lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
+ in_stack_map_(false),
+ in_inline_info_(false),
+ current_inline_infos_(0),
+ current_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
+ temp_dex_register_mask_(allocator, 32, true, kArenaAllocStackMapStream),
+ temp_dex_register_map_(allocator->Adapter(kArenaAllocStackMapStream)) {
}
- // A dex register map entry for a single stack map entry, contains what registers are live as
- // well as indices into the location catalog.
- class DexRegisterMapEntry {
- public:
- static const uint32_t kOffsetUnassigned = -1;
-
- BitVector* live_dex_registers_mask;
- uint32_t num_dex_registers;
- size_t locations_start_index;
- // Computed fields
- size_t hash = 0;
- uint32_t offset = kOffsetUnassigned;
-
- size_t ComputeSize(size_t catalog_size) const;
- };
-
- // See runtime/stack_map.h to know what these fields contain.
- struct StackMapEntry {
- uint32_t dex_pc;
- uint32_t packed_native_pc;
- uint32_t register_mask;
- BitVector* sp_mask;
- uint32_t inlining_depth;
- size_t inline_infos_start_index;
- uint32_t stack_mask_index;
- uint32_t register_mask_index;
- DexRegisterMapEntry dex_register_entry;
- size_t dex_register_map_index;
- InvokeType invoke_type;
- uint32_t dex_method_index;
- uint32_t dex_method_index_idx; // Index into dex method index table.
- };
-
- struct InlineInfoEntry {
- uint32_t dex_pc; // dex::kDexNoIndex for intrinsified native methods.
- ArtMethod* method;
- uint32_t method_index;
- DexRegisterMapEntry dex_register_entry;
- size_t dex_register_map_index;
- uint32_t dex_method_index_idx; // Index into the dex method index table.
- };
-
void BeginStackMapEntry(uint32_t dex_pc,
uint32_t native_pc_offset,
uint32_t register_mask,
@@ -160,58 +93,87 @@ class StackMapStream : public ValueObject {
size_t ComputeMethodInfoSize() const;
private:
- size_t ComputeDexRegisterLocationCatalogSize() const;
+ static constexpr uint32_t kNoValue = -1;
+
+ // The fields must be uint32_t and mirror the StackMap accessor in stack_map.h!
+ struct StackMapEntry {
+ uint32_t packed_native_pc;
+ uint32_t dex_pc;
+ uint32_t register_mask_index;
+ uint32_t stack_mask_index;
+ uint32_t inline_info_index;
+ uint32_t dex_register_mask_index;
+ uint32_t dex_register_map_index;
+ };
+
+ // The fields must be uint32_t and mirror the InlineInfo accessor in stack_map.h!
+ struct InlineInfoEntry {
+ uint32_t is_last;
+ uint32_t dex_pc;
+ uint32_t method_info_index;
+ uint32_t art_method_hi;
+ uint32_t art_method_lo;
+ uint32_t dex_register_mask_index;
+ uint32_t dex_register_map_index;
+ };
- // Prepare and deduplicate method indices.
- void PrepareMethodIndices();
+ // The fields must be uint32_t and mirror the InvokeInfo accessor in stack_map.h!
+ struct InvokeInfoEntry {
+ uint32_t packed_native_pc;
+ uint32_t invoke_type;
+ uint32_t method_info_index;
+ };
- // Deduplicate entry if possible and return the corresponding index into dex_register_entries_
- // array. If entry is not a duplicate, a new entry is added to dex_register_entries_.
- size_t AddDexRegisterMapEntry(const DexRegisterMapEntry& entry);
+ // The fields must be uint32_t and mirror the DexRegisterInfo accessor in stack_map.h!
+ struct DexRegisterEntry {
+ uint32_t kind;
+ uint32_t packed_value;
+ };
- // Return true if the two dex register map entries are equal.
- bool DexRegisterMapEntryEquals(const DexRegisterMapEntry& a, const DexRegisterMapEntry& b) const;
+ // The fields must be uint32_t and mirror the RegisterMask accessor in stack_map.h!
+ struct RegisterMaskEntry {
+ uint32_t value;
+ uint32_t shift;
+ };
- // Fill in the corresponding entries of a register map.
- void FillInDexRegisterMap(DexRegisterMap dex_register_map,
- uint32_t num_dex_registers,
- const BitVector& live_dex_registers_mask,
- uint32_t start_index_in_dex_register_locations) const;
+ void CreateDexRegisterMap();
void CheckDexRegisterMap(const DexRegisterMap& dex_register_map,
- size_t num_dex_registers,
- BitVector* live_dex_registers_mask,
- size_t dex_register_locations_index) const;
+ size_t dex_register_mask_index,
+ size_t dex_register_map_index) const;
void CheckCodeInfo(MemoryRegion region) const;
- ScopedArenaAllocator* const allocator_;
const InstructionSet instruction_set_;
- ScopedArenaVector<StackMapEntry> stack_maps_;
-
- // A catalog of unique [location_kind, register_value] pairs (per method).
- ScopedArenaVector<DexRegisterLocation> location_catalog_entries_;
- // Map from Dex register location catalog entries to their indices in the
- // location catalog.
- using LocationCatalogEntriesIndices = ScopedArenaHashMap<DexRegisterLocation,
- size_t,
- LocationCatalogEntriesIndicesEmptyFn,
- DexRegisterLocationHashFn>;
- LocationCatalogEntriesIndices location_catalog_entries_indices_;
-
- // A set of concatenated maps of Dex register locations indices to `location_catalog_entries_`.
- ScopedArenaVector<size_t> dex_register_locations_;
- ScopedArenaVector<InlineInfoEntry> inline_infos_;
- ScopedArenaVector<uint32_t> method_indices_;
- ScopedArenaVector<DexRegisterMapEntry> dex_register_entries_;
-
+ BitTableBuilder<StackMapEntry> stack_maps_;
+ BitTableBuilder<RegisterMaskEntry> register_masks_;
+ BitmapTableBuilder stack_masks_;
+ BitTableBuilder<InvokeInfoEntry> invoke_infos_;
+ BitTableBuilder<InlineInfoEntry> inline_infos_;
+ BitmapTableBuilder dex_register_masks_;
+ BitTableBuilder<uint32_t> dex_register_maps_;
+ BitTableBuilder<DexRegisterEntry> dex_register_catalog_;
ScopedArenaVector<uint8_t> out_;
- ScopedArenaSafeMap<uint32_t, ScopedArenaVector<uint32_t>> dex_map_hash_to_stack_map_indices_;
+ BitTableBuilder<uint32_t> method_infos_;
+
+ ScopedArenaVector<BitVector*> lazy_stack_masks_;
+
+  // Variables that track the current state between Begin/End calls.
+ bool in_stack_map_;
+ bool in_inline_info_;
+ StackMapEntry current_stack_map_;
+ uint32_t current_inline_infos_;
+ ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
+ size_t expected_num_dex_registers_;
+
+ // Temporary variables used in CreateDexRegisterMap.
+ // They are here so that we can reuse the reserved memory.
+ ArenaBitVector temp_dex_register_mask_;
+ ScopedArenaVector<uint32_t> temp_dex_register_map_;
- StackMapEntry current_entry_;
- InlineInfoEntry current_inline_info_;
- uint32_t current_dex_register_;
- bool in_inline_frame_;
+ // Records num_dex_registers for every StackMapEntry and InlineInfoEntry.
+ // Only used in debug builds to verify the dex registers at the end.
+ std::vector<uint32_t> dcheck_num_dex_registers_;
DISALLOW_COPY_AND_ASSIGN(StackMapStream);
};
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 112771847c..262c240bc7 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -45,6 +45,8 @@ static bool CheckStackMask(
using Kind = DexRegisterLocation::Kind;
+constexpr static uint32_t kPcAlign = GetInstructionSetInstructionAlignment(kRuntimeISA);
+
TEST(StackMapTest, Test1) {
MallocArenaPool pool;
ArenaStack arena_stack(&pool);
@@ -53,7 +55,7 @@ TEST(StackMapTest, Test1) {
ArenaBitVector sp_mask(&allocator, 0, false);
size_t number_of_dex_registers = 2;
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Short location.
stream.EndStackMapEntry();
@@ -68,18 +70,12 @@ TEST(StackMapTest, Test1) {
uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(2u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
- // The Dex register location catalog contains:
- // - one 1-byte short Dex register location, and
- // - one 5-byte large Dex register location.
- size_t expected_location_catalog_size = 1u + 5u;
- ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
StackMap stack_map = code_info.GetStackMapAt(0);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask));
@@ -89,30 +85,17 @@ TEST(StackMapTest, Test1) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_dex_register_map_size = 1u + 1u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0));
ASSERT_EQ(-2, dex_register_map.GetConstant(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(0u, index0);
- ASSERT_EQ(1u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
ASSERT_EQ(Kind::kInStack, location0.GetKind());
ASSERT_EQ(Kind::kConstant, location1.GetKind());
- ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
- ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
@@ -131,7 +114,7 @@ TEST(StackMapTest, Test2) {
sp_mask1.SetBit(4);
size_t number_of_dex_registers = 2;
size_t number_of_dex_registers_in_inline_info = 0;
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 2);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -143,7 +126,7 @@ TEST(StackMapTest, Test2) {
ArenaBitVector sp_mask2(&allocator, 0, true);
sp_mask2.SetBit(3);
sp_mask2.SetBit(8);
- stream.BeginStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 18); // Short location.
stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3); // Short location.
stream.EndStackMapEntry();
@@ -151,7 +134,7 @@ TEST(StackMapTest, Test2) {
ArenaBitVector sp_mask3(&allocator, 0, true);
sp_mask3.SetBit(1);
sp_mask3.SetBit(5);
- stream.BeginStackMapEntry(2, 192, 0xAB, &sp_mask3, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 6); // Short location.
stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8); // Short location.
stream.EndStackMapEntry();
@@ -159,7 +142,7 @@ TEST(StackMapTest, Test2) {
ArenaBitVector sp_mask4(&allocator, 0, true);
sp_mask4.SetBit(6);
sp_mask4.SetBit(7);
- stream.BeginStackMapEntry(3, 256, 0xCD, &sp_mask4, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3); // Short location, same in stack map 2.
stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1); // Short location.
stream.EndStackMapEntry();
@@ -174,20 +157,14 @@ TEST(StackMapTest, Test2) {
uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(7u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
- // The Dex register location catalog contains:
- // - six 1-byte short Dex register locations, and
- // - one 5-byte large Dex register location.
- size_t expected_location_catalog_size = 6u * 1u + 5u;
- ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
// First stack map.
{
StackMap stack_map = code_info.GetStackMapAt(0);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
@@ -197,30 +174,17 @@ TEST(StackMapTest, Test2) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_dex_register_map_size = 1u + 1u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0));
ASSERT_EQ(-2, dex_register_map.GetConstant(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(0u, index0);
- ASSERT_EQ(1u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
ASSERT_EQ(Kind::kInStack, location0.GetKind());
ASSERT_EQ(Kind::kConstant, location1.GetKind());
- ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
- ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
@@ -237,9 +201,9 @@ TEST(StackMapTest, Test2) {
{
StackMap stack_map = code_info.GetStackMapAt(1);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u * kPcAlign)));
ASSERT_EQ(1u, stack_map.GetDexPc());
- ASSERT_EQ(128u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(128u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask2));
@@ -249,30 +213,17 @@ TEST(StackMapTest, Test2) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_dex_register_map_size = 1u + 1u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(18, dex_register_map.GetMachineRegister(0));
ASSERT_EQ(3, dex_register_map.GetMachineRegister(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(2u, index0);
- ASSERT_EQ(3u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(2);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(3);
ASSERT_EQ(Kind::kInRegister, location0.GetKind());
ASSERT_EQ(Kind::kInFpuRegister, location1.GetKind());
- ASSERT_EQ(Kind::kInRegister, location0.GetInternalKind());
- ASSERT_EQ(Kind::kInFpuRegister, location1.GetInternalKind());
ASSERT_EQ(18, location0.GetValue());
ASSERT_EQ(3, location1.GetValue());
@@ -283,9 +234,9 @@ TEST(StackMapTest, Test2) {
{
StackMap stack_map = code_info.GetStackMapAt(2);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u * kPcAlign)));
ASSERT_EQ(2u, stack_map.GetDexPc());
- ASSERT_EQ(192u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(192u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask3));
@@ -295,30 +246,17 @@ TEST(StackMapTest, Test2) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_dex_register_map_size = 1u + 1u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(6, dex_register_map.GetMachineRegister(0));
ASSERT_EQ(8, dex_register_map.GetMachineRegister(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(4u, index0);
- ASSERT_EQ(5u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(4);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(5);
ASSERT_EQ(Kind::kInRegister, location0.GetKind());
ASSERT_EQ(Kind::kInRegisterHigh, location1.GetKind());
- ASSERT_EQ(Kind::kInRegister, location0.GetInternalKind());
- ASSERT_EQ(Kind::kInRegisterHigh, location1.GetInternalKind());
ASSERT_EQ(6, location0.GetValue());
ASSERT_EQ(8, location1.GetValue());
@@ -329,9 +267,9 @@ TEST(StackMapTest, Test2) {
{
StackMap stack_map = code_info.GetStackMapAt(3);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u * kPcAlign)));
ASSERT_EQ(3u, stack_map.GetDexPc());
- ASSERT_EQ(256u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(256u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask4));
@@ -341,30 +279,17 @@ TEST(StackMapTest, Test2) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_dex_register_map_size = 1u + 1u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(3, dex_register_map.GetMachineRegister(0));
ASSERT_EQ(1, dex_register_map.GetMachineRegister(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(3u, index0); // Shared with second stack map.
- ASSERT_EQ(6u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(3);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(6);
ASSERT_EQ(Kind::kInFpuRegister, location0.GetKind());
ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetKind());
- ASSERT_EQ(Kind::kInFpuRegister, location0.GetInternalKind());
- ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetInternalKind());
ASSERT_EQ(3, location0.GetValue());
ASSERT_EQ(1, location1.GetValue());
@@ -384,7 +309,7 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
sp_mask1.SetBit(4);
const size_t number_of_dex_registers = 2;
const size_t number_of_dex_registers_in_inline_info = 2;
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 1);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 1);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -403,20 +328,14 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(2u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
- // The Dex register location catalog contains:
- // - one 1-byte short Dex register locations, and
- // - one 5-byte large Dex register location.
- const size_t expected_location_catalog_size = 1u + 5u;
- ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
// First stack map.
{
StackMap stack_map = code_info.GetStackMapAt(0);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
@@ -425,30 +344,17 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers));
ASSERT_TRUE(map.IsDexRegisterLive(0));
ASSERT_TRUE(map.IsDexRegisterLive(1));
- ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask, and
- // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
- size_t expected_map_size = 1u + 1u;
- ASSERT_EQ(expected_map_size, map.Size());
+ ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kInStack, map.GetLocationKind(0));
ASSERT_EQ(Kind::kConstant, map.GetLocationKind(1));
- ASSERT_EQ(Kind::kInStack, map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kConstantLargeValue, map.GetLocationInternalKind(1));
ASSERT_EQ(0, map.GetStackOffsetInBytes(0));
ASSERT_EQ(-2, map.GetConstant(1));
- const size_t index0 = map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- const size_t index1 = map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(0u, index0);
- ASSERT_EQ(1u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+ DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
ASSERT_EQ(Kind::kInStack, location0.GetKind());
ASSERT_EQ(Kind::kConstant, location1.GetKind());
- ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
- ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
@@ -456,8 +362,8 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
// one.
ASSERT_TRUE(stack_map.HasInlineInfo());
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
- EXPECT_EQ(inline_info.GetDexRegisterMapOffsetAtDepth(0),
- stack_map.GetDexRegisterMapOffset());
+ EXPECT_EQ(inline_info.GetDexRegisterMapIndexAtDepth(0),
+ stack_map.GetDexRegisterMapIndex());
}
}
@@ -469,7 +375,7 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kNone, 0); // No location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
@@ -484,17 +390,12 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(1u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
- // The Dex register location catalog contains:
- // - one 5-byte large Dex register location.
- size_t expected_location_catalog_size = 5u;
- ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
StackMap stack_map = code_info.GetStackMapAt(0);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
ASSERT_TRUE(stack_map.HasDexRegisterMap());
@@ -502,100 +403,19 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0));
ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
- ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
- // The Dex register map contains:
- // - one 1-byte live bit mask.
- // No space is allocated for the sole location catalog entry index, as it is useless.
- size_t expected_dex_register_map_size = 1u + 0u;
- ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+ ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters());
ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationKind(0));
ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
- ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationInternalKind(0));
- ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
ASSERT_EQ(-2, dex_register_map.GetConstant(1));
- size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
- size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
- ASSERT_EQ(DexRegisterLocationCatalog::kNoLocationEntryIndex, index0);
- ASSERT_EQ(0u, index1);
- DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
- DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
- ASSERT_EQ(Kind::kNone, location0.GetKind());
+ DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(0);
ASSERT_EQ(Kind::kConstant, location1.GetKind());
- ASSERT_EQ(Kind::kNone, location0.GetInternalKind());
- ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
- ASSERT_EQ(0, location0.GetValue());
ASSERT_EQ(-2, location1.GetValue());
ASSERT_FALSE(stack_map.HasInlineInfo());
}
-// Generate a stack map whose dex register offset is
-// StackMap::kNoDexRegisterMapSmallEncoding, and ensure we do
-// not treat it as kNoDexRegisterMap.
-TEST(StackMapTest, DexRegisterMapOffsetOverflow) {
- MallocArenaPool pool;
- ArenaStack arena_stack(&pool);
- ScopedArenaAllocator allocator(&arena_stack);
- StackMapStream stream(&allocator, kRuntimeISA);
-
- ArenaBitVector sp_mask(&allocator, 0, false);
- uint32_t number_of_dex_registers = 1024;
- // Create the first stack map (and its Dex register map).
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- uint32_t number_of_dex_live_registers_in_dex_register_map_0 = number_of_dex_registers - 8;
- for (uint32_t i = 0; i < number_of_dex_live_registers_in_dex_register_map_0; ++i) {
- // Use two different Dex register locations to populate this map,
- // as using a single value (in the whole CodeInfo object) would
- // make this Dex register mapping data empty (see
- // art::DexRegisterMap::SingleEntrySizeInBits).
- stream.AddDexRegisterEntry(Kind::kConstant, i % 2); // Short location.
- }
- stream.EndStackMapEntry();
- // Create the second stack map (and its Dex register map).
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
- for (uint32_t i = 0; i < number_of_dex_registers; ++i) {
- stream.AddDexRegisterEntry(Kind::kConstant, 0); // Short location.
- }
- stream.EndStackMapEntry();
-
- size_t size = stream.PrepareForFillIn();
- void* memory = allocator.Alloc(size, kArenaAllocMisc);
- MemoryRegion region(memory, size);
- stream.FillInCodeInfo(region);
-
- CodeInfo code_info(region);
- // The location catalog contains two entries (DexRegisterLocation(kConstant, 0)
- // and DexRegisterLocation(kConstant, 1)), therefore the location catalog index
- // has a size of 1 bit.
- uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
- ASSERT_EQ(2u, number_of_catalog_entries);
- ASSERT_EQ(1u, DexRegisterMap::SingleEntrySizeInBits(number_of_catalog_entries));
-
- // The first Dex register map contains:
- // - a live register bit mask for 1024 registers (that is, 128 bytes of
- // data); and
- // - Dex register mapping information for 1016 1-bit Dex (live) register
- // locations (that is, 127 bytes of data).
- // Hence it has a size of 255 bytes, and therefore...
- ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers));
- StackMap stack_map0 = code_info.GetStackMapAt(0);
- DexRegisterMap dex_register_map0 =
- code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers);
- ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_catalog_entries));
- ASSERT_EQ(255u, dex_register_map0.Size());
-
- StackMap stack_map1 = code_info.GetStackMapAt(1);
- ASSERT_TRUE(stack_map1.HasDexRegisterMap());
- // ...the offset of the second Dex register map (relative to the
- // beginning of the Dex register maps region) is 255 (i.e.,
- // kNoDexRegisterMapSmallEncoding).
- ASSERT_NE(stack_map1.GetDexRegisterMapOffset(),
- StackMap::kNoValue);
- ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(), 0xFFu);
-}
-
TEST(StackMapTest, TestShareDexRegisterMap) {
MallocArenaPool pool;
ArenaStack arena_stack(&pool);
@@ -605,17 +425,17 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
// First stack map.
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Second stack map, which should share the same dex register map.
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Third stack map (doesn't share the dex register map).
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 2); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
@@ -646,12 +466,12 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
ASSERT_EQ(-2, dex_registers2.GetConstant(1));
// Verify dex register map offsets.
- ASSERT_EQ(sm0.GetDexRegisterMapOffset(),
- sm1.GetDexRegisterMapOffset());
- ASSERT_NE(sm0.GetDexRegisterMapOffset(),
- sm2.GetDexRegisterMapOffset());
- ASSERT_NE(sm1.GetDexRegisterMapOffset(),
- sm2.GetDexRegisterMapOffset());
+ ASSERT_EQ(sm0.GetDexRegisterMapIndex(),
+ sm1.GetDexRegisterMapIndex());
+ ASSERT_NE(sm0.GetDexRegisterMapIndex(),
+ sm2.GetDexRegisterMapIndex());
+ ASSERT_NE(sm1.GetDexRegisterMapIndex(),
+ sm2.GetDexRegisterMapIndex());
}
TEST(StackMapTest, TestNoDexRegisterMap) {
@@ -662,11 +482,12 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 0;
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
stream.EndStackMapEntry();
number_of_dex_registers = 1;
- stream.BeginStackMapEntry(1, 68, 0x4, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask, number_of_dex_registers, 0);
+ stream.AddDexRegisterEntry(Kind::kNone, 0);
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -679,14 +500,12 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
ASSERT_EQ(0u, number_of_catalog_entries);
- DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
- ASSERT_EQ(0u, location_catalog.Size());
StackMap stack_map = code_info.GetStackMapAt(0);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
ASSERT_EQ(0u, stack_map.GetDexPc());
- ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
ASSERT_FALSE(stack_map.HasDexRegisterMap());
@@ -694,12 +513,12 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
stack_map = code_info.GetStackMapAt(1);
ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1)));
- ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68)));
+ ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68 * kPcAlign)));
ASSERT_EQ(1u, stack_map.GetDexPc());
- ASSERT_EQ(68u, stack_map.GetNativePcOffset(kRuntimeISA));
+ ASSERT_EQ(68u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(stack_map));
- ASSERT_FALSE(stack_map.HasDexRegisterMap());
+ ASSERT_TRUE(stack_map.HasDexRegisterMap());
ASSERT_FALSE(stack_map.HasInlineInfo());
}
@@ -715,7 +534,7 @@ TEST(StackMapTest, InlineTest) {
sp_mask1.SetBit(4);
// First stack map.
- stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, 2, 2);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, 2, 2);
stream.AddDexRegisterEntry(Kind::kInStack, 0);
stream.AddDexRegisterEntry(Kind::kConstant, 4);
@@ -731,7 +550,7 @@ TEST(StackMapTest, InlineTest) {
stream.EndStackMapEntry();
// Second stack map.
- stream.BeginStackMapEntry(2, 22, 0x3, &sp_mask1, 2, 3);
+ stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1, 2, 3);
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
@@ -749,13 +568,13 @@ TEST(StackMapTest, InlineTest) {
stream.EndStackMapEntry();
// Third stack map.
- stream.BeginStackMapEntry(4, 56, 0x3, &sp_mask1, 2, 0);
+ stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1, 2, 0);
stream.AddDexRegisterEntry(Kind::kNone, 0);
stream.AddDexRegisterEntry(Kind::kConstant, 4);
stream.EndStackMapEntry();
// Fourth stack map.
- stream.BeginStackMapEntry(6, 78, 0x3, &sp_mask1, 2, 3);
+ stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1, 2, 3);
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
@@ -869,6 +688,7 @@ TEST(StackMapTest, InlineTest) {
}
TEST(StackMapTest, PackedNativePcTest) {
+ // Test minimum alignments, and decoding.
uint32_t packed_thumb2 =
StackMap::PackNativePc(kThumb2InstructionAlignment, InstructionSet::kThumb2);
uint32_t packed_arm64 =
@@ -904,9 +724,9 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);
sp_mask.SetBit(4);
- stream.BeginStackMapEntry(0, 4, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 8, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
stream.EndStackMapEntry();
size_t size = stream.PrepareForFillIn();
@@ -917,8 +737,8 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
CodeInfo code_info(region);
ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
- StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4);
- StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8);
+ StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4 * kPcAlign);
+ StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8 * kPcAlign);
EXPECT_EQ(stack_map1.GetStackMaskIndex(),
stack_map2.GetStackMaskIndex());
}
@@ -931,13 +751,13 @@ TEST(StackMapTest, TestInvokeInfo) {
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);
- stream.BeginStackMapEntry(0, 4, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
stream.AddInvoke(kSuper, 1);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 8, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
stream.AddInvoke(kStatic, 3);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 16, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask, 0, 0);
stream.AddInvoke(kDirect, 65535);
stream.EndStackMapEntry();
@@ -954,9 +774,9 @@ TEST(StackMapTest, TestInvokeInfo) {
MethodInfo method_info(method_info_region.begin());
ASSERT_EQ(3u, code_info.GetNumberOfStackMaps());
- InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4));
- InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8));
- InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16));
+ InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4 * kPcAlign));
+ InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8 * kPcAlign));
+ InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16 * kPcAlign));
InvokeInfo invoke_invalid(code_info.GetInvokeInfoForNativePcOffset(12));
EXPECT_FALSE(invoke_invalid.IsValid()); // No entry for that index.
EXPECT_TRUE(invoke1.IsValid());
@@ -964,13 +784,13 @@ TEST(StackMapTest, TestInvokeInfo) {
EXPECT_TRUE(invoke3.IsValid());
EXPECT_EQ(invoke1.GetInvokeType(), kSuper);
EXPECT_EQ(invoke1.GetMethodIndex(method_info), 1u);
- EXPECT_EQ(invoke1.GetNativePcOffset(kRuntimeISA), 4u);
+ EXPECT_EQ(invoke1.GetNativePcOffset(kRuntimeISA), 4u * kPcAlign);
EXPECT_EQ(invoke2.GetInvokeType(), kStatic);
EXPECT_EQ(invoke2.GetMethodIndex(method_info), 3u);
- EXPECT_EQ(invoke2.GetNativePcOffset(kRuntimeISA), 8u);
+ EXPECT_EQ(invoke2.GetNativePcOffset(kRuntimeISA), 8u * kPcAlign);
EXPECT_EQ(invoke3.GetInvokeType(), kDirect);
EXPECT_EQ(invoke3.GetMethodIndex(method_info), 65535u);
- EXPECT_EQ(invoke3.GetNativePcOffset(kRuntimeISA), 16u);
+ EXPECT_EQ(invoke3.GetNativePcOffset(kRuntimeISA), 16u * kPcAlign);
}
} // namespace art
diff --git a/compiler/verifier_deps_test.cc b/compiler/verifier_deps_test.cc
index c0892ff466..3fe2ec0ac0 100644
--- a/compiler/verifier_deps_test.cc
+++ b/compiler/verifier_deps_test.cc
@@ -65,17 +65,16 @@ class VerifierDepsTest : public CommonCompilerTest {
callbacks_.reset(new VerifierDepsCompilerCallbacks());
}
- mirror::Class* FindClassByName(const std::string& name, ScopedObjectAccess* soa)
+ ObjPtr<mirror::Class> FindClassByName(ScopedObjectAccess& soa, const std::string& name)
REQUIRES_SHARED(Locks::mutator_lock_) {
- StackHandleScope<1> hs(Thread::Current());
+ StackHandleScope<1> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader_handle(
- hs.NewHandle(soa->Decode<mirror::ClassLoader>(class_loader_)));
- mirror::Class* klass = class_linker_->FindClass(Thread::Current(),
- name.c_str(),
- class_loader_handle);
+ hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader_)));
+ ObjPtr<mirror::Class> klass =
+ class_linker_->FindClass(soa.Self(), name.c_str(), class_loader_handle);
if (klass == nullptr) {
- DCHECK(Thread::Current()->IsExceptionPending());
- Thread::Current()->ClearException();
+ DCHECK(soa.Self()->IsExceptionPending());
+ soa.Self()->ClearException();
}
return klass;
}
@@ -114,16 +113,16 @@ class VerifierDepsTest : public CommonCompilerTest {
callbacks->SetVerifierDeps(verifier_deps_.get());
}
- void LoadDexFile(ScopedObjectAccess* soa, const char* name1, const char* name2 = nullptr)
+ void LoadDexFile(ScopedObjectAccess& soa, const char* name1, const char* name2 = nullptr)
REQUIRES_SHARED(Locks::mutator_lock_) {
class_loader_ = (name2 == nullptr) ? LoadDex(name1) : LoadMultiDex(name1, name2);
dex_files_ = GetDexFiles(class_loader_);
primary_dex_file_ = dex_files_.front();
SetVerifierDeps(dex_files_);
- StackHandleScope<1> hs(soa->Self());
+ StackHandleScope<1> hs(soa.Self());
Handle<mirror::ClassLoader> loader =
- hs.NewHandle(soa->Decode<mirror::ClassLoader>(class_loader_));
+ hs.NewHandle(soa.Decode<mirror::ClassLoader>(class_loader_));
for (const DexFile* dex_file : dex_files_) {
class_linker_->RegisterDexFile(*dex_file, loader.Get());
}
@@ -133,16 +132,16 @@ class VerifierDepsTest : public CommonCompilerTest {
compiler_driver_->SetDexFilesForOatFile(dex_files_);
}
- void LoadDexFile(ScopedObjectAccess* soa) REQUIRES_SHARED(Locks::mutator_lock_) {
+ void LoadDexFile(ScopedObjectAccess& soa) REQUIRES_SHARED(Locks::mutator_lock_) {
LoadDexFile(soa, "VerifierDeps");
CHECK_EQ(dex_files_.size(), 1u);
- klass_Main_ = FindClassByName("LMain;", soa);
+ klass_Main_ = FindClassByName(soa, "LMain;");
CHECK(klass_Main_ != nullptr);
}
bool VerifyMethod(const std::string& method_name) {
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa);
+ LoadDexFile(soa);
StackHandleScope<2> hs(soa.Self());
Handle<mirror::ClassLoader> class_loader_handle(
@@ -193,7 +192,7 @@ class VerifierDepsTest : public CommonCompilerTest {
void VerifyDexFile(const char* multidex = nullptr) {
{
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa, "VerifierDeps", multidex);
+ LoadDexFile(soa, "VerifierDeps", multidex);
}
SetupCompilerDriver();
VerifyWithCompilerDriver(/* verifier_deps */ nullptr);
@@ -204,13 +203,14 @@ class VerifierDepsTest : public CommonCompilerTest {
bool is_strict,
bool is_assignable) {
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa);
- mirror::Class* klass_dst = FindClassByName(dst, &soa);
+ LoadDexFile(soa);
+ StackHandleScope<1> hs(soa.Self());
+ Handle<mirror::Class> klass_dst = hs.NewHandle(FindClassByName(soa, dst));
DCHECK(klass_dst != nullptr) << dst;
- mirror::Class* klass_src = FindClassByName(src, &soa);
+ ObjPtr<mirror::Class> klass_src = FindClassByName(soa, src);
DCHECK(klass_src != nullptr) << src;
verifier_deps_->AddAssignability(*primary_dex_file_,
- klass_dst,
+ klass_dst.Get(),
klass_src,
is_strict,
is_assignable);
@@ -453,12 +453,12 @@ class VerifierDepsTest : public CommonCompilerTest {
std::vector<const DexFile*> dex_files_;
const DexFile* primary_dex_file_;
jobject class_loader_;
- mirror::Class* klass_Main_;
+ ObjPtr<mirror::Class> klass_Main_;
};
TEST_F(VerifierDepsTest, StringToId) {
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa);
+ LoadDexFile(soa);
dex::StringIndex id_Main1 = verifier_deps_->GetIdFromString(*primary_dex_file_, "LMain;");
ASSERT_LT(id_Main1.index_, primary_dex_file_->NumStringIds());
@@ -1441,7 +1441,7 @@ TEST_F(VerifierDepsTest, CompilerDriver) {
for (bool verify_failure : { false, true }) {
{
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa, "VerifierDeps", multi);
+ LoadDexFile(soa, "VerifierDeps", multi);
}
VerifyWithCompilerDriver(/* verifier_deps */ nullptr);
@@ -1450,7 +1450,7 @@ TEST_F(VerifierDepsTest, CompilerDriver) {
{
ScopedObjectAccess soa(Thread::Current());
- LoadDexFile(&soa, "VerifierDeps", multi);
+ LoadDexFile(soa, "VerifierDeps", multi);
}
verifier::VerifierDeps decoded_deps(dex_files_, ArrayRef<const uint8_t>(buffer));
if (verify_failure) {