author Mathieu Chartier <mathieuc@google.com> 2017-02-16 02:03:30 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2017-02-16 02:03:30 +0000
commit 9fb10fb39bcb3d9a4dc7e16f8c1d38dcc112639c (patch)
tree 031b225f354460292f43210960ff200067123ab2
parent 28de3b69db7f79852b93dc1c94c7402ba2e99e0b (diff)
parent d776ff08e07494327716f0d2ea1a774b2ebfbca9 (diff)
Merge "Add invoke infos to stack maps"
-rw-r--r--  compiler/optimizing/code_generator.cc                       17
-rw-r--r--  compiler/optimizing/stack_map_stream.cc                     45
-rw-r--r--  compiler/optimizing/stack_map_stream.h                      12
-rw-r--r--  compiler/optimizing/stack_map_test.cc                       45
-rw-r--r--  oatdump/oatdump.cc                                          13
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc  154
-rw-r--r--  runtime/oat.h                                                2
-rw-r--r--  runtime/stack_map.h                                        107
8 files changed, 334 insertions, 61 deletions
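
What the change does: each stack map entry may now carry an optional invoke record, so the quick resolution trampoline can read the invoke type and dex method index straight out of the oat file instead of loading and decoding the caller's dex bytecode. Conceptually each record holds three fields (a sketch with illustrative names only; the real InvokeInfo in runtime/stack_map.h below bit-packs them into a BitMemoryRegion):

// Sketch, not the ART declaration: the actual entry is bit-packed with
// per-method field widths (see ComputeInvokeInfoEncoding below).
struct InvokeInfoSketch {
  uint32_t native_pc_offset;   // lookup key: native pc of the call site
  uint32_t invoke_type;        // kStatic, kDirect, kSuper, ...
  uint32_t dex_method_index;   // method index in the caller's dex file
};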
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index bac16cd5df..8dd423fcbb 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -58,6 +58,9 @@
namespace art {
+// If true, we record the static and direct invokes in the invoke infos.
+static constexpr bool kEnableDexLayoutOptimizations = false;
+
// Return whether a location is consistent with a type.
static bool CheckType(Primitive::Type type, Location location) {
if (location.IsFpuRegister()
@@ -801,7 +804,18 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
outer_environment_size,
inlining_depth);
- EmitEnvironment(instruction->GetEnvironment(), slow_path);
+ HEnvironment* const environment = instruction->GetEnvironment();
+ EmitEnvironment(environment, slow_path);
+ // Record invoke info; the common cases for the trampoline are super and static invokes.
+ // Only record these to reduce oat file size.
+ if (kEnableDexLayoutOptimizations) {
+ if (environment != nullptr &&
+ instruction->IsInvoke() &&
+ instruction->IsInvokeStaticOrDirect()) {
+ HInvoke* const invoke = instruction->AsInvoke();
+ stack_map_stream_.AddInvoke(invoke->GetInvokeType(), invoke->GetDexMethodIndex());
+ }
+ }
stack_map_stream_.EndStackMapEntry();
HLoopInformation* info = instruction->GetBlock()->GetLoopInformation();
@@ -818,7 +832,6 @@ void CodeGenerator::RecordPcInfo(HInstruction* instruction,
EmitEnvironment(instruction->GetEnvironment(), slow_path);
stack_map_stream_.EndStackMapEntry();
if (kIsDebugBuild) {
- HEnvironment* environment = instruction->GetEnvironment();
for (size_t i = 0, environment_size = environment->Size(); i < environment_size; ++i) {
HInstruction* in_environment = environment->GetInstructionAt(i);
if (in_environment != nullptr) {
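
Note the gating above: invoke infos are only recorded when kEnableDexLayoutOptimizations is true (it defaults to false in this change), and only for HInvokeStaticOrDirect instructions that have an environment. The writer-side sequence, mirroring the new test in stack_map_test.cc below, is (a sketch; the argument values are illustrative):

// Open a stack map entry, optionally attach one invoke record, close it.
stream.BeginStackMapEntry(/* dex_pc */ 0, /* native_pc_offset */ 4,
                          /* register_mask */ 0x3, &sp_mask,
                          /* num_dex_registers */ 0, /* inlining_depth */ 0);
stream.AddInvoke(kStatic, /* dex_method_index */ 3);
stream.EndStackMapEntry();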
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 1bcc8e1ace..eeae96e6c2 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -41,12 +41,12 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
current_entry_.inlining_depth = inlining_depth;
current_entry_.inline_infos_start_index = inline_infos_.size();
current_entry_.stack_mask_index = 0;
+ current_entry_.dex_method_index = DexFile::kDexNoIndex;
current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
current_entry_.dex_register_entry.live_dex_registers_mask = (num_dex_registers != 0)
? ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream)
: nullptr;
-
if (sp_mask != nullptr) {
stack_mask_max_ = std::max(stack_mask_max_, sp_mask->GetHighestBitSet());
}
@@ -99,6 +99,11 @@ void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t
current_dex_register_++;
}
+void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
+ current_entry_.invoke_type = invoke_type;
+ current_entry_.dex_method_index = dex_method_index;
+}
+
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
uint32_t dex_pc,
uint32_t num_dex_registers,
@@ -166,6 +171,7 @@ size_t StackMapStream::PrepareForFillIn() {
encoding.inline_info.num_entries,
encoding.register_mask.num_entries,
encoding.stack_mask.num_entries);
+ ComputeInvokeInfoEncoding(&encoding);
DCHECK_EQ(code_info_encoding_.size(), 0u);
encoding.Compress(&code_info_encoding_);
encoding.ComputeTableOffsets();
@@ -212,6 +218,24 @@ size_t StackMapStream::ComputeDexRegisterMapsSize() const {
return size;
}
+void StackMapStream::ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding) {
+ DCHECK(encoding != nullptr);
+ uint32_t native_pc_max = 0;
+ uint16_t method_index_max = 0;
+ size_t invoke_infos_count = 0;
+ size_t invoke_type_max = 0;
+ for (const StackMapEntry& entry : stack_maps_) {
+ if (entry.dex_method_index != DexFile::kDexNoIndex) {
+ native_pc_max = std::max(native_pc_max, entry.native_pc_code_offset.CompressedValue());
+ method_index_max = std::max(method_index_max, static_cast<uint16_t>(entry.dex_method_index));
+ invoke_type_max = std::max(invoke_type_max, static_cast<size_t>(entry.invoke_type));
+ ++invoke_infos_count;
+ }
+ }
+ encoding->invoke_info.num_entries = invoke_infos_count;
+ encoding->invoke_info.encoding.SetFromSizes(native_pc_max, invoke_type_max, method_index_max);
+}
+
void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
size_t dex_register_maps_bytes) {
uint32_t method_index_max = 0;
@@ -304,6 +328,7 @@ void StackMapStream::FillIn(MemoryRegion region) {
ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
uintptr_t next_dex_register_map_offset = 0;
uintptr_t next_inline_info_index = 0;
+ size_t invoke_info_idx = 0;
for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {
StackMap stack_map = code_info.GetStackMapAt(i, encoding);
StackMapEntry entry = stack_maps_[i];
@@ -318,6 +343,14 @@ void StackMapStream::FillIn(MemoryRegion region) {
dex_register_locations_region);
stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, offset);
+ if (entry.dex_method_index != DexFile::kDexNoIndex) {
+ InvokeInfo invoke_info(code_info.GetInvokeInfo(encoding, invoke_info_idx));
+ invoke_info.SetNativePcCodeOffset(encoding.invoke_info.encoding, entry.native_pc_code_offset);
+ invoke_info.SetInvokeType(encoding.invoke_info.encoding, entry.invoke_type);
+ invoke_info.SetMethodIndex(encoding.invoke_info.encoding, entry.dex_method_index);
+ ++invoke_info_idx;
+ }
+
// Set the inlining info.
if (entry.inlining_depth != 0) {
InlineInfo inline_info = code_info.GetInlineInfo(next_inline_info_index, encoding);
@@ -528,6 +561,7 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
CodeInfo code_info(region);
CodeInfoEncoding encoding = code_info.ExtractEncoding();
DCHECK_EQ(code_info.GetNumberOfStackMaps(encoding), stack_maps_.size());
+ size_t invoke_info_index = 0;
for (size_t s = 0; s < stack_maps_.size(); ++s) {
const StackMap stack_map = code_info.GetStackMapAt(s, encoding);
const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
@@ -552,7 +586,14 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
DCHECK_EQ(stack_mask.LoadBit(b), 0u);
}
}
-
+ if (entry.dex_method_index != DexFile::kDexNoIndex) {
+ InvokeInfo invoke_info = code_info.GetInvokeInfo(encoding, invoke_info_index);
+ DCHECK_EQ(invoke_info.GetNativePcOffset(encoding.invoke_info.encoding, instruction_set_),
+ entry.native_pc_code_offset.Uint32Value(instruction_set_));
+ DCHECK_EQ(invoke_info.GetInvokeType(encoding.invoke_info.encoding), entry.invoke_type);
+ DCHECK_EQ(invoke_info.GetMethodIndex(encoding.invoke_info.encoding), entry.dex_method_index);
+ invoke_info_index++;
+ }
CheckDexRegisterMap(code_info,
code_info.GetDexRegisterMapOf(
stack_map, encoding, entry.dex_register_entry.num_dex_registers),
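
ComputeInvokeInfoEncoding above sizes each field of the packed record to the maximum value observed, skipping entries whose dex_method_index stayed at DexFile::kDexNoIndex (the default set in BeginStackMapEntry). A standalone worked example, using the values recorded by the TestInvokeInfo test below (MinimumBitsToStore is re-implemented here for illustration, and the native pc maximum ignores the per-ISA compression applied by CompressedValue):

#include <cstddef>

// Illustration only: bits needed to store any value in [0, value].
static size_t MinimumBitsToStore(size_t value) {
  size_t bits = 0;
  for (; value != 0; value >>= 1) ++bits;
  return bits;
}

int main() {
  // Maxima over the three invokes in TestInvokeInfo:
  // native pc offsets {4, 8, 16}, method indices {1, 3, 65535}.
  size_t native_pc_bits = MinimumBitsToStore(16);        // 5 bits
  size_t method_index_bits = MinimumBitsToStore(65535);  // 16 bits
  size_t invoke_type_bits = MinimumBitsToStore(3);       // 2 bits, assuming the largest
                                                         // recorded InvokeType value is 3
  // Each packed entry then takes 5 + 2 + 16 = 23 bits.
  return static_cast<int>(native_pc_bits + invoke_type_bits + method_index_bits);
}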
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index bba3d51e62..4225a875b9 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -118,6 +118,8 @@ class StackMapStream : public ValueObject {
uint32_t register_mask_index;
DexRegisterMapEntry dex_register_entry;
size_t dex_register_map_index;
+ InvokeType invoke_type;
+ uint32_t dex_method_index;
};
struct InlineInfoEntry {
@@ -138,6 +140,8 @@ class StackMapStream : public ValueObject {
void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value);
+ void AddInvoke(InvokeType type, uint32_t dex_method_index);
+
void BeginInlineInfoEntry(ArtMethod* method,
uint32_t dex_pc,
uint32_t num_dex_registers,
@@ -184,6 +188,14 @@ class StackMapStream : public ValueObject {
bool DexRegisterMapEntryEquals(const DexRegisterMapEntry& a, const DexRegisterMapEntry& b) const;
// Fill in the corresponding entries of a register map.
+ void ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding);
+
+ // Returns the index of an entry with the same dex register map as the current_entry,
+ // or kNoSameDexMapFound if no such entry exists.
+ size_t FindEntryWithTheSameDexMap();
+ bool HaveTheSameDexMaps(const StackMapEntry& a, const StackMapEntry& b) const;
+
+ // Fill in the corresponding entries of a register map.
void FillInDexRegisterMap(DexRegisterMap dex_register_map,
uint32_t num_dex_registers,
const BitVector& live_dex_registers_mask,
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 041695187b..330f7f28b6 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -934,7 +934,6 @@ TEST(StackMapTest, CodeOffsetTest) {
EXPECT_EQ(offset_mips64.Uint32Value(kMips64), kMips64InstructionAlignment);
}
-
TEST(StackMapTest, TestDeduplicateStackMask) {
ArenaPool pool;
ArenaAllocator arena(&pool);
@@ -963,4 +962,48 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
stack_map2.GetStackMaskIndex(encoding.stack_map.encoding));
}
+TEST(StackMapTest, TestInvokeInfo) {
+ ArenaPool pool;
+ ArenaAllocator arena(&pool);
+ StackMapStream stream(&arena, kRuntimeISA);
+
+ ArenaBitVector sp_mask(&arena, 0, true);
+ sp_mask.SetBit(1);
+ stream.BeginStackMapEntry(0, 4, 0x3, &sp_mask, 0, 0);
+ stream.AddInvoke(kSuper, 1);
+ stream.EndStackMapEntry();
+ stream.BeginStackMapEntry(0, 8, 0x3, &sp_mask, 0, 0);
+ stream.AddInvoke(kStatic, 3);
+ stream.EndStackMapEntry();
+ stream.BeginStackMapEntry(0, 16, 0x3, &sp_mask, 0, 0);
+ stream.AddInvoke(kDirect, 65535);
+ stream.EndStackMapEntry();
+
+ const size_t size = stream.PrepareForFillIn();
+ MemoryRegion region(arena.Alloc(size, kArenaAllocMisc), size);
+ stream.FillIn(region);
+
+ CodeInfo code_info(region);
+ CodeInfoEncoding encoding = code_info.ExtractEncoding();
+ ASSERT_EQ(3u, code_info.GetNumberOfStackMaps(encoding));
+
+ InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4, encoding));
+ InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8, encoding));
+ InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16, encoding));
+ InvokeInfo invoke_invalid(code_info.GetInvokeInfoForNativePcOffset(12, encoding));
+ EXPECT_FALSE(invoke_invalid.IsValid()); // No entry for that native pc offset.
+ EXPECT_TRUE(invoke1.IsValid());
+ EXPECT_TRUE(invoke2.IsValid());
+ EXPECT_TRUE(invoke3.IsValid());
+ EXPECT_EQ(invoke1.GetInvokeType(encoding.invoke_info.encoding), kSuper);
+ EXPECT_EQ(invoke1.GetMethodIndex(encoding.invoke_info.encoding), 1u);
+ EXPECT_EQ(invoke1.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 4u);
+ EXPECT_EQ(invoke2.GetInvokeType(encoding.invoke_info.encoding), kStatic);
+ EXPECT_EQ(invoke2.GetMethodIndex(encoding.invoke_info.encoding), 3u);
+ EXPECT_EQ(invoke2.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 8u);
+ EXPECT_EQ(invoke3.GetInvokeType(encoding.invoke_info.encoding), kDirect);
+ EXPECT_EQ(invoke3.GetMethodIndex(encoding.invoke_info.encoding), 65535u);
+ EXPECT_EQ(invoke3.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA), 16u);
+}
+
} // namespace art
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 9a3b28b16e..44132868a1 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -587,6 +587,7 @@ class OatDumper {
kByteKindCodeInfoLocationCatalog,
kByteKindCodeInfoDexRegisterMap,
kByteKindCodeInfoEncoding,
+ kByteKindCodeInfoInvokeInfo,
kByteKindCodeInfoStackMasks,
kByteKindCodeInfoRegisterMasks,
kByteKindStackMapNativePc,
@@ -637,6 +638,7 @@ class OatDumper {
Dump(os, "CodeInfoDexRegisterMap ", bits[kByteKindCodeInfoDexRegisterMap], sum);
Dump(os, "CodeInfoStackMasks ", bits[kByteKindCodeInfoStackMasks], sum);
Dump(os, "CodeInfoRegisterMasks ", bits[kByteKindCodeInfoRegisterMasks], sum);
+ Dump(os, "CodeInfoInvokeInfo ", bits[kByteKindCodeInfoInvokeInfo], sum);
// Stack map section.
const int64_t stack_map_bits = std::accumulate(bits + kByteKindStackMapFirst,
bits + kByteKindStackMapLast + 1,
@@ -1592,10 +1594,8 @@ class OatDumper {
CodeInfoEncoding encoding(helper.GetEncoding());
StackMapEncoding stack_map_encoding(encoding.stack_map.encoding);
const size_t num_stack_maps = encoding.stack_map.num_entries;
- std::vector<uint8_t> size_vector;
- encoding.Compress(&size_vector);
if (stats_.AddBitsIfUnique(Stats::kByteKindCodeInfoEncoding,
- size_vector.size() * kBitsPerByte,
+ encoding.HeaderSize() * kBitsPerByte,
oat_method.GetVmapTable())) {
// Stack maps
stats_.AddBits(
@@ -1627,6 +1627,13 @@ class OatDumper {
Stats::kByteKindCodeInfoRegisterMasks,
encoding.register_mask.encoding.BitSize() * encoding.register_mask.num_entries);
+ // Invoke infos
+ if (encoding.invoke_info.num_entries > 0u) {
+ stats_.AddBits(
+ Stats::kByteKindCodeInfoInvokeInfo,
+ encoding.invoke_info.encoding.BitSize() * encoding.invoke_info.num_entries);
+ }
+
// Location catalog
const size_t location_catalog_bytes =
helper.GetCodeInfo().GetDexRegisterLocationCatalogSize(encoding);
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index c2bca5305d..4c3990aad6 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -358,6 +358,29 @@ class QuickArgumentVisitor {
}
}
+ static bool GetInvokeType(ArtMethod** sp, InvokeType* invoke_type, uint32_t* dex_method_index)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK((*sp)->IsCalleeSaveMethod());
+ const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kSaveRefsAndArgs);
+ ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
+ reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
+ uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
+ const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
+ if (!current_code->IsOptimized()) {
+ return false;
+ }
+ uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);
+ CodeInfo code_info = current_code->GetOptimizedCodeInfo();
+ CodeInfoEncoding encoding = code_info.ExtractEncoding();
+ InvokeInfo invoke(code_info.GetInvokeInfoForNativePcOffset(outer_pc_offset, encoding));
+ if (invoke.IsValid()) {
+ *invoke_type = static_cast<InvokeType>(invoke.GetInvokeType(encoding.invoke_info.encoding));
+ *dex_method_index = invoke.GetMethodIndex(encoding.invoke_info.encoding);
+ return true;
+ }
+ return false;
+ }
+
// For the given quick ref and args quick frame, return the caller's PC.
static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
DCHECK((*sp)->IsCalleeSaveMethod());
@@ -977,60 +1000,87 @@ extern "C" const void* artQuickResolutionTrampoline(
ArtMethod* caller = nullptr;
if (!called_method_known_on_entry) {
caller = QuickArgumentVisitor::GetCallingMethod(sp);
- uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
- const DexFile::CodeItem* code;
called_method.dex_file = caller->GetDexFile();
- code = caller->GetCodeItem();
- CHECK_LT(dex_pc, code->insns_size_in_code_units_);
- const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
- Instruction::Code instr_code = instr->Opcode();
- bool is_range;
- switch (instr_code) {
- case Instruction::INVOKE_DIRECT:
- invoke_type = kDirect;
- is_range = false;
- break;
- case Instruction::INVOKE_DIRECT_RANGE:
- invoke_type = kDirect;
- is_range = true;
- break;
- case Instruction::INVOKE_STATIC:
- invoke_type = kStatic;
- is_range = false;
- break;
- case Instruction::INVOKE_STATIC_RANGE:
- invoke_type = kStatic;
- is_range = true;
- break;
- case Instruction::INVOKE_SUPER:
- invoke_type = kSuper;
- is_range = false;
- break;
- case Instruction::INVOKE_SUPER_RANGE:
- invoke_type = kSuper;
- is_range = true;
- break;
- case Instruction::INVOKE_VIRTUAL:
- invoke_type = kVirtual;
- is_range = false;
- break;
- case Instruction::INVOKE_VIRTUAL_RANGE:
- invoke_type = kVirtual;
- is_range = true;
- break;
- case Instruction::INVOKE_INTERFACE:
- invoke_type = kInterface;
- is_range = false;
- break;
- case Instruction::INVOKE_INTERFACE_RANGE:
- invoke_type = kInterface;
- is_range = true;
- break;
- default:
- LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
- UNREACHABLE();
+
+ InvokeType stack_map_invoke_type;
+ uint32_t stack_map_dex_method_idx;
+ const bool found_stack_map = QuickArgumentVisitor::GetInvokeType(sp,
+ &stack_map_invoke_type,
+ &stack_map_dex_method_idx);
+ // For debug builds, we make sure both of the paths are consistent by also looking at the dex
+ // code.
+ if (!found_stack_map || kIsDebugBuild) {
+ uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
+ const DexFile::CodeItem* code;
+ code = caller->GetCodeItem();
+ CHECK_LT(dex_pc, code->insns_size_in_code_units_);
+ const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
+ Instruction::Code instr_code = instr->Opcode();
+ bool is_range;
+ switch (instr_code) {
+ case Instruction::INVOKE_DIRECT:
+ invoke_type = kDirect;
+ is_range = false;
+ break;
+ case Instruction::INVOKE_DIRECT_RANGE:
+ invoke_type = kDirect;
+ is_range = true;
+ break;
+ case Instruction::INVOKE_STATIC:
+ invoke_type = kStatic;
+ is_range = false;
+ break;
+ case Instruction::INVOKE_STATIC_RANGE:
+ invoke_type = kStatic;
+ is_range = true;
+ break;
+ case Instruction::INVOKE_SUPER:
+ invoke_type = kSuper;
+ is_range = false;
+ break;
+ case Instruction::INVOKE_SUPER_RANGE:
+ invoke_type = kSuper;
+ is_range = true;
+ break;
+ case Instruction::INVOKE_VIRTUAL:
+ invoke_type = kVirtual;
+ is_range = false;
+ break;
+ case Instruction::INVOKE_VIRTUAL_RANGE:
+ invoke_type = kVirtual;
+ is_range = true;
+ break;
+ case Instruction::INVOKE_INTERFACE:
+ invoke_type = kInterface;
+ is_range = false;
+ break;
+ case Instruction::INVOKE_INTERFACE_RANGE:
+ invoke_type = kInterface;
+ is_range = true;
+ break;
+ default:
+ LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
+ UNREACHABLE();
+ }
+ called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
+ // Check that the invoke matches what we expected; note that this path only runs for
+ // debug builds.
+ if (found_stack_map) {
+ DCHECK_EQ(stack_map_invoke_type, invoke_type);
+ if (invoke_type != kSuper) {
+ // Super may be sharpened.
+ DCHECK_EQ(stack_map_dex_method_idx, called_method.dex_method_index)
+ << called_method.dex_file->PrettyMethod(stack_map_dex_method_idx) << " "
+ << called_method.dex_file->PrettyMethod(called_method.dex_method_index);
+ }
+ } else {
+ VLOG(oat) << "Accessed dex file for invoke " << invoke_type << " "
+ << called_method.dex_method_index;
+ }
+ } else {
+ invoke_type = stack_map_invoke_type;
+ called_method.dex_method_index = stack_map_dex_method_idx;
}
- called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
} else {
invoke_type = kStatic;
called_method.dex_file = called->GetDexFile();
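
Condensed, the resolution trampoline's new logic is (a sketch of the control flow above, not additional code):

InvokeType invoke_type;
uint32_t method_idx;
const bool found = QuickArgumentVisitor::GetInvokeType(sp, &invoke_type, &method_idx);
if (!found || kIsDebugBuild) {
  // Slow path: decode the INVOKE_* instruction at the caller's dex pc.
  // Debug builds take this path even on a stack map hit, to cross-check the
  // two answers; the method-index check is skipped for kSuper, which may
  // have been sharpened. The VLOG(oat) message above fires only when the
  // dex file actually had to be consulted.
} else {
  // Fast path: invoke_type and method_idx came straight from the oat file;
  // the caller's dex code is never touched.
}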
diff --git a/runtime/oat.h b/runtime/oat.h
index 0f6657b7ed..656b86812e 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,7 +32,7 @@ class InstructionSetFeatures;
class PACKED(4) OatHeader {
public:
static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
- static constexpr uint8_t kOatVersion[] = { '1', '1', '2', '\0' }; // Manual bump (Revert^3 hash-based DexCache types; stack maps).
+ static constexpr uint8_t kOatVersion[] = { '1', '1', '3', '\0' }; // Invoke info change.
static constexpr const char* kImageLocationKey = "image-location";
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
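
The runtime side of the format follows in stack_map.h. Note that Decode, Encode, and ComputeTableOffsets must all visit the tables in the same order (stack_map, register_mask, stack_mask, invoke_info, inline_info), which is why the invoke_info line is added at the same position in all three. A reader-side sketch tying the pieces together (LookUpInvoke is a hypothetical helper name; the calls it makes all appear in this patch):

// Hypothetical convenience wrapper, sketched from the trampoline code above.
// CodeInfo is taken by value because GetInvokeInfoForNativePcOffset is
// non-const in this patch.
bool LookUpInvoke(CodeInfo code_info, uint32_t native_pc_offset,
                  InvokeType* invoke_type, uint32_t* dex_method_index) {
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  InvokeInfo invoke = code_info.GetInvokeInfoForNativePcOffset(native_pc_offset, encoding);
  if (!invoke.IsValid()) {
    return false;  // No invoke recorded at this native pc.
  }
  *invoke_type = static_cast<InvokeType>(invoke.GetInvokeType(encoding.invoke_info.encoding));
  *dex_method_index = invoke.GetMethodIndex(encoding.invoke_info.encoding);
  return true;
}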
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index f7a64026b7..67f0b5715d 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -1145,6 +1145,94 @@ struct ByteSizedTable {
}
};
+// Format is [native pc, invoke type, method index].
+class InvokeInfoEncoding {
+ public:
+ void SetFromSizes(size_t native_pc_max,
+ size_t invoke_type_max,
+ size_t method_index_max) {
+ total_bit_size_ = 0;
+ DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
+ total_bit_size_ += MinimumBitsToStore(native_pc_max);
+ invoke_type_bit_offset_ = total_bit_size_;
+ total_bit_size_ += MinimumBitsToStore(invoke_type_max);
+ method_index_bit_offset_ = total_bit_size_;
+ total_bit_size_ += MinimumBitsToStore(method_index_max);
+ }
+
+ ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
+ return FieldEncoding(kNativePcBitOffset, invoke_type_bit_offset_);
+ }
+
+ ALWAYS_INLINE FieldEncoding GetInvokeTypeEncoding() const {
+ return FieldEncoding(invoke_type_bit_offset_, method_index_bit_offset_);
+ }
+
+ ALWAYS_INLINE FieldEncoding GetMethodIndexEncoding() const {
+ return FieldEncoding(method_index_bit_offset_, total_bit_size_);
+ }
+
+ ALWAYS_INLINE size_t BitSize() const {
+ return total_bit_size_;
+ }
+
+ template<typename Vector>
+ void Encode(Vector* dest) const {
+ static_assert(alignof(InvokeInfoEncoding) == 1, "Should not require alignment");
+ const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
+ dest->insert(dest->end(), ptr, ptr + sizeof(*this));
+ }
+
+ void Decode(const uint8_t** ptr) {
+ *this = *reinterpret_cast<const InvokeInfoEncoding*>(*ptr);
+ *ptr += sizeof(*this);
+ }
+
+ private:
+ static constexpr uint8_t kNativePcBitOffset = 0;
+ uint8_t invoke_type_bit_offset_;
+ uint8_t method_index_bit_offset_;
+ uint8_t total_bit_size_;
+};
+
+class InvokeInfo {
+ public:
+ explicit InvokeInfo(BitMemoryRegion region) : region_(region) {}
+
+ ALWAYS_INLINE uint32_t GetNativePcOffset(const InvokeInfoEncoding& encoding,
+ InstructionSet instruction_set) const {
+ CodeOffset offset(
+ CodeOffset::FromCompressedOffset(encoding.GetNativePcEncoding().Load(region_)));
+ return offset.Uint32Value(instruction_set);
+ }
+
+ ALWAYS_INLINE void SetNativePcCodeOffset(const InvokeInfoEncoding& encoding,
+ CodeOffset native_pc_offset) {
+ encoding.GetNativePcEncoding().Store(region_, native_pc_offset.CompressedValue());
+ }
+
+ ALWAYS_INLINE uint32_t GetInvokeType(const InvokeInfoEncoding& encoding) const {
+ return encoding.GetInvokeTypeEncoding().Load(region_);
+ }
+
+ ALWAYS_INLINE void SetInvokeType(const InvokeInfoEncoding& encoding, uint32_t invoke_type) {
+ encoding.GetInvokeTypeEncoding().Store(region_, invoke_type);
+ }
+
+ ALWAYS_INLINE uint32_t GetMethodIndex(const InvokeInfoEncoding& encoding) const {
+ return encoding.GetMethodIndexEncoding().Load(region_);
+ }
+
+ ALWAYS_INLINE void SetMethodIndex(const InvokeInfoEncoding& encoding, uint32_t method_index) {
+ encoding.GetMethodIndexEncoding().Store(region_, method_index);
+ }
+
+ bool IsValid() const { return region_.pointer() != nullptr; }
+
+ private:
+ BitMemoryRegion region_;
+};
+
// Most of the fields are encoded as ULEB128 to save space.
struct CodeInfoEncoding {
static constexpr uint32_t kInvalidSize = static_cast<size_t>(-1);
@@ -1154,6 +1242,7 @@ struct CodeInfoEncoding {
BitEncodingTable<StackMapEncoding> stack_map;
BitEncodingTable<BitRegionEncoding> register_mask;
BitEncodingTable<BitRegionEncoding> stack_mask;
+ BitEncodingTable<InvokeInfoEncoding> invoke_info;
BitEncodingTable<InlineInfoEncoding> inline_info;
CodeInfoEncoding() {}
@@ -1165,6 +1254,7 @@ struct CodeInfoEncoding {
stack_map.Decode(&ptr);
register_mask.Decode(&ptr);
stack_mask.Decode(&ptr);
+ invoke_info.Decode(&ptr);
if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
inline_info.Decode(&ptr);
} else {
@@ -1183,6 +1273,7 @@ struct CodeInfoEncoding {
stack_map.Encode(dest);
register_mask.Encode(dest);
stack_mask.Encode(dest);
+ invoke_info.Encode(dest);
if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
inline_info.Encode(dest);
}
@@ -1199,6 +1290,7 @@ struct CodeInfoEncoding {
stack_map.UpdateBitOffset(&bit_offset);
register_mask.UpdateBitOffset(&bit_offset);
stack_mask.UpdateBitOffset(&bit_offset);
+ invoke_info.UpdateBitOffset(&bit_offset);
inline_info.UpdateBitOffset(&bit_offset);
cache_non_header_size = RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte - HeaderSize();
}
@@ -1303,6 +1395,10 @@ class CodeInfo {
return encoding.stack_map.encoding.BitSize() * GetNumberOfStackMaps(encoding);
}
+ InvokeInfo GetInvokeInfo(const CodeInfoEncoding& encoding, size_t index) const {
+ return InvokeInfo(encoding.invoke_info.BitRegion(region_, index));
+ }
+
DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
const CodeInfoEncoding& encoding,
size_t number_of_dex_registers) const {
@@ -1426,6 +1522,17 @@ class CodeInfo {
return StackMap();
}
+ InvokeInfo GetInvokeInfoForNativePcOffset(uint32_t native_pc_offset,
+ const CodeInfoEncoding& encoding) {
+ for (size_t index = 0; index < encoding.invoke_info.num_entries; index++) {
+ InvokeInfo item = GetInvokeInfo(encoding, index);
+ if (item.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA) == native_pc_offset) {
+ return item;
+ }
+ }
+ return InvokeInfo(BitMemoryRegion());
+ }
+
// Dump this CodeInfo object on `os`. `code_offset` is the (absolute)
// native PC of the compiled method and `number_of_dex_registers` the
// number of Dex virtual registers used in this method. If