Add method frame info to CodeInfo.
The stored information will be used in follow-up CLs.
This temporarily increases .oat file size by 0.7%.
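The frame info (frame size in bytes, core spill mask, FP spill mask
and number of dex registers) is now passed to StackMapStream once per
method through a new BeginMethod/EndMethod pair instead of with every
BeginStackMapEntry call, and is stored as four ULEB128 values at the
start of the CodeInfo data. A sketch of the new calling sequence
(arguments abbreviated; see the diffs below for the exact signatures):

  StackMapStream stream(&allocator, isa);
  stream.BeginMethod(frame_size_in_bytes, core_spill_mask,
                     fp_spill_mask, num_dex_registers);
  stream.BeginStackMapEntry(dex_pc, native_pc_offset);
  // Dex register entries and inline infos are added here as before.
  stream.EndStackMapEntry();
  stream.EndMethod();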
Test: test-art-host-gtest
Change-Id: Ie7d898b06398ae44287bb1e8153861ab112a216c
diff --git a/compiler/exception_test.cc b/compiler/exception_test.cc
index 8235be9..b56a991 100644
--- a/compiler/exception_test.cc
+++ b/compiler/exception_test.cc
@@ -77,13 +77,10 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stack_maps(&allocator, kRuntimeISA);
- stack_maps.BeginStackMapEntry(kDexPc,
- native_pc_offset,
- /* register_mask */ 0u,
- /* sp_mask */ nullptr,
- /* num_dex_registers */ 0u,
- /* inlining_depth */ 0u);
+ stack_maps.BeginMethod(/* frame_size_in_bytes */ 4 * sizeof(void*), /* core_spill_mask */ 0u, /* fp_spill_mask */ 0u, /* num_dex_registers */ 0u);
+ stack_maps.BeginStackMapEntry(kDexPc, native_pc_offset);
stack_maps.EndStackMapEntry();
+ stack_maps.EndMethod();
const size_t stack_maps_size = stack_maps.PrepareForFillIn();
const size_t header_size = sizeof(OatQuickMethodHeader);
const size_t code_alignment = GetInstructionSetAlignment(kRuntimeISA);
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 2589869..21b2226 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -393,6 +393,11 @@
HGraphVisitor* instruction_visitor = GetInstructionVisitor();
DCHECK_EQ(current_block_index_, 0u);
+ GetStackMapStream()->BeginMethod(HasEmptyFrame() ? 0 : frame_size_,
+ core_spill_mask_,
+ fpu_spill_mask_,
+ GetGraph()->GetNumberOfVRegs());
+
size_t frame_start = GetAssembler()->CodeSize();
GenerateFrameEntry();
DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));
@@ -435,6 +440,8 @@
// Finalize instructions in the assembler.
Finalize(allocator);
+
+ GetStackMapStream()->EndMethod();
}
void CodeGenerator::Finalize(CodeAllocator* allocator) {
@@ -1087,7 +1094,7 @@
if (instruction == nullptr) {
// For stack overflow checks and native-debug-info entries without dex register
// mapping (i.e. start of basic block or start of slow path).
- stack_map_stream->BeginStackMapEntry(dex_pc, native_pc, 0, 0, 0, 0);
+ stack_map_stream->BeginStackMapEntry(dex_pc, native_pc);
stack_map_stream->EndStackMapEntry();
return;
}
@@ -1135,8 +1142,6 @@
native_pc,
register_mask,
locations->GetStackMask(),
- outer_environment_size,
- inlining_depth,
kind);
EmitEnvironment(environment, slow_path);
// Record invoke info, the common case for the trampoline is super and static invokes. Only
@@ -1204,15 +1209,12 @@
uint32_t dex_pc = block->GetDexPc();
uint32_t num_vregs = graph_->GetNumberOfVRegs();
- uint32_t inlining_depth = 0; // Inlining of catch blocks is not supported at the moment.
uint32_t native_pc = GetAddressOf(block);
stack_map_stream->BeginStackMapEntry(dex_pc,
native_pc,
/* register_mask */ 0,
/* stack_mask */ nullptr,
- num_vregs,
- inlining_depth,
StackMap::Kind::Catch);
HInstruction* current_phi = block->GetFirstPhi();
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index 57f47af..cb98805 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -39,22 +39,33 @@
StackMap::PackNativePc(native_pc_offset, instruction_set_);
}
+void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
+ size_t core_spill_mask,
+ size_t fp_spill_mask,
+ uint32_t num_dex_registers) {
+ DCHECK(!in_method_) << "Mismatched Begin/End calls";
+ in_method_ = true;
+ DCHECK_EQ(frame_size_in_bytes_, 0u) << "BeginMethod was already called";
+
+ frame_size_in_bytes_ = frame_size_in_bytes;
+ core_spill_mask_ = core_spill_mask;
+ fp_spill_mask_ = fp_spill_mask;
+ num_dex_registers_ = num_dex_registers;
+}
+
+void StackMapStream::EndMethod() {
+ DCHECK(in_method_) << "Mismatched Begin/End calls";
+ in_method_ = false;
+}
+
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
uint32_t native_pc_offset,
uint32_t register_mask,
BitVector* stack_mask,
- uint32_t num_dex_registers,
- uint8_t inlining_depth,
StackMap::Kind kind) {
+ DCHECK(in_method_) << "Call BeginMethod first";
DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
in_stack_map_ = true;
- // num_dex_registers_ is the constant per-method number of registers.
- // However we initially don't know what the value is, so lazily initialize it.
- if (num_dex_registers_ == 0) {
- num_dex_registers_ = num_dex_registers;
- } else if (num_dex_registers > 0) {
- DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
- }
current_stack_map_ = BitTableBuilder<StackMap>::Entry();
current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
@@ -84,7 +95,7 @@
lazy_stack_masks_.push_back(stack_mask);
current_inline_infos_.clear();
current_dex_registers_.clear();
- expected_num_dex_registers_ = num_dex_registers;
+ expected_num_dex_registers_ = num_dex_registers_;
if (kVerifyStackMaps) {
size_t stack_map_index = stack_maps_.size();
@@ -109,8 +120,6 @@
for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
}
- CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
- CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
});
}
}
@@ -118,9 +127,9 @@
void StackMapStream::EndStackMapEntry() {
DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
in_stack_map_ = false;
- DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
// Generate index into the InlineInfo table.
+ size_t inlining_depth = current_inline_infos_.size();
if (!current_inline_infos_.empty()) {
current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
current_stack_map_[StackMap::kInlineInfoIndex] =
@@ -128,9 +137,23 @@
}
// Generate delta-compressed dex register map.
- CreateDexRegisterMap();
+ size_t num_dex_registers = current_dex_registers_.size();
+ if (!current_dex_registers_.empty()) {
+ DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+ CreateDexRegisterMap();
+ }
stack_maps_.Add(current_stack_map_);
+
+ if (kVerifyStackMaps) {
+ size_t stack_map_index = stack_maps_.size() - 1;
+ dchecks_.emplace_back([=](const CodeInfo& code_info) {
+ StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
+ CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
+ CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
+ CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
+ });
+ }
}
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
@@ -157,6 +180,7 @@
uint32_t dex_pc,
uint32_t num_dex_registers,
const DexFile* outer_dex_file) {
+ DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
in_inline_info_ = true;
DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
@@ -301,7 +325,11 @@
}
}
- BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_);
+ EncodeUnsignedLeb128(&out_, frame_size_in_bytes_);
+ EncodeUnsignedLeb128(&out_, core_spill_mask_);
+ EncodeUnsignedLeb128(&out_, fp_spill_mask_);
+ EncodeUnsignedLeb128(&out_, num_dex_registers_);
+ BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_, out_.size() * kBitsPerByte);
stack_maps_.Encode(out);
register_masks_.Encode(out);
stack_masks_.Encode(out);
@@ -310,7 +338,6 @@
dex_register_masks_.Encode(out);
dex_register_maps_.Encode(out);
dex_register_catalog_.Encode(out);
- EncodeVarintBits(out, num_dex_registers_);
return UnsignedLeb128Size(out_.size()) + out_.size();
}
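With this change the serialized CodeInfo starts with byte-aligned
ULEB128 fields ahead of the bit-encoded tables. Assuming
FillInCodeInfo (not shown in this diff) still prepends the ULEB128
size prefix, the resulting layout is roughly:

  ULEB128 non_header_size
  ULEB128 frame_size_in_bytes
  ULEB128 core_spill_mask
  ULEB128 fp_spill_mask
  ULEB128 number_of_dex_registers
  bit-encoded tables (stack maps, register masks, stack masks, ...)

Keeping the frame info in LEB128 form means it can be read with
DecodeUnsignedLeb128 alone, without constructing a BitMemoryReader
(see CodeInfo::DecodeFrameInfo in runtime/stack_map.h below).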
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 7d1820d..ed865b1 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -50,8 +50,6 @@
out_(allocator->Adapter(kArenaAllocStackMapStream)),
method_infos_(allocator),
lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
- in_stack_map_(false),
- in_inline_info_(false),
current_stack_map_(),
current_inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
current_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
@@ -61,12 +59,16 @@
temp_dex_register_map_(allocator->Adapter(kArenaAllocStackMapStream)) {
}
+ void BeginMethod(size_t frame_size_in_bytes,
+ size_t core_spill_mask,
+ size_t fp_spill_mask,
+ uint32_t num_dex_registers);
+ void EndMethod();
+
void BeginStackMapEntry(uint32_t dex_pc,
uint32_t native_pc_offset,
- uint32_t register_mask,
- BitVector* sp_mask,
- uint32_t num_dex_registers,
- uint8_t inlining_depth,
+ uint32_t register_mask = 0,
+ BitVector* sp_mask = nullptr,
StackMap::Kind kind = StackMap::Kind::Default);
void EndStackMapEntry();
@@ -103,6 +105,10 @@
void CreateDexRegisterMap();
const InstructionSet instruction_set_;
+ uint32_t frame_size_in_bytes_ = 0;
+ uint32_t core_spill_mask_ = 0;
+ uint32_t fp_spill_mask_ = 0;
+ uint32_t num_dex_registers_ = 0;
BitTableBuilder<StackMap> stack_maps_;
BitTableBuilder<RegisterMask> register_masks_;
BitmapTableBuilder stack_masks_;
@@ -111,7 +117,6 @@
BitmapTableBuilder dex_register_masks_;
BitTableBuilder<MaskInfo> dex_register_maps_;
BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
- uint32_t num_dex_registers_ = 0; // TODO: Make this const and get the value in constructor.
ScopedArenaVector<uint8_t> out_;
BitTableBuilderBase<1> method_infos_;
@@ -119,8 +124,9 @@
ScopedArenaVector<BitVector*> lazy_stack_masks_;
// Variables which track the current state between Begin/End calls.
- bool in_stack_map_;
- bool in_inline_info_;
+ bool in_method_ = false;
+ bool in_stack_map_ = false;
+ bool in_inline_info_ = false;
BitTableBuilder<StackMap>::Entry current_stack_map_;
ScopedArenaVector<BitTableBuilder<InlineInfo>::Entry> current_inline_infos_;
ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index fd85667..6241e0c 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -52,14 +52,16 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
size_t number_of_dex_registers = 2;
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Short location.
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -107,6 +109,7 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -114,7 +117,7 @@
sp_mask1.SetBit(4);
size_t number_of_dex_registers = 2;
size_t number_of_dex_registers_in_inline_info = 0;
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 2);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -126,7 +129,7 @@
ArenaBitVector sp_mask2(&allocator, 0, true);
sp_mask2.SetBit(3);
sp_mask2.SetBit(8);
- stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2);
stream.AddDexRegisterEntry(Kind::kInRegister, 18); // Short location.
stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3); // Short location.
stream.EndStackMapEntry();
@@ -134,7 +137,7 @@
ArenaBitVector sp_mask3(&allocator, 0, true);
sp_mask3.SetBit(1);
sp_mask3.SetBit(5);
- stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3);
stream.AddDexRegisterEntry(Kind::kInRegister, 6); // Short location.
stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8); // Short location.
stream.EndStackMapEntry();
@@ -142,11 +145,12 @@
ArenaBitVector sp_mask4(&allocator, 0, true);
sp_mask4.SetBit(6);
sp_mask4.SetBit(7);
- stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4);
stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3); // Short location, same in stack map 2.
stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1); // Short location.
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -303,6 +307,7 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -310,7 +315,7 @@
sp_mask1.SetBit(4);
const size_t number_of_dex_registers = 2;
const size_t number_of_dex_registers_in_inline_info = 2;
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 1);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -319,6 +324,7 @@
stream.EndInlineInfoEntry();
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -367,14 +373,16 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kNone, 0); // No location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -416,25 +424,27 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
// First stack map.
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Second stack map, which should share the same dex register map.
- stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 65 * kPcAlign, 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kInRegister, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
// Third stack map (doesn't share the dex register map).
- stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 66 * kPcAlign, 0x3, &sp_mask);
stream.AddDexRegisterEntry(Kind::kInRegister, 2); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -473,17 +483,19 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 1);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 0;
- stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
stream.EndStackMapEntry();
number_of_dex_registers = 1;
- stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask, number_of_dex_registers, 0);
+ stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask);
stream.AddDexRegisterEntry(Kind::kNone, 0);
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -521,6 +533,7 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -528,7 +541,7 @@
sp_mask1.SetBit(4);
// First stack map.
- stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1, 2, 2);
+ stream.BeginStackMapEntry(0, 10 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kInStack, 0);
stream.AddDexRegisterEntry(Kind::kConstant, 4);
@@ -544,7 +557,7 @@
stream.EndStackMapEntry();
// Second stack map.
- stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1, 2, 3);
+ stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
@@ -562,13 +575,13 @@
stream.EndStackMapEntry();
// Third stack map.
- stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1, 2, 0);
+ stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kNone, 0);
stream.AddDexRegisterEntry(Kind::kConstant, 4);
stream.EndStackMapEntry();
// Fourth stack map.
- stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1, 2, 3);
+ stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1);
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
@@ -584,6 +597,7 @@
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -725,15 +739,17 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 0);
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);
sp_mask.SetBit(4);
- stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
stream.EndStackMapEntry();
+ stream.EndMethod();
size_t size = stream.PrepareForFillIn();
void* memory = allocator.Alloc(size, kArenaAllocMisc);
MemoryRegion region(memory, size);
@@ -753,19 +769,21 @@
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
+ stream.BeginMethod(32, 0, 0, 0);
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);
- stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask);
stream.AddInvoke(kSuper, 1);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask);
stream.AddInvoke(kStatic, 3);
stream.EndStackMapEntry();
- stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask, 0, 0);
+ stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask);
stream.AddInvoke(kDirect, 65535);
stream.EndStackMapEntry();
+ stream.EndMethod();
const size_t code_info_size = stream.PrepareForFillIn();
MemoryRegion code_info_region(allocator.Alloc(code_info_size, kArenaAllocMisc), code_info_size);
stream.FillInCodeInfo(code_info_region);
diff --git a/runtime/oat.h b/runtime/oat.h
index 22c6a39..02fad46 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@
class PACKED(4) OatHeader {
public:
static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
- // Last oat version changed reason: Add Kind column to stack maps.
- static constexpr uint8_t kOatVersion[] = { '1', '4', '9', '\0' };
+ // Last oat version changed reason: Add method frame info to CodeInfo.
+ static constexpr uint8_t kOatVersion[] = { '1', '5', '0', '\0' };
static constexpr const char* kImageLocationKey = "image-location";
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 7f7f6fc..42d2478 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -22,14 +22,24 @@
#include "art_method.h"
#include "base/indenter.h"
#include "base/stats.h"
+#include "oat_quick_method_header.h"
#include "scoped_thread_state_change-inl.h"
namespace art {
+CodeInfo::CodeInfo(const OatQuickMethodHeader* header)
+ : CodeInfo(header->GetOptimizedCodeInfoPtr()) {
+}
+
void CodeInfo::Decode(const uint8_t* data) {
size_t non_header_size = DecodeUnsignedLeb128(&data);
size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
- MemoryRegion region(const_cast<uint8_t*>(data), non_header_size);
+ const uint8_t* end = data + non_header_size;
+ frame_size_in_bytes_ = DecodeUnsignedLeb128(&data);
+ core_spill_mask_ = DecodeUnsignedLeb128(&data);
+ fp_spill_mask_ = DecodeUnsignedLeb128(&data);
+ number_of_dex_registers_ = DecodeUnsignedLeb128(&data);
+ MemoryRegion region(const_cast<uint8_t*>(data), end - data);
BitMemoryReader reader(BitMemoryRegion(region), /* bit_offset */ 0);
stack_maps_.Decode(reader);
register_masks_.Decode(reader);
@@ -39,8 +49,7 @@
dex_register_masks_.Decode(reader);
dex_register_maps_.Decode(reader);
dex_register_catalog_.Decode(reader);
- number_of_dex_registers_ = DecodeVarintBits(reader);
- CHECK_EQ(non_header_size, BitsToBytesRoundUp(reader.GetBitOffset())) << "Invalid CodeInfo";
+ CHECK_EQ(BitsToBytesRoundUp(reader.GetBitOffset()), region.size()) << "Invalid CodeInfo";
}
BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 83f0c05..cb43ced 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -19,6 +19,7 @@
#include <limits>
+#include "arch/instruction_set.h"
#include "base/bit_memory_region.h"
#include "base/bit_table.h"
#include "base/bit_utils.h"
@@ -28,10 +29,11 @@
#include "dex/dex_file_types.h"
#include "dex_register_location.h"
#include "method_info.h"
-#include "oat_quick_method_header.h"
+#include "quick/quick_method_frame_info.h"
namespace art {
+class OatQuickMethodHeader;
class VariableIndentationOutputStream;
// Size of a frame slot, in bytes. This constant is a signed value,
@@ -290,9 +292,7 @@
DCHECK_EQ(size_, region.size());
}
- explicit CodeInfo(const OatQuickMethodHeader* header)
- : CodeInfo(header->GetOptimizedCodeInfoPtr()) {
- }
+ explicit CodeInfo(const OatQuickMethodHeader* header);
size_t Size() const {
return size_;
@@ -435,6 +435,14 @@
// Accumulate code info size statistics into the given Stats tree.
void AddSizeStats(/*out*/ Stats* parent) const;
+ ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data) {
+ DecodeUnsignedLeb128(&data); // Skip the size field.
+ uint32_t frame_size_in_bytes = DecodeUnsignedLeb128(&data); // Use locals: constructor
+ uint32_t core_spill_mask = DecodeUnsignedLeb128(&data); // argument evaluation order
+ uint32_t fp_spill_mask = DecodeUnsignedLeb128(&data); // is unspecified in C++.
+ return QuickMethodFrameInfo(frame_size_in_bytes, core_spill_mask, fp_spill_mask);
+ }
+
private:
// Returns the lower bound (first stack map whose pc is greater than or equal to the desired one).
// It ignores catch stack maps at the end (it is the same as if they had the maximum pc value).
@@ -448,6 +456,10 @@
void Decode(const uint8_t* data);
size_t size_;
+ uint32_t frame_size_in_bytes_;
+ uint32_t core_spill_mask_;
+ uint32_t fp_spill_mask_;
+ uint32_t number_of_dex_registers_; // Excludes any inlined methods.
BitTable<StackMap> stack_maps_;
BitTable<RegisterMask> register_masks_;
BitTable<MaskInfo> stack_masks_;
@@ -456,7 +468,6 @@
BitTable<MaskInfo> dex_register_masks_;
BitTable<DexRegisterMapInfo> dex_register_maps_;
BitTable<DexRegisterInfo> dex_register_catalog_;
- uint32_t number_of_dex_registers_; // Excludes any inlined methods.
};
#undef ELEMENT_BYTE_OFFSET_AFTER
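A minimal sketch of how a caller could read a method's frame info
with the new helper, without decoding any of the bit tables. The
wrapper function below is hypothetical and not part of this CL;
GetOptimizedCodeInfoPtr() is taken from the CodeInfo constructor
above:

  // Hypothetical wrapper, not part of this CL.
  QuickMethodFrameInfo GetFrameInfo(const OatQuickMethodHeader* header) {
    return CodeInfo::DecodeFrameInfo(header->GetOptimizedCodeInfoPtr());
  }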