author Mythri Alle <mythria@google.com> 2022-07-13 07:17:56 +0000
committer Mythri Alle <mythria@google.com> 2022-07-13 09:25:08 +0000
commit 26aef1213dbdd7ab03688d898cf802c8c8d7e610
tree   62c107594123219d845a6730b4781706682ed7b0 /compiler/optimizing
parent 4ec05bb85ba1107c8295a295eec7e70bace0d047
Revert "Introduce a flag to check if JITed code has instrumentation support"
This reverts commit fc067a360d14db5f84fd4b58e0dee6cb04ee759b.

Reason for revert: test failures on jit-on-first-use:
https://android-build.googleplex.com/builds/submitted/8821659/art-jit-on-first-use/latest/view/logs/build_error.log

Change-Id: Ie9bc243baac777ecc4f47cc961494ca6ab3ef4c6
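In effect, the revert restores the pre-flag StackMapStream::BeginMethod signature: the trailing debuggable parameter is dropped, baseline regains its default value, and Encode() no longer sets a CodeInfo::kIsDebuggable flag. A minimal sketch of the restored declaration and a typical call site, reconstructed from the hunks below (not an authoritative excerpt of the headers):

    // Restored declaration (stack_map_stream.h): no debuggable flag;
    // baseline defaults to false, so the short call form compiles again.
    void BeginMethod(size_t frame_size_in_bytes,
                     size_t core_spill_mask,
                     size_t fp_spill_mask,
                     uint32_t num_dex_registers,
                     bool baseline = false);

    // Typical JNI-stub call site (optimizing_compiler.cc) after the revert:
    stack_map_stream->BeginMethod(jni_compiled_method.GetFrameSize(),
                                  jni_compiled_method.GetCoreSpillMask(),
                                  jni_compiled_method.GetFpSpillMask(),
                                  /* num_dex_registers= */ 0,
                                  /* baseline= */ false);

Tests accordingly shrink to the four-argument form, e.g. stream.BeginMethod(32, 0, 0, 2), relying on the restored default for baseline.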
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc       3
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc  29
-rw-r--r--  compiler/optimizing/stack_map_stream.cc     5
-rw-r--r--  compiler/optimizing/stack_map_stream.h      4
-rw-r--r--  compiler/optimizing/stack_map_test.cc       56
5 files changed, 22 insertions(+), 75 deletions(-)
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index d8fc3ba690..8bd4406332 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -389,8 +389,7 @@ void CodeGenerator::Compile(CodeAllocator* allocator) {
core_spill_mask_,
fpu_spill_mask_,
GetGraph()->GetNumberOfVRegs(),
- GetGraph()->IsCompilingBaseline(),
- GetGraph()->IsDebuggable());
+ GetGraph()->IsCompilingBaseline());
size_t frame_start = GetAssembler()->CodeSize();
GenerateFrameEntry();
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index b0fa251f55..a499c55757 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1115,18 +1115,17 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item,
static ScopedArenaVector<uint8_t> CreateJniStackMap(ScopedArenaAllocator* allocator,
const JniCompiledMethod& jni_compiled_method,
- size_t code_size,
- bool debuggable) {
+ size_t code_size) {
// StackMapStream is quite large, so allocate it using the ScopedArenaAllocator
// to stay clear of the frame size limit.
std::unique_ptr<StackMapStream> stack_map_stream(
new (allocator) StackMapStream(allocator, jni_compiled_method.GetInstructionSet()));
- stack_map_stream->BeginMethod(jni_compiled_method.GetFrameSize(),
- jni_compiled_method.GetCoreSpillMask(),
- jni_compiled_method.GetFpSpillMask(),
- /* num_dex_registers= */ 0,
- /* baseline= */ false,
- debuggable);
+ stack_map_stream->BeginMethod(
+ jni_compiled_method.GetFrameSize(),
+ jni_compiled_method.GetCoreSpillMask(),
+ jni_compiled_method.GetFpSpillMask(),
+ /* num_dex_registers= */ 0,
+ /* baseline= */ false);
stack_map_stream->EndMethod(code_size);
return stack_map_stream->Encode();
}
@@ -1188,11 +1187,8 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
- ScopedArenaVector<uint8_t> stack_map =
- CreateJniStackMap(&stack_map_allocator,
- jni_compiled_method,
- jni_compiled_method.GetCode().size(),
- compiler_options.GetDebuggable() && compiler_options.IsJitCompiler());
+ ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap(
+ &stack_map_allocator, jni_compiled_method, jni_compiled_method.GetCode().size());
return CompiledMethod::SwapAllocCompiledMethod(
GetCompiledMethodStorage(),
jni_compiled_method.GetInstructionSet(),
@@ -1253,11 +1249,8 @@ bool OptimizingCompiler::JitCompile(Thread* self,
ArenaStack arena_stack(runtime->GetJitArenaPool());
// StackMapStream is large and it does not fit into this frame, so we need helper method.
ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map.
- ScopedArenaVector<uint8_t> stack_map =
- CreateJniStackMap(&stack_map_allocator,
- jni_compiled_method,
- jni_compiled_method.GetCode().size(),
- compiler_options.GetDebuggable() && compiler_options.IsJitCompiler());
+ ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap(
+ &stack_map_allocator, jni_compiled_method, jni_compiled_method.GetCode().size());
ArrayRef<const uint8_t> reserved_code;
ArrayRef<const uint8_t> reserved_data;
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index c13a35567b..f55bbee1c8 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -49,8 +49,7 @@ void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
size_t core_spill_mask,
size_t fp_spill_mask,
uint32_t num_dex_registers,
- bool baseline,
- bool debuggable) {
+ bool baseline) {
DCHECK(!in_method_) << "Mismatched Begin/End calls";
in_method_ = true;
DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";
@@ -61,7 +60,6 @@ void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
fp_spill_mask_ = fp_spill_mask;
num_dex_registers_ = num_dex_registers;
baseline_ = baseline;
- debuggable_ = debuggable;
if (kVerifyStackMaps) {
dchecks_.emplace_back([=](const CodeInfo& code_info) {
@@ -369,7 +367,6 @@ ScopedArenaVector<uint8_t> StackMapStream::Encode() {
uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
- flags |= debuggable_ ? CodeInfo::kIsDebuggable : 0;
DCHECK_LE(flags, kVarintMax); // Ensure flags can be read directly as byte.
uint32_t bit_table_flags = 0;
ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 1aaa6aee9e..27145a174c 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -64,8 +64,7 @@ class StackMapStream : public DeletableArenaObject<kArenaAllocStackMapStream> {
size_t core_spill_mask,
size_t fp_spill_mask,
uint32_t num_dex_registers,
- bool baseline,
- bool debuggable);
+ bool baseline = false);
void EndMethod(size_t code_size);
void BeginStackMapEntry(uint32_t dex_pc,
@@ -126,7 +125,6 @@ class StackMapStream : public DeletableArenaObject<kArenaAllocStackMapStream> {
uint32_t fp_spill_mask_ = 0;
uint32_t num_dex_registers_ = 0;
bool baseline_;
- bool debuggable_;
BitTableBuilder<StackMap> stack_maps_;
BitTableBuilder<RegisterMask> register_masks_;
BitmapTableBuilder stack_masks_;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 23af1f7fa1..f6a739e15a 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -52,12 +52,7 @@ TEST(StackMapTest, Test1) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
size_t number_of_dex_registers = 2;
@@ -111,12 +106,7 @@ TEST(StackMapTest, Test2) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -310,12 +300,7 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -378,12 +363,7 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
@@ -431,12 +411,7 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
@@ -492,12 +467,7 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 1,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 1);
ArenaBitVector sp_mask(&allocator, 0, false);
stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
@@ -542,12 +512,7 @@ TEST(StackMapTest, InlineTest) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 2,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 2);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -737,12 +702,7 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(/* frame_size_in_bytes= */ 32,
- /* core_spill_mask= */ 0,
- /* fp_spill_mask= */ 0,
- /* num_dex_registers= */ 0,
- /* baseline= */ false,
- /* debuggable= */ false);
+ stream.BeginMethod(32, 0, 0, 0);
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);