diff options
author | 2022-07-14 09:38:49 +0000 | |
---|---|---|
committer | 2022-07-14 13:49:54 +0000 | |
commit | d9e8377d188b2ca0f71185ec3fecb241392184e0 (patch) | |
tree | db7f0c578855d8096391ed388d32eec85d6c47b8 /compiler/optimizing/optimizing_compiler.cc | |
parent | 863968fd6db8bb00c54ac5536f2b1f3d5f8ab813 (diff) |
Reland "Introduce a flag to check if JITed code has instrumentation support"
This reverts commit 26aef1213dbdd7ab03688d898cf802c8c8d7e610.
Reason for revert: Relanding after a fix. When checking if the caller
is deoptimizable we should consider the outer caller and not the
inlined method that we could be executing currently.
Bug: 222479430
Change-Id: I37cbc8f1b34113a36a92c3801db72b16d2b9c81a
Diffstat (limited to 'compiler/optimizing/optimizing_compiler.cc')
-rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 29 |
1 file changed, 18 insertions, 11 deletions
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index a499c55757..b0fa251f55 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -1115,17 +1115,18 @@ CompiledMethod* OptimizingCompiler::Compile(const dex::CodeItem* code_item, static ScopedArenaVector<uint8_t> CreateJniStackMap(ScopedArenaAllocator* allocator, const JniCompiledMethod& jni_compiled_method, - size_t code_size) { + size_t code_size, + bool debuggable) { // StackMapStream is quite large, so allocate it using the ScopedArenaAllocator // to stay clear of the frame size limit. std::unique_ptr<StackMapStream> stack_map_stream( new (allocator) StackMapStream(allocator, jni_compiled_method.GetInstructionSet())); - stack_map_stream->BeginMethod( - jni_compiled_method.GetFrameSize(), - jni_compiled_method.GetCoreSpillMask(), - jni_compiled_method.GetFpSpillMask(), - /* num_dex_registers= */ 0, - /* baseline= */ false); + stack_map_stream->BeginMethod(jni_compiled_method.GetFrameSize(), + jni_compiled_method.GetCoreSpillMask(), + jni_compiled_method.GetFpSpillMask(), + /* num_dex_registers= */ 0, + /* baseline= */ false, + debuggable); stack_map_stream->EndMethod(code_size); return stack_map_stream->Encode(); } @@ -1187,8 +1188,11 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags, MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub); ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map. 
- ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap( - &stack_map_allocator, jni_compiled_method, jni_compiled_method.GetCode().size()); + ScopedArenaVector<uint8_t> stack_map = + CreateJniStackMap(&stack_map_allocator, + jni_compiled_method, + jni_compiled_method.GetCode().size(), + compiler_options.GetDebuggable() && compiler_options.IsJitCompiler()); return CompiledMethod::SwapAllocCompiledMethod( GetCompiledMethodStorage(), jni_compiled_method.GetInstructionSet(), @@ -1249,8 +1253,11 @@ bool OptimizingCompiler::JitCompile(Thread* self, ArenaStack arena_stack(runtime->GetJitArenaPool()); // StackMapStream is large and it does not fit into this frame, so we need helper method. ScopedArenaAllocator stack_map_allocator(&arena_stack); // Will hold the stack map. - ScopedArenaVector<uint8_t> stack_map = CreateJniStackMap( - &stack_map_allocator, jni_compiled_method, jni_compiled_method.GetCode().size()); + ScopedArenaVector<uint8_t> stack_map = + CreateJniStackMap(&stack_map_allocator, + jni_compiled_method, + jni_compiled_method.GetCode().size(), + compiler_options.GetDebuggable() && compiler_options.IsJitCompiler()); ArrayRef<const uint8_t> reserved_code; ArrayRef<const uint8_t> reserved_data; |