author Mythri Alle <mythria@google.com> 2022-03-23 12:49:30 +0000
committer Mythri Alle <mythria@google.com> 2022-07-12 13:03:15 +0000
commit fc067a360d14db5f84fd4b58e0dee6cb04ee759b (patch)
tree a9217edb3a03e3937411407f704ad26e5887fb9a /compiler/optimizing/stack_map_test.cc
parent 0ae89052f7213701b8b3a782266e84b3d3600dbf (diff)
Introduce a flag to check if JITed code has instrumentation support
Introduce a new flag that records whether JITed code was compiled with instrumentation support. We used to check whether the runtime is java-debuggable to determine instrumentation support of JITed code. The java-debuggable flag was only set at runtime init and never changed afterwards, so that check was sufficient: in debuggable runtimes we always JIT code with instrumentation support. We now want to be able to switch the runtime to debuggable after it has started. As a first step, introduce an explicit flag that says whether a piece of JITed code was compiled with instrumentation support, and use it to decide whether the code needs entry/exit stubs and whether it is async-deoptimizable.

Bug: 222479430
Test: art/test.py
Change-Id: Ibcaeab869aa8ce153920a801dcc60988411c775b
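As a rough illustration of how the new bit is meant to be consumed, here is a minimal C++ sketch. The type and function names below (CompiledMethodInfo, HasInstrumentationSupport, CanSupportEntryExitHooks, IsAsyncDeoptimizable) are hypothetical and only mirror the description above; they are not the actual ART API.

// Minimal sketch: the compiler records, per method, whether the JITed code
// was built with instrumentation support (the new `debuggable` argument to
// StackMapStream::BeginMethod in the diff below); the runtime then queries
// that bit instead of the global "is the runtime java-debuggable" check.
class CompiledMethodInfo {
 public:
  explicit CompiledMethodInfo(bool debuggable) : debuggable_(debuggable) {}
  bool HasInstrumentationSupport() const { return debuggable_; }

 private:
  const bool debuggable_;  // Recorded at compile time, never re-derived from runtime state.
};

// Hypothetical runtime-side checks driven by the per-method flag.
bool CanSupportEntryExitHooks(const CompiledMethodInfo& info) {
  // Only code compiled with instrumentation support can invoke the
  // method entry/exit hooks directly.
  return info.HasInstrumentationSupport();
}

bool IsAsyncDeoptimizable(const CompiledMethodInfo& info) {
  // Async deoptimization is only safe for code compiled with
  // instrumentation support.
  return info.HasInstrumentationSupport();
}

This keeps the decision per compiled method rather than per runtime, which is what allows a runtime to become debuggable after startup without invalidating the check.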
Diffstat (limited to 'compiler/optimizing/stack_map_test.cc')
-rw-r--r--  compiler/optimizing/stack_map_test.cc | 56
1 file changed, 48 insertions(+), 8 deletions(-)
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index f6a739e15a..23af1f7fa1 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -52,7 +52,12 @@ TEST(StackMapTest, Test1) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArenaBitVector sp_mask(&allocator, 0, false);
size_t number_of_dex_registers = 2;
@@ -106,7 +111,12 @@ TEST(StackMapTest, Test2) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -300,7 +310,12 @@ TEST(StackMapTest, TestDeduplicateInlineInfoDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -363,7 +378,12 @@ TEST(StackMapTest, TestNonLiveDexRegisters) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
@@ -411,7 +431,12 @@ TEST(StackMapTest, TestShareDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArenaBitVector sp_mask(&allocator, 0, false);
uint32_t number_of_dex_registers = 2;
@@ -467,7 +492,12 @@ TEST(StackMapTest, TestNoDexRegisterMap) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 1);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 1,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArenaBitVector sp_mask(&allocator, 0, false);
stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask);
@@ -512,7 +542,12 @@ TEST(StackMapTest, InlineTest) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 2);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 2,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArtMethod art_method;
ArenaBitVector sp_mask1(&allocator, 0, true);
@@ -702,7 +737,12 @@ TEST(StackMapTest, TestDeduplicateStackMask) {
ArenaStack arena_stack(&pool);
ScopedArenaAllocator allocator(&arena_stack);
StackMapStream stream(&allocator, kRuntimeISA);
- stream.BeginMethod(32, 0, 0, 0);
+ stream.BeginMethod(/* frame_size_in_bytes= */ 32,
+ /* core_spill_mask= */ 0,
+ /* fp_spill_mask= */ 0,
+ /* num_dex_registers= */ 0,
+ /* baseline= */ false,
+ /* debuggable= */ false);
ArenaBitVector sp_mask(&allocator, 0, true);
sp_mask.SetBit(1);