author Mythri Alle <mythria@google.com> 2022-04-22 13:49:24 +0000
committer Mythri Alle <mythria@google.com> 2022-04-22 13:49:24 +0000
commit  79f078d87e410ed6d7b78a3afe8f7008db3601dd (patch)
tree    c7ebac1e626dd11e72767af1174b68f8fea9dfa9
parent  640267052653a8c177b6f325675e7217b83d7d3c (diff)
Revert "Update how deoptimization from runtime methods is supported"
This reverts commit 640267052653a8c177b6f325675e7217b83d7d3c.

Reason for revert: Broke golem benchmarks

Change-Id: I2d3f19508cfa22520c551e5f872272e6e8cf553e
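The reverted change made the quick entrypoints check a per-thread "deoptimization required" flag before returning to managed code; this revert restores the plain RETURN_*/DELIVER_PENDING_EXCEPTION macros seen in the hunks below. The following self-contained C++ sketch only paraphrases the control flow being removed: the Thread struct, its deopt_check_required field, and the function bodies are illustrative stand-ins for the real THREAD_DEOPT_CHECK_REQUIRED_OFFSET check and the artDeoptimizeIfNeeded helper, both of which appear in the deleted assembly and C++ hunks of this diff.

// Self-contained sketch (NOT ART code) of the return-path check that the
// reverted change added and that this revert removes again.
#include <cstdint>
#include <iostream>

struct Thread {
  bool deopt_check_required = false;  // stand-in for the flag behind THREAD_DEOPT_CHECK_REQUIRED_OFFSET
};

// Stand-in for the runtime helper whose removal is shown in
// quick_thread_entrypoints.cc further down in this diff.
void artDeoptimizeIfNeeded(Thread* /*self*/, uintptr_t result, bool is_ref) {
  std::cout << "would deoptimize the caller, result=" << result
            << " is_ref=" << is_ref << "\n";
}

// Rough C++ rendering of the DEOPT_OR_RETURN assembly macro: return straight
// to managed code unless the thread has requested a deoptimization check.
uintptr_t ReturnOrDeopt(Thread* self, uintptr_t result, bool is_ref) {
  if (!self->deopt_check_required) {
    return result;                               // fast path: plain bx lr / ret
  }
  artDeoptimizeIfNeeded(self, result, is_ref);   // slow path: SAVE_EVERYTHING frame + call
  return result;
}

int main() {
  Thread t;
  ReturnOrDeopt(&t, 0x0, /*is_ref=*/false);      // fast path taken
  t.deopt_check_required = true;
  ReturnOrDeopt(&t, 0x1234, /*is_ref=*/true);    // slow path taken
  return 0;
}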
-rw-r--r--  compiler/optimizing/code_generator.h | 1
-rw-r--r--  compiler/utils/assembler_thumb_test_expected.cc.inc | 6
-rw-r--r--  runtime/arch/arm/asm_support_arm.S | 4
-rw-r--r--  runtime/arch/arm/asm_support_arm.h | 2
-rw-r--r--  runtime/arch/arm/quick_entrypoints_arm.S | 113
-rw-r--r--  runtime/arch/arm64/asm_support_arm64.h | 3
-rw-r--r--  runtime/arch/arm64/quick_entrypoints_arm64.S | 105
-rw-r--r--  runtime/arch/quick_alloc_entrypoints.S | 48
-rw-r--r--  runtime/arch/x86/asm_support_x86.h | 2
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S | 127
-rw-r--r--  runtime/arch/x86_64/asm_support_x86_64.h | 2
-rw-r--r--  runtime/arch/x86_64/quick_entrypoints_x86_64.S | 98
-rw-r--r--  runtime/entrypoints/quick/quick_dexcache_entrypoints.cc | 1
-rw-r--r--  runtime/entrypoints/quick/quick_thread_entrypoints.cc | 30
-rw-r--r--  runtime/entrypoints/quick/quick_trampoline_entrypoints.cc | 68
-rw-r--r--  runtime/instrumentation.cc | 288
-rw-r--r--  runtime/instrumentation.h | 30
-rw-r--r--  runtime/interpreter/interpreter.cc | 1
-rw-r--r--  runtime/oat.h | 4
-rw-r--r--  runtime/oat_quick_method_header.h | 16
-rw-r--r--  runtime/stack.h | 10
-rw-r--r--  runtime/thread.cc | 4
-rw-r--r--  runtime/thread.h | 18
-rw-r--r--  tools/cpp-define-generator/globals.def | 4
-rw-r--r--  tools/cpp-define-generator/thread.def | 2
25 files changed, 344 insertions, 643 deletions
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index b09219a2ed..d81a7b5382 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -33,7 +33,6 @@
#include "graph_visualizer.h"
#include "locations.h"
#include "nodes.h"
-#include "oat_quick_method_header.h"
#include "optimizing_compiler_stats.h"
#include "read_barrier_option.h"
#include "stack.h"
diff --git a/compiler/utils/assembler_thumb_test_expected.cc.inc b/compiler/utils/assembler_thumb_test_expected.cc.inc
index 1775014e9e..b6c6025e41 100644
--- a/compiler/utils/assembler_thumb_test_expected.cc.inc
+++ b/compiler/utils/assembler_thumb_test_expected.cc.inc
@@ -76,7 +76,7 @@ const char* const VixlJniHelpersResults = {
" f2: bb f1 00 0f cmp.w r11, #0\n"
" f6: 18 bf it ne\n"
" f8: e3 46 movne r11, r12\n"
- " fa: d9 f8 9c c0 ldr.w r12, [r9, #156]\n"
+ " fa: d9 f8 94 c0 ldr.w r12, [r9, #148]\n"
" fe: bc f1 00 0f cmp.w r12, #0\n"
" 102: 71 d1 bne 0x1e8 @ imm = #226\n"
" 104: cd f8 ff c7 str.w r12, [sp, #2047]\n"
@@ -152,8 +152,8 @@ const char* const VixlJniHelpersResults = {
" 218: bd e8 e0 4d pop.w {r5, r6, r7, r8, r10, r11, lr}\n"
" 21c: d9 f8 24 80 ldr.w r8, [r9, #36]\n"
" 220: 70 47 bx lr\n"
- " 222: d9 f8 9c 00 ldr.w r0, [r9, #156]\n"
- " 226: d9 f8 d0 e2 ldr.w lr, [r9, #720]\n"
+ " 222: d9 f8 94 00 ldr.w r0, [r9, #148]\n"
+ " 226: d9 f8 c8 e2 ldr.w lr, [r9, #712]\n"
" 22a: f0 47 blx lr\n"
};
diff --git a/runtime/arch/arm/asm_support_arm.S b/runtime/arch/arm/asm_support_arm.S
index 68afc24091..23d82bac38 100644
--- a/runtime/arch/arm/asm_support_arm.S
+++ b/runtime/arch/arm/asm_support_arm.S
@@ -315,6 +315,10 @@
DELIVER_PENDING_EXCEPTION
.endm
+.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ RETURN_OR_DELIVER_PENDING_EXCEPTION_REG r1
+.endm
+
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
ldr ip, [rSELF, #THREAD_EXCEPTION_OFFSET] @ Get exception field.
cmp ip, #0
diff --git a/runtime/arch/arm/asm_support_arm.h b/runtime/arch/arm/asm_support_arm.h
index 5e3f85484a..aff055e611 100644
--- a/runtime/arch/arm/asm_support_arm.h
+++ b/runtime/arch/arm/asm_support_arm.h
@@ -25,8 +25,6 @@
#define FRAME_SIZE_SAVE_EVERYTHING 192
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_CLINIT FRAME_SIZE_SAVE_EVERYTHING
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_SUSPEND_CHECK FRAME_SIZE_SAVE_EVERYTHING
-#define SAVE_EVERYTHING_FRAME_R0_OFFSET \
- (FRAME_SIZE_SAVE_EVERYTHING - CALLEE_SAVE_EVERYTHING_NUM_CORE_SPILLS * POINTER_SIZE)
// The offset from the art_quick_read_barrier_mark_introspection (used for field
// loads with 32-bit LDR) to the entrypoint for field loads with 16-bit LDR,
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 5a9757232f..d6f129be50 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -134,50 +134,16 @@
.cfi_adjust_cfa_offset -52
.endm
-.macro RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
- ldr r1, [rSELF, # THREAD_EXCEPTION_OFFSET] // Get exception field.
- cbnz r1, 1f
- DEOPT_OR_RETURN r1 // Check if deopt is required
+.macro RETURN_IF_RESULT_IS_ZERO
+ cbnz r0, 1f @ result non-zero branch over
+ bx lr @ return
1:
- DELIVER_PENDING_EXCEPTION
.endm
-.macro DEOPT_OR_RETURN temp, is_ref = 0
- ldr \temp, [rSELF, #THREAD_DEOPT_CHECK_REQUIRED_OFFSET]
- cbnz \temp, 2f
- bx lr
-2:
- SETUP_SAVE_EVERYTHING_FRAME \temp
- mov r2, \is_ref // pass if result is a reference
- mov r1, r0 // pass the result
- mov r0, rSELF // Thread::Current
- bl artDeoptimizeIfNeeded
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME
- REFRESH_MARKING_REGISTER
- bx lr
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
-.endm
-
-.macro DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_R0 temp, is_ref
- ldr \temp, [rSELF, #THREAD_DEOPT_CHECK_REQUIRED_OFFSET]
- cbnz \temp, 2f
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
- REFRESH_MARKING_REGISTER
- bx lr
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
-2:
- str r0, [sp, SAVE_EVERYTHING_FRAME_R0_OFFSET] // update result in the frame
- mov r2, \is_ref // pass if result is a reference
- mov r1, r0 // pass the result
- mov r0, rSELF // Thread::Current
- bl artDeoptimizeIfNeeded
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME
- REFRESH_MARKING_REGISTER
- bx lr
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
+.macro RETURN_IF_RESULT_IS_NON_ZERO
+ cbz r0, 1f @ result zero branch over
+ bx lr @ return
+1:
.endm
.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
@@ -217,16 +183,12 @@ END \c_name
.endm
.macro RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
- cbnz r0, 1f @ result non-zero branch over
- DEOPT_OR_RETURN r1
-1:
+ RETURN_IF_RESULT_IS_ZERO
DELIVER_PENDING_EXCEPTION
.endm
-.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
- cbz r0, 1f @ result zero branch over
- DEOPT_OR_RETURN r1, /*is_ref=*/1
-1:
+.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO
DELIVER_PENDING_EXCEPTION
.endm
@@ -555,7 +517,8 @@ ENTRY art_quick_lock_object_no_inline
bl artLockObjectFromCode @ (Object* obj, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_ZERO
+ DELIVER_PENDING_EXCEPTION
END art_quick_lock_object_no_inline
/*
@@ -585,7 +548,8 @@ ENTRY art_quick_unlock_object_no_inline
bl artUnlockObjectFromCode @ (Object* obj, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_ZERO
+ DELIVER_PENDING_EXCEPTION
END art_quick_unlock_object_no_inline
/*
@@ -818,8 +782,11 @@ ENTRY \name
mov r1, rSELF @ pass Thread::Current
bl \entrypoint @ (uint32_t index, Thread*)
cbz r0, 1f @ If result is null, deliver the OOME.
- str r0, [sp, #136] @ store result in the frame
- DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_R0 r1, /* is_ref= */ 1
+ .cfi_remember_state
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
+ REFRESH_MARKING_REGISTER
+ bx lr
+ CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
1:
DELIVER_PENDING_EXCEPTION_FRAME_READY
END \name
@@ -842,12 +809,12 @@ ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromC
/*
* Called by managed code to resolve a static field and load a non-wide value.
*/
-ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
/*
* Called by managed code to resolve a static field and load a 64-bit primitive value.
*/
@@ -860,7 +827,7 @@ ENTRY art_quick_get64_static
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
cbnz r2, 1f @ success if no exception pending
- DEOPT_OR_RETURN r2 @ check if deopt is required or return
+ bx lr @ return on success
1:
DELIVER_PENDING_EXCEPTION
END art_quick_get64_static
@@ -868,12 +835,12 @@ END art_quick_get64_static
/*
* Called by managed code to resolve an instance field and load a non-wide value.
*/
-TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_R1
/*
* Called by managed code to resolve an instance field and load a 64-bit primitive value.
*/
@@ -886,7 +853,7 @@ ENTRY art_quick_get64_instance
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
cbnz r2, 1f @ success if no exception pending
- DEOPT_OR_RETURN r2 @ check if deopt is required or return
+ bx lr @ return on success
1:
DELIVER_PENDING_EXCEPTION
END art_quick_get64_instance
@@ -921,7 +888,8 @@ ENTRY art_quick_set64_instance
.cfi_adjust_cfa_offset -16
RESTORE_SAVE_REFS_ONLY_FRAME @ TODO: we can clearly save an add here
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_ZERO
+ DELIVER_PENDING_EXCEPTION
END art_quick_set64_instance
.extern artSet64StaticFromCompiledCode
@@ -935,7 +903,8 @@ ENTRY art_quick_set64_static
.cfi_adjust_cfa_offset -16
RESTORE_SAVE_REFS_ONLY_FRAME @ TODO: we can clearly save an add here
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_ZERO_OR_DELIVER
+ RETURN_IF_RESULT_IS_ZERO
+ DELIVER_PENDING_EXCEPTION
END art_quick_set64_static
// Generate the allocation entrypoints for each allocator.
@@ -1068,7 +1037,7 @@ ENTRY \c_name
bl \cxx_name @ (mirror::Class* cls, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \c_name
.endm
@@ -1153,7 +1122,7 @@ ENTRY \name
bl \entrypoint // (mirror::Class* klass, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm
@@ -1226,7 +1195,7 @@ ENTRY \name
bl \entrypoint
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm
@@ -1914,7 +1883,7 @@ ENTRY art_quick_string_builder_append
bl artStringBuilderAppend @ (uint32_t, const unit32_t*, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_string_builder_append
/*
diff --git a/runtime/arch/arm64/asm_support_arm64.h b/runtime/arch/arm64/asm_support_arm64.h
index 5a285a4d9d..887ee0259c 100644
--- a/runtime/arch/arm64/asm_support_arm64.h
+++ b/runtime/arch/arm64/asm_support_arm64.h
@@ -19,7 +19,6 @@
#include "asm_support.h"
-// TODO(mythria): Change these to use constants from callee_save_frame_arm64.h
#define CALLEE_SAVES_SIZE (12 * 8 + 8 * 8)
// +8 for the ArtMethod, +8 for alignment.
#define FRAME_SIZE_SAVE_ALL_CALLEE_SAVES (CALLEE_SAVES_SIZE + 16)
@@ -28,8 +27,6 @@
#define FRAME_SIZE_SAVE_EVERYTHING 512
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_CLINIT FRAME_SIZE_SAVE_EVERYTHING
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_SUSPEND_CHECK FRAME_SIZE_SAVE_EVERYTHING
-#define SAVE_EVERYTHING_FRAME_X0_OFFSET \
- (FRAME_SIZE_SAVE_EVERYTHING - CALLEE_SAVE_EVERYTHING_NUM_CORE_SPILLS * POINTER_SIZE)
// The offset from art_quick_read_barrier_mark_introspection to the array switch cases,
// i.e. art_quick_read_barrier_mark_introspection_arrays.
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 5f74f7ddf7..d8c91e11b9 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -194,56 +194,26 @@
RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
.endm
-.macro RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
- ldr x1, [xSELF, # THREAD_EXCEPTION_OFFSET] // Get exception field.
- cbnz x1, 1f
- DEOPT_OR_RETURN x1 // Check if deopt is required
-1: // deliver exception on current thread
- DELIVER_PENDING_EXCEPTION
+.macro RETURN_IF_RESULT_IS_ZERO
+ cbnz x0, 1f // result non-zero branch over
+ ret // return
+1:
.endm
-.macro DEOPT_OR_RETURN temp, is_ref = 0
- ldr \temp, [xSELF, #THREAD_DEOPT_CHECK_REQUIRED_OFFSET]
- cbnz \temp, 2f
- ret
-2:
- SETUP_SAVE_EVERYTHING_FRAME
- mov x2, \is_ref // pass if result is a reference
- mov x1, x0 // pass the result
- mov x0, xSELF // Thread::Current
- bl artDeoptimizeIfNeeded
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME
- REFRESH_MARKING_REGISTER
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
+.macro RETURN_IF_RESULT_IS_NON_ZERO
+ cbz x0, 1f // result zero branch over
+ ret // return
+1:
.endm
-.macro DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_X0 temp, is_ref
- ldr \temp, [xSELF, #THREAD_DEOPT_CHECK_REQUIRED_OFFSET]
- cbnz \temp, 2f
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
- REFRESH_MARKING_REGISTER
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
-2:
- str x0, [sp, #SAVE_EVERYTHING_FRAME_X0_OFFSET] // update result in the frame
- mov x2, \is_ref // pass if result is a reference
- mov x1, x0 // pass the result
- mov x0, xSELF // Thread::Current
- bl artDeoptimizeIfNeeded
- .cfi_remember_state
- RESTORE_SAVE_EVERYTHING_FRAME
- REFRESH_MARKING_REGISTER
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
+// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
+.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm
-
.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
cbnz w0, 1f // result non-zero branch over
- DEOPT_OR_RETURN x1
+ ret // return
1:
DELIVER_PENDING_EXCEPTION
.endm
@@ -1244,7 +1214,11 @@ ENTRY \name
mov x1, xSELF // pass Thread::Current
bl \entrypoint // (int32_t index, Thread* self)
cbz w0, 1f // If result is null, deliver the OOME.
- DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_X0 x1, /* is_ref= */ 1
+ .cfi_remember_state
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
+ REFRESH_MARKING_REGISTER
+ ret // return
+ CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_EVERYTHING
1:
DELIVER_PENDING_EXCEPTION_FRAME_READY
END \name
@@ -1254,14 +1228,13 @@ END \name
ONE_ARG_SAVE_EVERYTHING_DOWNCALL \name, \entrypoint, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
.endm
-.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
- cbz w0, 1f // result zero branch over
- DEOPT_OR_RETURN x1, /*is_ref=*/1 // check for deopt or return
+.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ cbz w0, 1f // result zero branch over
+ ret // return
1:
DELIVER_PENDING_EXCEPTION
.endm
-
/*
* Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
* failure.
@@ -1283,21 +1256,21 @@ ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromC
// Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
// defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.
-ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-
-TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
@@ -1437,7 +1410,7 @@ ENTRY \c_name
bl \cxx_name
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \c_name
.endm
@@ -1492,7 +1465,7 @@ ENTRY \name
bl \entrypoint // (mirror::Class*, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm
@@ -1566,7 +1539,7 @@ ENTRY \name
bl \entrypoint
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm
@@ -2129,7 +2102,7 @@ ENTRY art_quick_string_builder_append
bl artStringBuilderAppend // (uint32_t, const unit32_t*, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME
REFRESH_MARKING_REGISTER
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_string_builder_append
/*
diff --git a/runtime/arch/quick_alloc_entrypoints.S b/runtime/arch/quick_alloc_entrypoints.S
index 5d4b24bc6b..32888edf7b 100644
--- a/runtime/arch/quick_alloc_entrypoints.S
+++ b/runtime/arch/quick_alloc_entrypoints.S
@@ -16,27 +16,27 @@
.macro GENERATE_ALLOC_ENTRYPOINTS c_suffix, cxx_suffix
// Called by managed code to allocate an object of a resolved class.
-ONE_ARG_DOWNCALL art_quick_alloc_object_resolved\c_suffix, artAllocObjectFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ONE_ARG_DOWNCALL art_quick_alloc_object_resolved\c_suffix, artAllocObjectFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate an object of an initialized class.
-ONE_ARG_DOWNCALL art_quick_alloc_object_initialized\c_suffix, artAllocObjectFromCodeInitialized\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ONE_ARG_DOWNCALL art_quick_alloc_object_initialized\c_suffix, artAllocObjectFromCodeInitialized\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate an object when the caller doesn't know whether it has access
// to the created type.
-ONE_ARG_DOWNCALL art_quick_alloc_object_with_checks\c_suffix, artAllocObjectFromCodeWithChecks\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ONE_ARG_DOWNCALL art_quick_alloc_object_with_checks\c_suffix, artAllocObjectFromCodeWithChecks\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate a string if it could not be removed by any optimizations
-ONE_ARG_DOWNCALL art_quick_alloc_string_object\c_suffix, artAllocStringObject\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ONE_ARG_DOWNCALL art_quick_alloc_string_object\c_suffix, artAllocStringObject\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate an array of a resolve class.
-TWO_ARG_DOWNCALL art_quick_alloc_array_resolved\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+TWO_ARG_DOWNCALL art_quick_alloc_array_resolved\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate a string from bytes
-FOUR_ARG_DOWNCALL art_quick_alloc_string_from_bytes\c_suffix, artAllocStringFromBytesFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+FOUR_ARG_DOWNCALL art_quick_alloc_string_from_bytes\c_suffix, artAllocStringFromBytesFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate a string from chars
-THREE_ARG_DOWNCALL art_quick_alloc_string_from_chars\c_suffix, artAllocStringFromCharsFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+THREE_ARG_DOWNCALL art_quick_alloc_string_from_chars\c_suffix, artAllocStringFromCharsFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
// Called by managed code to allocate a string from string
-ONE_ARG_DOWNCALL art_quick_alloc_string_from_string\c_suffix, artAllocStringFromStringFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ONE_ARG_DOWNCALL art_quick_alloc_string_from_string\c_suffix, artAllocStringFromStringFromCode\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-TWO_ARG_DOWNCALL art_quick_alloc_array_resolved8\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
-TWO_ARG_DOWNCALL art_quick_alloc_array_resolved16\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
-TWO_ARG_DOWNCALL art_quick_alloc_array_resolved32\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
-TWO_ARG_DOWNCALL art_quick_alloc_array_resolved64\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+TWO_ARG_DOWNCALL art_quick_alloc_array_resolved8\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+TWO_ARG_DOWNCALL art_quick_alloc_array_resolved16\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+TWO_ARG_DOWNCALL art_quick_alloc_array_resolved32\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+TWO_ARG_DOWNCALL art_quick_alloc_array_resolved64\c_suffix, artAllocArrayFromCodeResolved\cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
.endm
.macro GENERATE_ALL_ALLOC_ENTRYPOINTS
@@ -58,29 +58,29 @@ GENERATE_ALLOC_ENTRYPOINTS _region_tlab_instrumented, RegionTLABInstrumented
// GENERATE_ALL_ALLOC_ENTRYPOINTS for selectively implementing allocation fast paths in
// hand-written assembly.
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(c_suffix, cxx_suffix) \
- ONE_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ ONE_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(c_suffix, cxx_suffix) \
- ONE_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ ONE_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
- ONE_ARG_DOWNCALL art_quick_alloc_object_with_checks ## c_suffix, artAllocObjectFromCodeWithChecks ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ ONE_ARG_DOWNCALL art_quick_alloc_object_with_checks ## c_suffix, artAllocObjectFromCodeWithChecks ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(c_suffix, cxx_suffix) \
- ONE_ARG_DOWNCALL art_quick_alloc_string_object ## c_suffix, artAllocStringObject ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ ONE_ARG_DOWNCALL art_quick_alloc_string_object ## c_suffix, artAllocStringObject ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(c_suffix, cxx_suffix) \
- FOUR_ARG_DOWNCALL art_quick_alloc_string_from_bytes ## c_suffix, artAllocStringFromBytesFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ FOUR_ARG_DOWNCALL art_quick_alloc_string_from_bytes ## c_suffix, artAllocStringFromBytesFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(c_suffix, cxx_suffix) \
- THREE_ARG_DOWNCALL art_quick_alloc_string_from_chars ## c_suffix, artAllocStringFromCharsFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ THREE_ARG_DOWNCALL art_quick_alloc_string_from_chars ## c_suffix, artAllocStringFromCharsFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(c_suffix, cxx_suffix) \
- ONE_ARG_DOWNCALL art_quick_alloc_string_from_string ## c_suffix, artAllocStringFromStringFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ ONE_ARG_DOWNCALL art_quick_alloc_string_from_string ## c_suffix, artAllocStringFromStringFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(c_suffix, cxx_suffix) \
- TWO_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ TWO_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(c_suffix, cxx_suffix) \
- TWO_ARG_DOWNCALL art_quick_alloc_array_resolved8 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ TWO_ARG_DOWNCALL art_quick_alloc_array_resolved8 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(c_suffix, cxx_suffix) \
- TWO_ARG_DOWNCALL art_quick_alloc_array_resolved16 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ TWO_ARG_DOWNCALL art_quick_alloc_array_resolved16 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(c_suffix, cxx_suffix) \
- TWO_ARG_DOWNCALL art_quick_alloc_array_resolved32 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ TWO_ARG_DOWNCALL art_quick_alloc_array_resolved32 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(c_suffix, cxx_suffix) \
- TWO_ARG_DOWNCALL art_quick_alloc_array_resolved64 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER
+ TWO_ARG_DOWNCALL art_quick_alloc_array_resolved64 ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
.macro GENERATE_ALLOC_ENTRYPOINTS_FOR_REGION_TLAB_ALLOCATOR
// This is to be separately defined for each architecture to allow a hand-written assembly fast path.
diff --git a/runtime/arch/x86/asm_support_x86.h b/runtime/arch/x86/asm_support_x86.h
index f6889334df..737d736f01 100644
--- a/runtime/arch/x86/asm_support_x86.h
+++ b/runtime/arch/x86/asm_support_x86.h
@@ -25,7 +25,5 @@
#define FRAME_SIZE_SAVE_EVERYTHING (48 + 64)
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_CLINIT FRAME_SIZE_SAVE_EVERYTHING
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_SUSPEND_CHECK FRAME_SIZE_SAVE_EVERYTHING
-#define SAVE_EVERYTHING_FRAME_EAX_OFFSET \
- (FRAME_SIZE_SAVE_EVERYTHING - CALLEE_SAVE_EVERYTHING_NUM_CORE_SPILLS * POINTER_SIZE)
#endif // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_H_
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index c66d893d7d..7f1311c01e 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -794,9 +794,12 @@ MACRO3(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name, runtime_method_offset
call CALLVAR(cxx_name) // cxx_name(arg1, Thread*)
addl MACRO_LITERAL(16), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-16)
- testl %eax, %eax // If result is null deliver pending exception
+ testl %eax, %eax // If result is null, deliver the OOME.
jz 1f
- DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_EAX ebx, /* is_ref= */1 // Check for deopt
+ CFI_REMEMBER_STATE
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX // restore frame up to return address
+ ret // return
+ CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
1:
DELIVER_PENDING_EXCEPTION_FRAME_READY
END_FUNCTION VAR(c_name)
@@ -806,72 +809,18 @@ MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT, c_name, cxx_name)
ONE_ARG_SAVE_EVERYTHING_DOWNCALL \c_name, \cxx_name, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
END_MACRO
-MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER)
- testl %eax, %eax // eax == 0 ?
- jz 1f // if eax == 0 goto 1
- DEOPT_OR_RETURN ebx, /*is_ref=*/1 // check if deopt is required
-1: // deliver exception on current thread
- DELIVER_PENDING_EXCEPTION
-END_MACRO
-
-MACRO0(RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION)
- cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
- jne 1f // if exception field != 0 goto 1
- DEOPT_OR_RETURN ebx // check if deopt is required
-1: // deliver exception on current thread
+MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
+ testl %eax, %eax // eax == 0 ?
+ jz 1f // if eax == 0 goto 1
+ ret // return
+1: // deliver exception on current thread
DELIVER_PENDING_EXCEPTION
END_MACRO
-MACRO2(DEOPT_OR_RETURN, temp, is_ref = 0)
- cmpl LITERAL(0), %fs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
- jne 2f
- ret
-2:
- SETUP_SAVE_EVERYTHING_FRAME \temp
- subl MACRO_LITERAL(4), %esp // alignment padding
- CFI_ADJUST_CFA_OFFSET(4)
- pushl MACRO_LITERAL(\is_ref) // is_ref
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH_ARG eax // result
- pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current
- CFI_ADJUST_CFA_OFFSET(4)
- call SYMBOL(artDeoptimizeIfNeeded)
- addl LITERAL(16), %esp // pop arguments
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
-END_MACRO
-
-MACRO2(DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_EAX, temp, is_ref = 0)
- cmpl LITERAL(0), %fs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
- jne 2f
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
-2:
- movl %eax, SAVE_EVERYTHING_FRAME_EAX_OFFSET(%esp) // update eax in the frame
- subl MACRO_LITERAL(4), %esp // alignment padding
- CFI_ADJUST_CFA_OFFSET(4)
- pushl MACRO_LITERAL(\is_ref) // is_ref
- CFI_ADJUST_CFA_OFFSET(4)
- PUSH_ARG eax // result
- pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current
- CFI_ADJUST_CFA_OFFSET(4)
- call SYMBOL(artDeoptimizeIfNeeded)
- addl LITERAL(16), %esp // pop arguments
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
-END_MACRO
-
-
MACRO0(RETURN_IF_EAX_ZERO)
testl %eax, %eax // eax == 0 ?
jnz 1f // if eax != 0 goto 1
- DEOPT_OR_RETURN ebx // check if deopt is needed
+ ret // return
1: // deliver exception on current thread
DELIVER_PENDING_EXCEPTION
END_MACRO
@@ -978,7 +927,7 @@ MACRO2(ART_QUICK_ALLOC_OBJECT_ROSALLOC, c_name, cxx_name)
addl LITERAL(16), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION VAR(c_name)
END_MACRO
@@ -1025,7 +974,7 @@ MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH, cxx_name)
addl LITERAL(16), %esp
CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_MACRO
MACRO2(ART_QUICK_ALLOC_OBJECT_TLAB, c_name, cxx_name)
@@ -1158,7 +1107,7 @@ MACRO3(GENERATE_ALLOC_ARRAY_TLAB, c_entrypoint, cxx_name, size_setup)
addl LITERAL(16), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION VAR(c_entrypoint)
END_MACRO
@@ -1552,21 +1501,21 @@ END_FUNCTION art_quick_lushr
// Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
// defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.
-ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-
-TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_EAX_ZERO
@@ -1667,7 +1616,7 @@ DEFINE_FUNCTION art_quick_resolution_trampoline
movl %eax, %edi // remember code pointer in EDI
addl LITERAL(16), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-16)
- test %eax, %eax // if code pointer is null goto deliver the OOME.
+ test %eax, %eax // if code pointer is null goto deliver pending exception
jz 1f
RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP
1:
@@ -2025,19 +1974,19 @@ DEFINE_FUNCTION art_quick_string_builder_append
SETUP_SAVE_REFS_ONLY_FRAME ebx // save ref containing registers for GC
// Outgoing argument set up
leal FRAME_SIZE_SAVE_REFS_ONLY + __SIZEOF_POINTER__(%esp), %edi // prepare args
- push %eax // push padding
+ push %eax // push padding
CFI_ADJUST_CFA_OFFSET(4)
- pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
CFI_ADJUST_CFA_OFFSET(4)
- push %edi // pass args
+ push %edi // pass args
CFI_ADJUST_CFA_OFFSET(4)
- push %eax // pass format
+ push %eax // pass format
CFI_ADJUST_CFA_OFFSET(4)
- call SYMBOL(artStringBuilderAppend) // (uint32_t, const unit32_t*, Thread*)
- addl MACRO_LITERAL(16), %esp // pop arguments
+ call SYMBOL(artStringBuilderAppend) // (uint32_t, const unit32_t*, Thread*)
+ addl MACRO_LITERAL(16), %esp // pop arguments
CFI_ADJUST_CFA_OFFSET(-16)
- RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION art_quick_string_builder_append
// Create a function `name` calling the ReadBarrier::Mark routine,
diff --git a/runtime/arch/x86_64/asm_support_x86_64.h b/runtime/arch/x86_64/asm_support_x86_64.h
index e389c781e5..51befbe7b8 100644
--- a/runtime/arch/x86_64/asm_support_x86_64.h
+++ b/runtime/arch/x86_64/asm_support_x86_64.h
@@ -25,7 +25,5 @@
#define FRAME_SIZE_SAVE_EVERYTHING (144 + 16*8)
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_CLINIT FRAME_SIZE_SAVE_EVERYTHING
#define FRAME_SIZE_SAVE_EVERYTHING_FOR_SUSPEND_CHECK FRAME_SIZE_SAVE_EVERYTHING
-#define SAVE_EVERYTHING_FRAME_RAX_OFFSET \
- (FRAME_SIZE_SAVE_EVERYTHING - CALLEE_SAVE_EVERYTHING_NUM_CORE_SPILLS * POINTER_SIZE)
#endif // ART_RUNTIME_ARCH_X86_64_ASM_SUPPORT_X86_64_H_
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 2888d531cf..673696c714 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -731,9 +731,12 @@ MACRO3(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name, runtime_method_offset
movl %eax, %edi // pass the index of the constant as arg0
movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
call CALLVAR(cxx_name) // cxx_name(arg0, Thread*)
- testl %eax, %eax // If result is null, deliver pending exception.
+ testl %eax, %eax // If result is null, deliver the OOME.
jz 1f
- DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_RAX /*is_ref=*/1
+ CFI_REMEMBER_STATE
+ RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX // restore frame up to return address
+ ret
+ CFI_RESTORE_STATE_AND_DEF_CFA rsp, FRAME_SIZE_SAVE_EVERYTHING
1:
DELIVER_PENDING_EXCEPTION_FRAME_READY
END_FUNCTION VAR(c_name)
@@ -743,65 +746,18 @@ MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT, c_name, cxx_name)
ONE_ARG_SAVE_EVERYTHING_DOWNCALL \c_name, \cxx_name, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
END_MACRO
-MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER)
+MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
testq %rax, %rax // rax == 0 ?
jz 1f // if rax == 0 goto 1
- DEOPT_OR_RETURN /*is_ref=*/1 // Check if deopt is required
-1: // deliver exception on current thread
- DELIVER_PENDING_EXCEPTION
-END_MACRO
-
-
-MACRO0(RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION)
- movq %gs:THREAD_EXCEPTION_OFFSET, %rcx // get exception field
- testq %rcx, %rcx // rcx == 0 ?
- jnz 1f // if rcx != 0 goto 1
- DEOPT_OR_RETURN // Check if deopt is required
+ ret // return
1: // deliver exception on current thread
DELIVER_PENDING_EXCEPTION
END_MACRO
-MACRO1(DEOPT_OR_RETURN, is_ref = 0)
- cmpl LITERAL(0), %gs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
- jne 2f
- ret
-2:
- SETUP_SAVE_EVERYTHING_FRAME
- movq LITERAL(\is_ref), %rdx // pass if result is a reference
- movq %rax, %rsi // pass the result
- movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current
- call SYMBOL(artDeoptimizeIfNeeded)
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA rsp, FRAME_SIZE_SAVE_EVERYTHING
-END_MACRO
-
-MACRO1(DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_RAX, is_ref = 0)
- cmpl LITERAL(0), %gs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
- jne 2f
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA rsp, FRAME_SIZE_SAVE_EVERYTHING
-2:
- movq %rax, SAVE_EVERYTHING_FRAME_RAX_OFFSET(%rsp) // update result in the frame
- movq LITERAL(\is_ref), %rdx // pass if result is a reference
- movq %rax, %rsi // pass the result
- movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current
- call SYMBOL(artDeoptimizeIfNeeded)
- CFI_REMEMBER_STATE
- RESTORE_SAVE_EVERYTHING_FRAME
- ret
- CFI_RESTORE_STATE_AND_DEF_CFA rsp, FRAME_SIZE_SAVE_EVERYTHING
-END_MACRO
-
-
-
MACRO0(RETURN_IF_EAX_ZERO)
testl %eax, %eax // eax == 0 ?
jnz 1f // if eax != 0 goto 1
- DEOPT_OR_RETURN // Check if we need a deopt
+ ret // return
1: // deliver exception on current thread
DELIVER_PENDING_EXCEPTION
END_MACRO
@@ -903,7 +859,7 @@ MACRO2(ART_QUICK_ALLOC_OBJECT_ROSALLOC, c_name, cxx_name)
movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
call CALLVAR(cxx_name) // cxx_name(arg0, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION VAR(c_name)
END_MACRO
@@ -975,7 +931,7 @@ MACRO1(ALLOC_OBJECT_TLAB_SLOW_PATH, cxx_name)
movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
call CALLVAR(cxx_name) // cxx_name(arg0, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_MACRO
// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB). May be
@@ -1063,7 +1019,7 @@ MACRO3(GENERATE_ALLOC_ARRAY_TLAB, c_entrypoint, cxx_name, size_setup)
movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
call CALLVAR(cxx_name) // cxx_name(arg0, arg1, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION VAR(c_entrypoint)
END_MACRO
@@ -1368,27 +1324,27 @@ THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCod
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCompiledCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_EAX_ZERO
-TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCompiledCode, RETURN_IF_EAX_ZERO
-TWO_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+TWO_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCompiledCode, RETURN_IF_EAX_ZERO
-ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
-ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
+ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
DEFINE_FUNCTION art_quick_proxy_invoke_handler
SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_RDI
@@ -1890,7 +1846,7 @@ DEFINE_FUNCTION art_quick_string_builder_append
movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
call artStringBuilderAppend // (uint32_t, const unit32_t*, Thread*)
RESTORE_SAVE_REFS_ONLY_FRAME // restore frame up to return address
- RETURN_IF_RESULT_IS_NON_ZERO_OR_DEOPT_OR_DELIVER // return or deopt or deliver exception
+ RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER // return or deliver exception
END_FUNCTION art_quick_string_builder_append
// Create a function `name` calling the ReadBarrier::Mark routine,
diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
index 76bee2152a..60a5875c5e 100644
--- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
@@ -23,7 +23,6 @@
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/heap.h"
-#include "jvalue-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/object-inl.h"
diff --git a/runtime/entrypoints/quick/quick_thread_entrypoints.cc b/runtime/entrypoints/quick/quick_thread_entrypoints.cc
index 5dca58ab04..93422cf056 100644
--- a/runtime/entrypoints/quick/quick_thread_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_thread_entrypoints.cc
@@ -21,46 +21,16 @@
namespace art {
-extern "C" void artDeoptimizeIfNeeded(Thread* self, uintptr_t result, bool is_ref)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
- DCHECK(!self->IsExceptionPending());
-
- ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
- DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
-
- DeoptimizationMethodType type = instr->GetDeoptimizationMethodType(*sp);
- JValue jvalue;
- jvalue.SetJ(result);
- instr->DeoptimizeIfNeeded(self, sp, type, jvalue, is_ref);
-}
-
extern "C" void artTestSuspendFromCode(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
// Called when there is a pending checkpoint or suspend request.
ScopedQuickEntrypointChecks sqec(self);
self->CheckSuspend();
-
- // We could have other dex instructions at the same dex pc as suspend and we need to execute
- // those instructions. So we should start executing from the current dex pc.
- ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
- JValue result;
- result.SetJ(0);
- Runtime::Current()->GetInstrumentation()->DeoptimizeIfNeeded(
- self, sp, DeoptimizationMethodType::kKeepDexPc, result, /* is_ref= */ false);
}
extern "C" void artImplicitSuspendFromCode(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
// Called when there is a pending checkpoint or suspend request.
ScopedQuickEntrypointChecks sqec(self);
self->CheckSuspend(/*implicit=*/ true);
-
- // We could have other dex instructions at the same dex pc as suspend and we need to execute
- // those instructions. So we should start executing from the current dex pc.
- ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
- JValue result;
- result.SetJ(0);
- Runtime::Current()->GetInstrumentation()->DeoptimizeIfNeeded(
- self, sp, DeoptimizationMethodType::kKeepDexPc, result, /* is_ref= */ false);
}
extern "C" void artCompileOptimized(ArtMethod* method, Thread* self)
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 0707477df3..2086fa234c 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -713,34 +713,41 @@ extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self,
// Pop transition.
self->PopManagedStackFragment(fragment);
- // Check if caller needs to be deoptimized for instrumentation reasons.
- instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
+ // Request a stack deoptimization if needed
+ ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
+ uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
// If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
// should be done and it knows the real return pc. NB If the upcall is null we don't need to do
// anything. This can happen during shutdown or early startup.
- if (UNLIKELY(instr->ShouldDeoptimizeCaller(self, sp))) {
- ArtMethod* caller = QuickArgumentVisitor::GetOuterMethod(sp);
- uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
- DCHECK(Runtime::Current()->IsAsyncDeoptimizeable(caller_pc));
- DCHECK(caller != nullptr);
- VLOG(deopt) << "Forcing deoptimization on return from method " << method->PrettyMethod()
- << " to " << caller->PrettyMethod() << (force_frame_pop ? " for frame-pop" : "");
- DCHECK(!force_frame_pop || result.GetJ() == 0) << "Force frame pop should have no result.";
- if (force_frame_pop && self->GetException() != nullptr) {
- LOG(WARNING) << "Suppressing exception for instruction-retry: "
- << self->GetException()->Dump();
+ if (UNLIKELY(
+ caller != nullptr &&
+ caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) &&
+ (self->IsForceInterpreter() || Dbg::IsForcedInterpreterNeededForUpcall(self, caller)))) {
+ if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) {
+ LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
+ << caller->PrettyMethod();
+ } else {
+ VLOG(deopt) << "Forcing deoptimization on return from method " << method->PrettyMethod()
+ << " to " << caller->PrettyMethod()
+ << (force_frame_pop ? " for frame-pop" : "");
+ DCHECK_IMPLIES(force_frame_pop, result.GetJ() == 0)
+ << "Force frame pop should have no result.";
+ if (force_frame_pop && self->GetException() != nullptr) {
+ LOG(WARNING) << "Suppressing exception for instruction-retry: "
+ << self->GetException()->Dump();
+ }
+ // Push the context of the deoptimization stack so we can restore the return value and the
+ // exception before executing the deoptimized frames.
+ self->PushDeoptimizationContext(
+ result,
+ shorty[0] == 'L' || shorty[0] == '[', /* class or array */
+ force_frame_pop ? nullptr : self->GetException(),
+ /* from_code= */ false,
+ DeoptimizationMethodType::kDefault);
+
+ // Set special exception to cause deoptimization.
+ self->SetException(Thread::GetDeoptimizationException());
}
- DCHECK(self->GetException() != Thread::GetDeoptimizationException());
- // Push the context of the deoptimization stack so we can restore the return value and the
- // exception before executing the deoptimized frames.
- self->PushDeoptimizationContext(result,
- shorty[0] == 'L' || shorty[0] == '[', /* class or array */
- force_frame_pop ? nullptr : self->GetException(),
- /* from_code= */ false,
- DeoptimizationMethodType::kDefault);
-
- // Set special exception to cause deoptimization.
- self->SetException(Thread::GetDeoptimizationException());
}
// No need to restore the args since the method has already been run by the interpreter.
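
A note on the `shorty[0] == 'L' || shorty[0] == '['` test restored above: the first character of a method shorty encodes the return type, and only 'L' (object) and '[' (array) count as references. A minimal, hypothetical helper showing just that convention (not ART code):

#include <cassert>

// In a method shorty the first character is the return type;
// 'L' and '[' denote reference returns, everything else is primitive or void.
static bool ReturnsReference(const char* shorty) {
  return shorty[0] == 'L' || shorty[0] == '[';
}

int main() {
  assert(ReturnsReference("LI"));   // returns an object, takes an int
  assert(!ReturnsReference("V"));   // void return
  assert(!ReturnsReference("JL"));  // returns a long, takes a reference
  return 0;
}
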
@@ -1069,7 +1076,6 @@ extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method,
}
}
- DCHECK(!method->IsRuntimeMethod());
instrumentation->PushInstrumentationStackFrame(self,
is_static ? nullptr : h_object.Get(),
method,
@@ -2594,10 +2600,6 @@ extern "C" uint64_t artInvokePolymorphic(mirror::Object* raw_receiver, Thread* s
// Pop transition record.
self->PopManagedStackFragment(fragment);
- bool is_ref = (shorty[0] == 'L');
- Runtime::Current()->GetInstrumentation()->PushDeoptContextIfNeeded(
- self, DeoptimizationMethodType::kDefault, is_ref, result);
-
return result.GetJ();
}
@@ -2656,10 +2658,6 @@ extern "C" uint64_t artInvokeCustom(uint32_t call_site_idx, Thread* self, ArtMet
// Pop transition record.
self->PopManagedStackFragment(fragment);
- bool is_ref = (shorty[0] == 'L');
- Runtime::Current()->GetInstrumentation()->PushDeoptContextIfNeeded(
- self, DeoptimizationMethodType::kDefault, is_ref, result);
-
return result.GetJ();
}
@@ -2690,7 +2688,7 @@ extern "C" int artMethodExitHook(Thread* self,
instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
DCHECK(instr->AreExitStubsInstalled());
bool is_ref;
- JValue return_value = instr->GetReturnValue(method, &is_ref, gpr_result, fpr_result);
+ JValue return_value = instr->GetReturnValue(self, method, &is_ref, gpr_result, fpr_result);
bool deoptimize = false;
{
StackHandleScope<1> hs(self);
@@ -2705,7 +2703,7 @@ extern "C" int artMethodExitHook(Thread* self,
// back to an upcall.
NthCallerVisitor visitor(self, 1, /*include_runtime_and_upcalls=*/false);
visitor.WalkStack(true);
- deoptimize = instr->ShouldDeoptimizeCaller(self, visitor);
+ deoptimize = instr->ShouldDeoptimizeMethod(self, visitor);
// If we need a deoptimization MethodExitEvent will be called by the interpreter when it
// re-executes the return instruction.
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 9c2f7f637d..1e328a31d2 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -55,9 +55,6 @@
#include "thread_list.h"
namespace art {
-extern "C" NO_RETURN void artDeoptimize(Thread* self);
-extern "C" NO_RETURN void artDeliverPendingExceptionFromCode(Thread* self);
-
namespace instrumentation {
constexpr bool kVerboseInstrumentation = false;
@@ -485,16 +482,15 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
instrumentation_exit_pc_(instrumentation_exit_pc),
reached_existing_instrumentation_frames_(false),
force_deopt_id_(force_deopt_id),
- deopt_all_frames_(deopt_all_frames),
- runtime_methods_need_deopt_check_(false) {}
+ deopt_all_frames_(deopt_all_frames) {}
bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
ArtMethod* m = GetMethod();
- if (m == nullptr || m->IsRuntimeMethod()) {
+ if (m == nullptr) {
if (kVerboseInstrumentation) {
- LOG(INFO) << " Skipping upcall / runtime method. Frame " << GetFrameId();
+ LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
}
- return true; // Ignore upcalls and runtime methods.
+ return true; // Ignore upcalls.
}
if (GetCurrentQuickFrame() == nullptr) {
if (kVerboseInstrumentation) {
@@ -511,6 +507,11 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
auto it = instrumentation_stack_->find(GetReturnPcAddr());
CHECK(it != instrumentation_stack_->end());
const InstrumentationStackFrame& frame = it->second;
+ if (m->IsRuntimeMethod()) {
+ if (frame.interpreter_entry_) {
+ return true;
+ }
+ }
// We've reached a frame which has already been installed with instrumentation exit stub.
// We should have already installed instrumentation or be interpreter on previous frames.
@@ -533,14 +534,12 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
if (deopt_all_frames_) {
- runtime_methods_need_deopt_check_ = true;
SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kDebug);
}
return true;
}
CHECK_NE(return_pc, 0U);
- DCHECK(!m->IsRuntimeMethod());
- if (UNLIKELY(reached_existing_instrumentation_frames_)) {
+ if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
// We already saw an existing instrumentation frame so this should be a runtime-method
// inserted by the interpreter or runtime.
std::string thread_name;
@@ -551,9 +550,21 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
<< " return_pc is " << std::hex << return_pc;
UNREACHABLE();
}
-
+ if (m->IsRuntimeMethod()) {
+ size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
+ ArtMethod** caller_frame = reinterpret_cast<ArtMethod**>(
+ reinterpret_cast<uint8_t*>(GetCurrentQuickFrame()) + frame_size);
+ if (*caller_frame != nullptr && (*caller_frame)->IsNative()) {
+ // Do not install instrumentation exit on return to JNI stubs.
+ return true;
+ }
+ }
InstrumentationStackFrame instrumentation_frame(
- GetThisObject().Ptr(), m, return_pc, false, force_deopt_id_);
+ m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
+ m,
+ return_pc,
+ false,
+ force_deopt_id_);
if (kVerboseInstrumentation) {
LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
}
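
For readers unfamiliar with the frame arithmetic added above: a quick frame begins with the ArtMethod* of the method that owns it, and the caller's frame starts frame_size bytes higher on the stack. A simplified, self-contained sketch with hypothetical types (not ART code):

#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for ArtMethod, just to keep the sketch compilable.
struct FakeMethod { bool is_native = false; };

// The caller's frame sits frame_size bytes above the current one, so its
// leading method slot can be read at that address.
static FakeMethod** GetCallerFrame(FakeMethod** current_frame, size_t frame_size) {
  return reinterpret_cast<FakeMethod**>(
      reinterpret_cast<uint8_t*>(current_frame) + frame_size);
}
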
@@ -573,7 +584,6 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
bool reached_existing_instrumentation_frames_;
uint64_t force_deopt_id_;
bool deopt_all_frames_;
- bool runtime_methods_need_deopt_check_;
};
if (kVerboseInstrumentation) {
std::string thread_name;
@@ -591,10 +601,6 @@ void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frame
deopt_all_frames);
visitor.WalkStack(true);
- if (visitor.runtime_methods_need_deopt_check_) {
- thread->SetDeoptCheckRequired(true);
- }
-
if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
// Create method enter events for all methods currently on the thread's stack. We only do this
// if we haven't already processed the method enter events.
@@ -616,16 +622,14 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
struct RestoreStackVisitor final : public StackVisitor {
- RestoreStackVisitor(Thread* thread_in,
- uintptr_t instrumentation_exit_pc,
+ RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
Instrumentation* instrumentation)
: StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
thread_(thread_in),
instrumentation_exit_pc_(instrumentation_exit_pc),
instrumentation_(instrumentation),
instrumentation_stack_(thread_in->GetInstrumentationStack()),
- frames_removed_(0),
- runtime_methods_need_deopt_check_(false) {}
+ frames_removed_(0) {}
bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
if (instrumentation_stack_->size() == 0) {
@@ -639,19 +643,11 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
}
return true; // Ignore shadow frames.
}
- if (m == nullptr || m->IsRuntimeMethod()) {
+ if (m == nullptr) {
if (kVerboseInstrumentation) {
- LOG(INFO) << " Skipping upcall / runtime method. Frame " << GetFrameId();
+ LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
}
- return true; // Ignore upcalls and runtime methods.
- }
- const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
- // For JITed frames, we don't install instrumentation stubs.
- if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
- if (IsShouldDeoptimizeFlagForDebugSet()) {
- runtime_methods_need_deopt_check_ = true;
- }
- return true;
+ return true; // Ignore upcalls.
}
auto it = instrumentation_stack_->find(GetReturnPcAddr());
if (it != instrumentation_stack_->end()) {
@@ -688,7 +684,6 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
Instrumentation* const instrumentation_;
std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
size_t frames_removed_;
- bool runtime_methods_need_deopt_check_;
};
if (kVerboseInstrumentation) {
std::string thread_name;
@@ -703,10 +698,6 @@ static void InstrumentationRestoreStack(Thread* thread, void* arg)
reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
visitor.WalkStack(true);
- DCHECK_IMPLIES(visitor.runtime_methods_need_deopt_check_, thread->IsDeoptCheckRequired());
- if (!visitor.runtime_methods_need_deopt_check_) {
- thread->SetDeoptCheckRequired(false);
- }
CHECK_EQ(visitor.frames_removed_, stack->size());
stack->clear();
}
@@ -1530,18 +1521,83 @@ DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod*
return DeoptimizationMethodType::kDefault;
}
-JValue Instrumentation::GetReturnValue(ArtMethod* method,
- bool* is_ref,
- uint64_t* gpr_result,
- uint64_t* fpr_result) {
+// Try to get the shorty of a runtime method if it's an invocation stub.
+static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
+ char shorty = 'V';
+ StackVisitor::WalkStack(
+ [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
+ ArtMethod* m = stack_visitor->GetMethod();
+ if (m == nullptr || m->IsRuntimeMethod()) {
+ return true;
+ }
+ // The first Java method.
+ if (m->IsNative()) {
+ // Use JNI method's shorty for the jni stub.
+ shorty = m->GetShorty()[0];
+ } else if (m->IsProxyMethod()) {
+ // Proxy method just invokes its proxied method via
+ // art_quick_proxy_invoke_handler.
+ shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
+ } else {
+ const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
+ if (instr.IsInvoke()) {
+ uint16_t method_index = static_cast<uint16_t>(instr.VRegB());
+ const DexFile* dex_file = m->GetDexFile();
+ if (interpreter::IsStringInit(dex_file, method_index)) {
+ // Invoking string init constructor is turned into invoking
+ // StringFactory.newStringFromChars() which returns a string.
+ shorty = 'L';
+ } else {
+ shorty = dex_file->GetMethodShorty(method_index)[0];
+ }
+
+ } else {
+ // It could be that a non-invoke opcode invokes a stub, which in turn
+ // invokes Java code. In such cases, we should never expect a return
+ // value from the stub.
+ }
+ }
+ // Stop stack walking since we've seen a Java frame.
+ return false;
+ },
+ thread,
+ /* context= */ nullptr,
+ art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
+ return shorty;
+}
+
+JValue Instrumentation::GetReturnValue(
+ Thread* self, ArtMethod* method, bool* is_ref, uint64_t* gpr_result, uint64_t* fpr_result) {
uint32_t length;
const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
+ char return_shorty;
// Runtime method does not call into MethodExitEvent() so there should not be
  // a suspension point below.
ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
- DCHECK(!method->IsRuntimeMethod());
- char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
+ if (method->IsRuntimeMethod()) {
+ Runtime* runtime = Runtime::Current();
+ if (method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit) &&
+ method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck)) {
+ // If the caller is at an invocation point and the runtime method is not
+ // for clinit, we need to pass return results to the caller.
+ // We need the correct shorty to decide whether we need to pass the return
+ // result for deoptimization below.
+ return_shorty = GetRuntimeMethodShorty(self);
+ } else {
+ // Some runtime methods such as allocations, unresolved field getters, etc.
+      // have a return value. We don't need to set return_value since MethodExitEvent()
+ // below isn't called for runtime methods. Deoptimization doesn't need the
+ // value either since the dex instruction will be re-executed by the
+ // interpreter, except these two cases:
+ // (1) For an invoke, which is handled above to get the correct shorty.
+ // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
+ // idempotent. However there is no return value for it anyway.
+ return_shorty = 'V';
+ }
+ } else {
+ return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
+ }
*is_ref = return_shorty == '[' || return_shorty == 'L';
JValue return_value;
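
The remainder of GetReturnValue() (not shown in this hunk) reads the raw return value from either the FPR or the GPR result slot depending on the shorty. Roughly, and with a hypothetical JValue stand-in rather than the real art::JValue:

#include <cstdint>

// Hypothetical stand-in for art::JValue, wide enough to hold any return type.
union FakeJValue {
  int64_t j;
  double d;
  uint64_t raw;
};

// Floating-point return shorties ('F'/'D') come back in the FPR result slot,
// everything else (including references) in the GPR slot.
static FakeJValue ReadReturn(char return_shorty, uint64_t gpr_result, uint64_t fpr_result) {
  FakeJValue v;
  v.raw = (return_shorty == 'F' || return_shorty == 'D') ? fpr_result : gpr_result;
  return v;
}
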
@@ -1555,127 +1611,27 @@ JValue Instrumentation::GetReturnValue(ArtMethod* method,
return return_value;
}
-bool Instrumentation::PushDeoptContextIfNeeded(Thread* self,
- DeoptimizationMethodType deopt_type,
- bool is_ref,
- const JValue& return_value)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- if (self->IsExceptionPending()) {
- return false;
- }
-
- ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
- DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
- if (!ShouldDeoptimizeCaller(self, sp)) {
- return false;
- }
-
- // TODO(mythria): The current deopt behaviour is we just re-execute the
- // alloc instruction so we don't need the return value. For instrumentation
- // related deopts, we actually don't need to and can use the result we got
- // here. Since this is a debug only feature it is not very important but
- // consider reusing the result in future.
- self->PushDeoptimizationContext(
- return_value, is_ref, nullptr, /* from_code= */ false, deopt_type);
- self->SetException(Thread::GetDeoptimizationException());
- return true;
-}
-
-void Instrumentation::DeoptimizeIfNeeded(Thread* self,
- ArtMethod** sp,
- DeoptimizationMethodType type,
- JValue return_value,
- bool is_reference) {
- if (self->IsAsyncExceptionPending() || ShouldDeoptimizeCaller(self, sp)) {
- self->PushDeoptimizationContext(return_value,
- is_reference,
- nullptr,
- /* from_code= */ false,
- type);
- artDeoptimize(self);
- }
-}
-
-bool Instrumentation::NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method) {
- return (method != nullptr) &&
- (InterpreterStubsInstalled() ||
- IsDeoptimized(method) ||
- self->IsForceInterpreter() ||
- // NB Since structurally obsolete compiled methods might have the offsets of
- // methods/fields compiled in we need to go back to interpreter whenever we hit
- // them.
- method->GetDeclaringClass()->IsObsoleteObject() ||
- Dbg::IsForcedInterpreterNeededForUpcall(self, method));
-}
-
-bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) {
- ArtMethod* runtime_method = *sp;
- DCHECK(runtime_method->IsRuntimeMethod());
- QuickMethodFrameInfo frame_info = Runtime::Current()->GetRuntimeMethodFrameInfo(runtime_method);
-
- uintptr_t caller_sp = reinterpret_cast<uintptr_t>(sp) + frame_info.FrameSizeInBytes();
- ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
- uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + frame_info.GetReturnPcOffset();
- uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);
-
- if (caller == nullptr ||
- caller->IsNative() ||
- caller_pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
- // If caller_pc is QuickInstrumentationExit then deoptimization will be handled by the
- // instrumentation exit trampoline so we don't need to handle deoptimizations here.
- // We need to check for a deoptimization here because when a redefinition happens it is
- // not safe to use any compiled code because the field offsets might change. For native
- // methods, we don't embed any field offsets so no need to check for a deoptimization.
- // If the caller is null we don't need to do anything. This can happen when the caller
- // is being interpreted by the switch interpreter (when called from
- // artQuickToInterpreterBridge) / during shutdown / early startup.
- return false;
- }
-
- if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) {
- LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
- << caller->PrettyMethod();
- return false;
- }
-
- if (NeedsSlowInterpreterForMethod(self, caller)) {
- return true;
- }
-
- // Non java debuggable apps don't support redefinition and hence it isn't required to check if
- // frame needs to be deoptimized.
- if (!Runtime::Current()->IsJavaDebuggable()) {
- return false;
- }
-
+bool Instrumentation::ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor) {
bool should_deoptimize_frame = false;
- const OatQuickMethodHeader* header = caller->GetOatQuickMethodHeader(caller_pc);
+ const OatQuickMethodHeader* header = visitor.GetCurrentOatQuickMethodHeader();
if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
- DCHECK(header->IsOptimized());
- uint8_t* should_deopt_flag_addr =
- reinterpret_cast<uint8_t*>(caller_sp) + header->GetShouldDeoptimizeFlagOffset();
- if ((*should_deopt_flag_addr & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0) {
+ uint8_t should_deopt_flag = visitor.GetShouldDeoptimizeFlag();
+ // DeoptimizeFlag could be set for debugging or for CHA invalidations.
+ // Deoptimize here only if it was requested for debugging. CHA
+ // invalidations are handled in the JITed code.
+ if ((should_deopt_flag & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0) {
should_deoptimize_frame = true;
}
}
- return should_deoptimize_frame;
-}
-
-bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, const NthCallerVisitor& visitor) {
- bool should_deoptimize_frame = false;
- if (visitor.caller != nullptr) {
- const OatQuickMethodHeader* header = visitor.GetCurrentOatQuickMethodHeader();
- if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
- uint8_t should_deopt_flag = visitor.GetShouldDeoptimizeFlag();
- // DeoptimizeFlag could be set for debugging or for CHA invalidations.
- // Deoptimize here only if it was requested for debugging. CHA
- // invalidations are handled in the JITed code.
- if ((should_deopt_flag & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0) {
- should_deoptimize_frame = true;
- }
- }
- }
- return NeedsSlowInterpreterForMethod(self, visitor.caller) || should_deoptimize_frame;
+ return (visitor.caller != nullptr) &&
+ (InterpreterStubsInstalled() || IsDeoptimized(visitor.caller) ||
+ self->IsForceInterpreter() ||
+ // NB Since structurally obsolete compiled methods might have the offsets of
+ // methods/fields compiled in we need to go back to interpreter whenever we hit
+ // them.
+ visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
+ Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller) ||
+ should_deoptimize_frame);
}
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
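
The kDebug test in ShouldDeoptimizeMethod() above is a plain bit-mask check on the per-frame should_deoptimize flag. A minimal, self-contained illustration follows; the enum values are invented for the sketch and only the masking pattern matches the code above.

#include <cstdint>

// Hypothetical flag values for the sketch only; the real DeoptimizeFlagValue
// encoding lives in the ART runtime and is not reproduced here.
enum class SketchDeoptimizeFlagValue : uint8_t {
  kCHA   = 1u << 0,  // set by a class-hierarchy-analysis invalidation
  kDebug = 1u << 1,  // set when a debugger or redefinition requests deopt
};

// Only the debug bit forces a deoptimization from here; CHA invalidations are
// handled by the JITed code itself, as the comment above notes.
static bool ShouldDeoptForDebug(uint8_t should_deopt_flag) {
  return (should_deopt_flag & static_cast<uint8_t>(SketchDeoptimizeFlagValue::kDebug)) != 0;
}
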
@@ -1700,19 +1656,19 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
self->VerifyStack();
ArtMethod* method = instrumentation_frame.method_;
- DCHECK(!method->IsRuntimeMethod());
bool is_ref;
- JValue return_value = GetReturnValue(method, &is_ref, gpr_result, fpr_result);
+ JValue return_value = GetReturnValue(self, method, &is_ref, gpr_result, fpr_result);
StackHandleScope<1> hs(self);
MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
if (is_ref) {
// Take a handle to the return value so we won't lose it if we suspend.
- DCHECK_ALIGNED(return_value.GetL(), kObjectAlignment);
+      // FIXME: The `is_ref` is often guessed wrong, so even the object alignment
+      // assertion would fail for some tests. See b/204766614.
+ // DCHECK_ALIGNED(return_value.GetL(), kObjectAlignment);
res.Assign(return_value.GetL());
}
- if (!instrumentation_frame.interpreter_entry_) {
- DCHECK(!method->IsRuntimeMethod());
+ if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
// Note that sending the event may change the contents of *return_pc_addr.
MethodExitEvent(self, instrumentation_frame.method_, OptionalFrame{}, return_value);
}
@@ -1724,7 +1680,7 @@ TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
// Check if we forced all threads to deoptimize in the time between this frame being created and
// now.
bool should_deoptimize_frame = instrumentation_frame.force_deopt_id_ != current_force_deopt_id_;
- bool deoptimize = ShouldDeoptimizeCaller(self, visitor) || should_deoptimize_frame;
+ bool deoptimize = ShouldDeoptimizeMethod(self, visitor) || should_deoptimize_frame;
if (is_ref) {
// Restore the return value if it's a reference since it might have moved.
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index c63e73eeea..b1631091ae 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -31,7 +31,6 @@
#include "base/macros.h"
#include "base/safe_map.h"
#include "gc_root.h"
-#include "jvalue.h"
#include "offsets.h"
namespace art {
@@ -480,29 +479,12 @@ class Instrumentation {
void ExceptionHandledEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
REQUIRES_SHARED(Locks::mutator_lock_);
- JValue GetReturnValue(ArtMethod* method, bool* is_ref, uint64_t* gpr_result, uint64_t* fpr_result)
- REQUIRES_SHARED(Locks::mutator_lock_);
- bool PushDeoptContextIfNeeded(Thread* self,
- DeoptimizationMethodType deopt_type,
- bool is_ref,
- const JValue& result) REQUIRES_SHARED(Locks::mutator_lock_);
- void DeoptimizeIfNeeded(Thread* self,
- ArtMethod** sp,
- DeoptimizationMethodType type,
- JValue result,
- bool is_ref) REQUIRES_SHARED(Locks::mutator_lock_);
- // TODO(mythria): Update uses of ShouldDeoptimizeCaller that takes a visitor by a method that
- // doesn't need to walk the stack. This is used on method exits to check if the caller needs a
- // deoptimization.
- bool ShouldDeoptimizeCaller(Thread* self, const NthCallerVisitor& visitor)
- REQUIRES_SHARED(Locks::mutator_lock_);
- // This returns if the caller of runtime method requires a deoptimization. This checks both if the
- // method requires a deopt or if this particular frame needs a deopt because of a class
- // redefinition.
- bool ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_);
- // This returns if the specified method requires a deoptimization. This doesn't account if a stack
- // frame involving this method requires a deoptimization.
- bool NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method)
+ JValue GetReturnValue(Thread* self,
+ ArtMethod* method,
+ bool* is_ref,
+ uint64_t* gpr_result,
+ uint64_t* fpr_result) REQUIRES_SHARED(Locks::mutator_lock_);
+ bool ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
// Called when an instrumented method is entered. The intended link register (lr) is saved so
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index dfb9f3e4f2..38c94abf06 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -476,7 +476,6 @@ void EnterInterpreterFromDeoptimize(Thread* self,
const uint32_t dex_pc = shadow_frame->GetDexPC();
uint32_t new_dex_pc = dex_pc;
if (UNLIKELY(self->IsExceptionPending())) {
- DCHECK(self->GetException() != Thread::GetDeoptimizationException());
// If we deoptimize from the QuickExceptionHandler, we already reported the exception throw
// event to the instrumentation. Skip throw listeners for the first frame. The deopt check
// should happen after the throw listener is called as throw listener can trigger a
diff --git a/runtime/oat.h b/runtime/oat.h
index 14b389d670..462d41cdf0 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@ class InstructionSetFeatures;
class PACKED(4) OatHeader {
public:
static constexpr std::array<uint8_t, 4> kOatMagic { { 'o', 'a', 't', '\n' } };
- // Last oat version changed reason: Update deoptimization from runtime methods.
- static constexpr std::array<uint8_t, 4> kOatVersion { { '2', '2', '6', '\0' } };
+ // Last oat version changed reason: Revert^4 "bss support for inlining BCP into non-BCP".
+ static constexpr std::array<uint8_t, 4> kOatVersion { { '2', '2', '5', '\0' } };
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
static constexpr const char* kDebuggableKey = "debuggable";
diff --git a/runtime/oat_quick_method_header.h b/runtime/oat_quick_method_header.h
index 0bbf23fc27..e347588f3e 100644
--- a/runtime/oat_quick_method_header.h
+++ b/runtime/oat_quick_method_header.h
@@ -28,11 +28,6 @@ namespace art {
class ArtMethod;
-// Size in bytes of the should_deoptimize flag on stack.
-// We just need 4 bytes for our purpose regardless of the architecture. Frame size
-// calculation will automatically do alignment for the final frame size.
-static constexpr size_t kShouldDeoptimizeFlagSize = 4;
-
// OatQuickMethodHeader precedes the raw code chunk generated by the compiler.
class PACKED(4) OatQuickMethodHeader {
public:
@@ -150,17 +145,6 @@ class PACKED(4) OatQuickMethodHeader {
return CodeInfo::DecodeFrameInfo(GetOptimizedCodeInfoPtr());
}
- size_t GetShouldDeoptimizeFlagOffset() const {
- DCHECK(IsOptimized());
- QuickMethodFrameInfo frame_info = GetFrameInfo();
- size_t frame_size = frame_info.FrameSizeInBytes();
- size_t core_spill_size =
- POPCOUNT(frame_info.CoreSpillMask()) * GetBytesPerGprSpillLocation(kRuntimeISA);
- size_t fpu_spill_size =
- POPCOUNT(frame_info.FpSpillMask()) * GetBytesPerFprSpillLocation(kRuntimeISA);
- return frame_size - core_spill_size - fpu_spill_size - kShouldDeoptimizeFlagSize;
- }
-
uintptr_t ToNativeQuickPc(ArtMethod* method,
const uint32_t dex_pc,
bool is_for_catch_handler,
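
The GetShouldDeoptimizeFlagOffset() removed above places the 4-byte flag just below the spilled registers at the top of an optimized frame. A self-contained sketch of the same arithmetic; the two bytes-per-spill parameters stand in for the per-ISA GetBytesPerGprSpillLocation()/GetBytesPerFprSpillLocation() helpers, and __builtin_popcount (a GCC/Clang builtin) replaces art::POPCOUNT.

#include <cstddef>
#include <cstdint>

// SP-relative offset of the flag: frame size minus the core spill area, the
// FP spill area, and the flag itself.
static size_t ShouldDeoptimizeFlagOffsetSketch(size_t frame_size_in_bytes,
                                               uint32_t core_spill_mask,
                                               uint32_t fp_spill_mask,
                                               size_t bytes_per_core_spill,
                                               size_t bytes_per_fp_spill) {
  constexpr size_t kShouldDeoptimizeFlagSize = 4;  // mirrors the constant moved to stack.h below
  size_t core_spill_size = __builtin_popcount(core_spill_mask) * bytes_per_core_spill;
  size_t fpu_spill_size = __builtin_popcount(fp_spill_mask) * bytes_per_fp_spill;
  return frame_size_in_bytes - core_spill_size - fpu_spill_size - kShouldDeoptimizeFlagSize;
}
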
diff --git a/runtime/stack.h b/runtime/stack.h
index bfda57b136..1b00b54acb 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -58,6 +58,11 @@ enum VRegKind {
};
std::ostream& operator<<(std::ostream& os, VRegKind rhs);
+// Size in bytes of the should_deoptimize flag on stack.
+// We just need 4 bytes for our purpose regardless of the architecture. Frame size
+// calculation will automatically do alignment for the final frame size.
+static constexpr size_t kShouldDeoptimizeFlagSize = 4;
+
/*
* Our current stack layout.
* The Dalvik registers come first, followed by the
@@ -301,11 +306,6 @@ class StackVisitor {
return *GetShouldDeoptimizeFlagAddr();
}
- bool IsShouldDeoptimizeFlagForDebugSet() const REQUIRES_SHARED(Locks::mutator_lock_) {
- uint8_t should_deopt_flag = GetShouldDeoptimizeFlag();
- return (should_deopt_flag & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0;
- }
-
private:
// Private constructor known in the case that num_frames_ has already been computed.
StackVisitor(Thread* thread,
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 97cfb7a30c..78ba26dec0 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -272,7 +272,6 @@ void Thread::PushDeoptimizationContext(const JValue& return_value,
ObjPtr<mirror::Throwable> exception,
bool from_code,
DeoptimizationMethodType method_type) {
- DCHECK(exception != Thread::GetDeoptimizationException());
DeoptimizationContextRecord* record = new DeoptimizationContextRecord(
return_value,
is_reference,
@@ -3697,9 +3696,6 @@ void Thread::QuickDeliverException() {
ObjPtr<mirror::Throwable> exception = GetException();
CHECK(exception != nullptr);
if (exception == GetDeoptimizationException()) {
- // This wasn't a real exception, so just clear it here. If there was an actual exception it
- // will be recorded in the DeoptimizationContext and it will be restored later.
- ClearException();
artDeoptimize(this);
UNREACHABLE();
}
diff --git a/runtime/thread.h b/runtime/thread.h
index b32e3c2637..dd8b061b95 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -766,13 +766,6 @@ class Thread {
OFFSETOF_MEMBER(tls_32bit_sized_values, is_gc_marking));
}
- template <PointerSize pointer_size>
- static constexpr ThreadOffset<pointer_size> DeoptCheckRequiredOffset() {
- return ThreadOffset<pointer_size>(
- OFFSETOF_MEMBER(Thread, tls32_) +
- OFFSETOF_MEMBER(tls_32bit_sized_values, is_deopt_check_required));
- }
-
static constexpr size_t IsGcMarkingSize() {
return sizeof(tls32_.is_gc_marking);
}
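
The OFFSETOF_MEMBER pattern removed above is how thread.h exposes byte offsets of thread-local fields so that assembly entrypoints can address them relative to the Thread register. A self-contained sketch of the idea with hypothetical field names:

#include <cstddef>
#include <cstdint>

// Hypothetical layout, only to show the two-step offset: first to the packed
// 32-bit block inside the thread object, then to the field inside that block.
struct FakeTls32 {
  uint32_t is_gc_marking;
  uint32_t interrupted;
};

struct FakeThread {
  FakeTls32 tls32_;
};

static constexpr size_t IsGcMarkingOffsetSketch() {
  return offsetof(FakeThread, tls32_) + offsetof(FakeTls32, is_gc_marking);
}
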
@@ -1024,10 +1017,6 @@ class Thread {
void SetIsGcMarkingAndUpdateEntrypoints(bool is_marking);
- bool IsDeoptCheckRequired() const { return tls32_.is_deopt_check_required; }
-
- void SetDeoptCheckRequired(bool flag) { tls32_.is_deopt_check_required = flag; }
-
bool GetWeakRefAccessEnabled() const; // Only safe for current thread.
void SetWeakRefAccessEnabled(bool enabled) {
@@ -1723,7 +1712,6 @@ class Thread {
thread_exit_check_count(0),
is_transitioning_to_runnable(false),
is_gc_marking(false),
- is_deopt_check_required(false),
weak_ref_access_enabled(WeakRefAccessState::kVisiblyEnabled),
disable_thread_flip_count(0),
user_code_suspend_count(0),
@@ -1778,12 +1766,6 @@ class Thread {
// GC roots.
bool32_t is_gc_marking;
- // True if we need to check for deoptimization when returning from the runtime functions. This
- // is required only when a class is redefined to prevent executing code that has field offsets
- // embedded. For non-debuggable apps redefinition is not allowed and this flag should always be
- // set to false.
- bool32_t is_deopt_check_required;
-
// Thread "interrupted" status; stays raised until queried or thrown.
Atomic<bool32_t> interrupted;
diff --git a/tools/cpp-define-generator/globals.def b/tools/cpp-define-generator/globals.def
index 8dcc023cee..2572ea6f9b 100644
--- a/tools/cpp-define-generator/globals.def
+++ b/tools/cpp-define-generator/globals.def
@@ -28,7 +28,6 @@
#include "mirror/object_reference.h"
#include "runtime_globals.h"
#include "stack.h"
-#include "entrypoints/quick/callee_save_frame.h"
#endif
ASM_DEFINE(ACCESS_FLAGS_METHOD_IS_NATIVE,
@@ -83,6 +82,3 @@ ASM_DEFINE(STD_MEMORY_ORDER_RELAXED,
std::memory_order_relaxed)
ASM_DEFINE(STACK_OVERFLOW_RESERVED_BYTES,
GetStackOverflowReservedBytes(art::kRuntimeISA))
-ASM_DEFINE(CALLEE_SAVE_EVERYTHING_NUM_CORE_SPILLS,
- art::POPCOUNT(art::RuntimeCalleeSaveFrame::GetCoreSpills(
- art::CalleeSaveType::kSaveEverything)))
diff --git a/tools/cpp-define-generator/thread.def b/tools/cpp-define-generator/thread.def
index d796542a32..bae92009b2 100644
--- a/tools/cpp-define-generator/thread.def
+++ b/tools/cpp-define-generator/thread.def
@@ -37,8 +37,6 @@ ASM_DEFINE(THREAD_INTERPRETER_CACHE_SIZE_SHIFT,
(art::WhichPowerOf2(sizeof(art::InterpreterCache::Entry)) - 2))
ASM_DEFINE(THREAD_IS_GC_MARKING_OFFSET,
art::Thread::IsGcMarkingOffset<art::kRuntimePointerSize>().Int32Value())
-ASM_DEFINE(THREAD_DEOPT_CHECK_REQUIRED_OFFSET,
- art::Thread::DeoptCheckRequiredOffset<art::kRuntimePointerSize>().Int32Value())
ASM_DEFINE(THREAD_LOCAL_ALLOC_STACK_END_OFFSET,
art::Thread::ThreadLocalAllocStackEndOffset<art::kRuntimePointerSize>().Int32Value())
ASM_DEFINE(THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET,