Fix CFI information for x86.
With this change, I can now enable debug malloc fully.
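
The rule these adjustments follow: every instruction that grows the
stack (pushl, or subl on %esp) needs a CFI_ADJUST_CFA_OFFSET with the
positive size, and every instruction that shrinks it (addl on %esp,
popl) needs the matching negative size, so the unwinder's idea of the
CFA keeps tracking %esp at every point. A minimal sketch of that
bookkeeping, assuming CFI_ADJUST_CFA_OFFSET expands to the plain
.cfi_adjust_cfa_offset directive (the stub and helper names below are
made up for illustration and are not part of this change):

    some_stub:                       // hypothetical stub, illustration only
        .cfi_startproc
        subl $8, %esp                // align stack for the call
        .cfi_adjust_cfa_offset 8     // stack grew by 8
        pushl %eax                   // pass one argument
        .cfi_adjust_cfa_offset 4     // every push is matched...
        call some_helper             // hypothetical callee
        addl $12, %esp               // pop argument and padding
        .cfi_adjust_cfa_offset -12   // ...and so is every pop/add on %esp
        ret
        .cfi_endproc

Without the negative adjustments after the addl instructions (and the
missing +4 after the pushl of Thread::Current() in
jni_entrypoints_x86.S), the recorded CFA is wrong for everything that
follows, so unwinding through these entrypoints produces bogus
backtraces.
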
Bug: 19071092
(cherry picked from commit 9f66589f3ca6055501c99333a19b69349b8f8568)
Change-Id: I04d477d0294d67c5e3a9d9147889bdffb9086bd4
diff --git a/runtime/arch/x86/jni_entrypoints_x86.S b/runtime/arch/x86/jni_entrypoints_x86.S
index 5d27e47..aca5a37 100644
--- a/runtime/arch/x86/jni_entrypoints_x86.S
+++ b/runtime/arch/x86/jni_entrypoints_x86.S
@@ -23,6 +23,7 @@
subl LITERAL(8), %esp // align stack
CFI_ADJUST_CFA_OFFSET(8)
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
+ CFI_ADJUST_CFA_OFFSET(4)
call SYMBOL(artFindNativeMethod) // (Thread*)
addl LITERAL(12), %esp // remove argument & padding
CFI_ADJUST_CFA_OFFSET(-12)
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index c9bc977..f6c7649 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1111,7 +1111,7 @@
POP eax // pop arguments
POP ecx
addl LITERAL(4), %esp
- CFI_ADJUST_CFA_OFFSET(-12)
+ CFI_ADJUST_CFA_OFFSET(-4)
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx // save all registers as basis for long jump context
// Outgoing argument set up
PUSH eax // alignment padding
@@ -1183,8 +1183,8 @@
PUSH eax
#else
pushl MIRROR_OBJECT_CLASS_OFFSET(%edx) // pass arg2 - type of the value to be stored
-#endif
CFI_ADJUST_CFA_OFFSET(4)
+#endif
PUSH ebx // pass arg1 - component type of the array
call SYMBOL(artIsAssignableFromCode) // (Class* a, Class* b)
addl LITERAL(16), %esp // pop arguments
@@ -1429,6 +1429,7 @@
call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
movl %eax, %edi // remember code pointer in EDI
addl LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
test %eax, %eax // if code pointer is null goto deliver pending exception
jz 1f
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME_AND_JUMP
@@ -1559,6 +1560,7 @@
PUSH eax // Pass Method*.
call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR)
addl LITERAL(28), %esp // Pop arguments upto saved Method*.
+ CFI_ADJUST_CFA_OFFSET(-28)
movl 60(%esp), %edi // Restore edi.
movl %eax, 60(%esp) // Place code* over edi, just under return pc.
movl SYMBOL(art_quick_instrumentation_exit)@GOT(%ebx), %ebx
@@ -1578,11 +1580,13 @@
movl 52(%esp), %ebp // Restore ebp.
movl 56(%esp), %esi // Restore esi.
addl LITERAL(60), %esp // Wind stack back upto code*.
+ CFI_ADJUST_CFA_OFFSET(-60)
ret // Call method (and pop).
END_FUNCTION art_quick_instrumentation_entry
DEFINE_FUNCTION art_quick_instrumentation_exit
pushl LITERAL(0) // Push a fake return PC as there will be none on the stack.
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME ebx, ebx
mov %esp, %ecx // Remember SP
subl LITERAL(8), %esp // Save float return value.
@@ -1611,6 +1615,7 @@
CFI_ADJUST_CFA_OFFSET(-8)
RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
addl LITERAL(4), %esp // Remove fake return pc.
+ CFI_ADJUST_CFA_OFFSET(-4)
jmp *%ecx // Return.
END_FUNCTION art_quick_instrumentation_exit
@@ -1619,7 +1624,7 @@
* will long jump to the upcall with a special exception of -1.
*/
DEFINE_FUNCTION art_quick_deoptimize
- pushl %ebx // Entry point for a jump. Fake that we were called.
+ PUSH ebx // Entry point for a jump. Fake that we were called.
.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path) // Entry point for real calls
// from compiled slow paths.
SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
@@ -1682,8 +1687,8 @@
DEFINE_FUNCTION art_nested_signal_return
SETUP_GOT_NOSAVE ebx // sets %ebx for call into PLT
movl LITERAL(1), %ecx
- pushl %ecx // second arg to longjmp (1)
- pushl %eax // first arg to longjmp (jmp_buf)
+ PUSH ecx // second arg to longjmp (1)
+ PUSH eax // first arg to longjmp (jmp_buf)
call PLT_SYMBOL(longjmp)
int3 // won't get here.
END_FUNCTION art_nested_signal_return