-rw-r--r--  build/Android.common_build.mk                        3
-rw-r--r--  runtime/arch/arm/arm_sdiv.S                          2
-rw-r--r--  runtime/arch/arm/asm_support_arm.S                  27
-rw-r--r--  runtime/arch/arm/portable_entrypoints_arm.S          2
-rw-r--r--  runtime/arch/arm/quick_entrypoints_arm.S             4
-rw-r--r--  runtime/arch/arm64/asm_support_arm64.S              16
-rw-r--r--  runtime/arch/arm64/portable_entrypoints_arm64.S      2
-rw-r--r--  runtime/arch/arm64/quick_entrypoints_arm64.S         4
-rw-r--r--  runtime/arch/memcmp16.cc                            12
-rw-r--r--  runtime/arch/memcmp16.h                             13
-rw-r--r--  runtime/arch/memcmp16_test.cc                        2
-rw-r--r--  runtime/arch/stub_test.cc                          190
-rw-r--r--  runtime/arch/x86/asm_support_x86.S                  12
-rw-r--r--  runtime/arch/x86/portable_entrypoints_x86.S          2
-rw-r--r--  runtime/arch/x86/quick_entrypoints_x86.S            89
-rw-r--r--  runtime/arch/x86_64/asm_support_x86_64.S            35
-rw-r--r--  runtime/arch/x86_64/portable_entrypoints_x86_64.S    2
-rw-r--r--  runtime/arch/x86_64/quick_entrypoints_x86_64.S      72
18 files changed, 280 insertions, 209 deletions
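The theme of this change is symbol visibility: libart's assembly entrypoints become hidden (or protected, via the new -fvisibility=protected default) so calls within the library bind directly instead of going through the PLT. A minimal C++ illustration of what the new compiler flag implies for each definition, assuming the usual GCC/Clang visibility semantics:

    // Illustration only: under -fvisibility=protected every definition behaves
    // as if annotated like this unless overridden per symbol.
    __attribute__((visibility("protected"))) void SomeArtRuntimeFunction() {}
    // "protected" symbols stay exported, but references from inside the defining
    // shared object always bind to the local definition: no preemption, no PLT.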
diff --git a/build/Android.common_build.mk b/build/Android.common_build.mk
index d55f310384..0dcefead38 100644
--- a/build/Android.common_build.mk
+++ b/build/Android.common_build.mk
@@ -157,7 +157,8 @@ art_cflags := \
   -Wno-sign-promo \
   -Wno-unused-parameter \
   -Wstrict-aliasing \
-  -fstrict-aliasing
+  -fstrict-aliasing \
+  -fvisibility=protected
 
 ART_TARGET_CLANG_CFLAGS :=
 ART_TARGET_CLANG_CFLAGS_arm :=
diff --git a/runtime/arch/arm/arm_sdiv.S b/runtime/arch/arm/arm_sdiv.S
index 925e428444..babdbf5526 100644
--- a/runtime/arch/arm/arm_sdiv.S
+++ b/runtime/arch/arm/arm_sdiv.S
@@ -9,7 +9,7 @@
 #include "asm_support_arm.S"
 
 .section .text
-ENTRY CheckForARMSDIVInstruction
+ENTRY_NO_HIDE CheckForARMSDIVInstruction
   mov r1,#1
   // depending on the architecture, the assembler will not allow an
   // sdiv instruction, so we will have to output the bytes directly.
diff --git a/runtime/arch/arm/asm_support_arm.S b/runtime/arch/arm/asm_support_arm.S
index e1b0ce7e17..a3e3b21c7e 100644
--- a/runtime/arch/arm/asm_support_arm.S
+++ b/runtime/arch/arm/asm_support_arm.S
@@ -33,6 +33,7 @@
 .macro ENTRY name
     .thumb_func
     .type \name, #function
+    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
     .global \name
     /* Cache alignment for function entry */
     .balign 16
@@ -41,9 +42,35 @@
     .fnstart
 .endm
 
+.macro ENTRY_NO_HIDE name
+    .thumb_func
+    .type \name, #function
+    .global \name
+    /* Cache alignment for function entry */
+    .balign 16
+\name:
+    .cfi_startproc
+    .fnstart
+.endm
+
+
 .macro ARM_ENTRY name
     .arm
     .type \name, #function
+    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
+    .global \name
+    /* Cache alignment for function entry */
+    .balign 16
+\name:
+    .cfi_startproc
+    /* Ensure we get a sane starting CFA. */
+    .cfi_def_cfa sp,0
+    .fnstart
+.endm
+
+.macro ARM_ENTRY_NO_HIDE name
+    .arm
+    .type \name, #function
     .global \name
     /* Cache alignment for function entry */
     .balign 16
diff --git a/runtime/arch/arm/portable_entrypoints_arm.S b/runtime/arch/arm/portable_entrypoints_arm.S
index 98d17dc830..3491c18c37 100644
--- a/runtime/arch/arm/portable_entrypoints_arm.S
+++ b/runtime/arch/arm/portable_entrypoints_arm.S
@@ -138,7 +138,7 @@ ENTRY art_portable_resolution_trampoline
 END art_portable_resolution_trampoline
 
     .extern artPortableToInterpreterBridge
-ENTRY art_portable_to_interpreter_bridge
+ENTRY_NO_HIDE art_portable_to_interpreter_bridge
     @ Fake callee save ref and args frame set up, note portable doesn't use callee save frames.
     @ TODO: just save the registers that are needed in artPortableToInterpreterBridge.
     push {r1-r3, r5-r8, r10-r11, lr}  @ 10 words of callee saves
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 9f0db8c716..b4194f57bf 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -988,7 +988,7 @@ END art_quick_resolution_trampoline
     /*
      * Called to do a generic JNI down-call
      */
-ENTRY art_quick_generic_jni_trampoline
+ENTRY_NO_HIDE art_quick_generic_jni_trampoline
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     str r0, [sp, #0]  // Store native ArtMethod* to bottom of stack.
@@ -1083,7 +1083,7 @@ ENTRY art_quick_generic_jni_trampoline
 END art_quick_generic_jni_trampoline
 
     .extern artQuickToInterpreterBridge
-ENTRY art_quick_to_interpreter_bridge
+ENTRY_NO_HIDE art_quick_to_interpreter_bridge
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     mov     r1, r9                 @ pass Thread::Current
     mov     r2, sp                 @ pass SP
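Note the pattern in the ARM files above: ENTRY now hides its symbol by default, while the bridges and trampolines that appear to need resolution from outside the library are switched to ENTRY_NO_HIDE. A conceptual C++ analogue of the two macros (declarations invented purely for illustration):

    // Hidden: callable only within libart, reached without PLT indirection.
    extern "C" __attribute__((visibility("hidden")))
    void art_quick_lock_object();
    // Default visibility: stays in the dynamic symbol table for external users.
    extern "C" __attribute__((visibility("default")))
    void art_quick_to_interpreter_bridge();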
diff --git a/runtime/arch/arm64/asm_support_arm64.S b/runtime/arch/arm64/asm_support_arm64.S
index be167faae6..fb49460364 100644
--- a/runtime/arch/arm64/asm_support_arm64.S
+++ b/runtime/arch/arm64/asm_support_arm64.S
@@ -44,6 +44,16 @@
 .macro ENTRY name
     .type \name, #function
+    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
+    .global \name
+    /* Cache alignment for function entry */
+    .balign 16
+\name:
+    .cfi_startproc
+.endm
+
+.macro ENTRY_NO_HIDE name
+    .type \name, #function
     .global \name
     /* Cache alignment for function entry */
     .balign 16
@@ -62,4 +72,10 @@
     END \name
 .endm
 
+.macro UNIMPLEMENTED_NO_HIDE name
+  ENTRY_NO_HIDE \name
+  brk 0
+  END \name
+.endm
+
 #endif  // ART_RUNTIME_ARCH_ARM64_ASM_SUPPORT_ARM64_S_
diff --git a/runtime/arch/arm64/portable_entrypoints_arm64.S b/runtime/arch/arm64/portable_entrypoints_arm64.S
index e136885c7e..41711b5c66 100644
--- a/runtime/arch/arm64/portable_entrypoints_arm64.S
+++ b/runtime/arch/arm64/portable_entrypoints_arm64.S
@@ -25,4 +25,4 @@
 UNIMPLEMENTED art_portable_proxy_invoke_handler
 UNIMPLEMENTED art_portable_resolution_trampoline
-UNIMPLEMENTED art_portable_to_interpreter_bridge
+UNIMPLEMENTED_NO_HIDE art_portable_to_interpreter_bridge
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index ab9035ae45..2a19e27b04 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1435,7 +1435,7 @@ END art_quick_resolution_trampoline
     /*
      * Called to do a generic JNI down-call
      */
-ENTRY art_quick_generic_jni_trampoline
+ENTRY_NO_HIDE art_quick_generic_jni_trampoline
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
     str x0, [sp, #0]  // Store native ArtMethod* to bottom of stack.
@@ -1531,7 +1531,7 @@ END art_quick_generic_jni_trampoline
  * x0 = method being called/to bridge to.
  * x1..x7, d0..d7 = arguments to that method.
  */
-ENTRY art_quick_to_interpreter_bridge
+ENTRY_NO_HIDE art_quick_to_interpreter_bridge
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Set up frame and save arguments.
 
     //  x0 will contain mirror::ArtMethod* method.
diff --git a/runtime/arch/memcmp16.cc b/runtime/arch/memcmp16.cc
index 7928085221..5a3e73eebc 100644
--- a/runtime/arch/memcmp16.cc
+++ b/runtime/arch/memcmp16.cc
@@ -28,4 +28,16 @@ int32_t memcmp16_generic_static(const uint16_t* s0, const uint16_t* s1, size_t c
   return 0;
 }
 
+namespace art {
+
+namespace testing {
+
+int32_t MemCmp16Testing(const uint16_t* s0, const uint16_t* s1, size_t count) {
+  return MemCmp16(s0, s1, count);
+}
+
+}
+
+}  // namespace art
+
 #pragma GCC diagnostic warning "-Wunused-function"
diff --git a/runtime/arch/memcmp16.h b/runtime/arch/memcmp16.h
index 14dc1e3880..4b9fb8eff6 100644
--- a/runtime/arch/memcmp16.h
+++ b/runtime/arch/memcmp16.h
@@ -50,4 +50,17 @@ static inline int32_t MemCmp16(const uint16_t* s0, const uint16_t* s1, size_t co
 extern "C" int32_t memcmp16_generic_static(const uint16_t* s0, const uint16_t* s1, size_t count);
 #endif
 
+namespace art {
+
+namespace testing {
+
+// A version that is exposed and relatively "close to the metal," so that memcmp16_test can do
+// some reasonable testing. Without this, as __memcmp16 is hidden, the test cannot access the
+// implementation.
+int32_t MemCmp16Testing(const uint16_t* s0, const uint16_t* s1, size_t count);
+
+}
+
+}  // namespace art
+
 #endif  // ART_RUNTIME_ARCH_MEMCMP16_H_
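Because __memcmp16 itself is now hidden, the header above adds an exported wrapper purely for testing: the wrapper is compiled inside libart, so its call to the hidden implementation binds directly, while the test can still reach the wrapper. A sketch of the intended call pattern, mirroring the memcmp16_test.cc change that follows (s1, s2 and n stand for whatever inputs the test prepares):

    int32_t expected = memcmp16_compare(s1, s2, n);               // reference result
    int32_t computed = art::testing::MemCmp16Testing(s1, s2, n);  // exported wrapper
    ASSERT_EQ(expected, computed);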
diff --git a/runtime/arch/memcmp16_test.cc b/runtime/arch/memcmp16_test.cc
index 5747c67ea3..5ba06f82a2 100644
--- a/runtime/arch/memcmp16_test.cc
+++ b/runtime/arch/memcmp16_test.cc
@@ -139,7 +139,7 @@ static void CheckSeparate(size_t max_length, size_t min_length) {
       size_t mod_min = c1_mod < c2_mod ? c1_mod : c2_mod;
 
       int32_t expected = memcmp16_compare(s1_pot_unaligned, s2_pot_unaligned, mod_min);
-      int32_t computed = MemCmp16(s1_pot_unaligned, s2_pot_unaligned, mod_min);
+      int32_t computed = art::testing::MemCmp16Testing(s1_pot_unaligned, s2_pot_unaligned, mod_min);
 
       ASSERT_EQ(expected, computed) << "Run " << round << ", c1=" << count1 << " c2=" << count2;
 
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index 25f9a5a251..864e3f7ad0 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -17,6 +17,7 @@
 #include <cstdio>
 
 #include "common_runtime_test.h"
+#include "entrypoints/quick/quick_entrypoints_enum.h"
 #include "mirror/art_field-inl.h"
 #include "mirror/art_method-inl.h"
 #include "mirror/class-inl.h"
@@ -543,15 +544,21 @@ class StubTest : public CommonRuntimeTest {
 #endif
   }
 
+  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
+    int32_t offset;
+#ifdef __LP64__
+    offset = GetThreadOffset<8>(entrypoint).Int32Value();
+#else
+    offset = GetThreadOffset<4>(entrypoint).Int32Value();
+#endif
+    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
+  }
+
  protected:
   size_t fp_result;
 };
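With the stubs hidden, the tests can no longer take the address of the assembly symbols directly; the new GetEntrypoint helper instead reads the stub's address out of the current Thread's entrypoint table, using the 4- or 8-byte-pointer thread offset as appropriate. Typical usage, as seen throughout the rewritten tests below (arg0..arg2 stand for the test's inputs):

    // The entrypoint table in Thread is populated at runtime init, so this yields
    // the same address the hidden assembly symbol would have resolved to.
    const uintptr_t art_quick_memcpy = StubTest::GetEntrypoint(self, kQuickMemcpy);
    Invoke3(arg0, arg1, arg2, art_quick_memcpy, self);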
 
-#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_memcpy(void);
-#endif
-
 TEST_F(StubTest, Memcpy) {
 #if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
   Thread* self = Thread::Current();
@@ -564,7 +571,7 @@ TEST_F(StubTest, Memcpy) {
   }
 
   Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
-          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);
+          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
 
   EXPECT_EQ(orig[0], trg[0]);
@@ -589,15 +596,14 @@ TEST_F(StubTest, Memcpy) {
 #endif
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_lock_object(void);
-#endif
-
 TEST_F(StubTest, LockObject) {
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
   static constexpr size_t kThinLockLoops = 100;
 
   Thread* self = Thread::Current();
+
+  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
+
   // Create an object
   ScopedObjectAccess soa(self);      // garbage is created during ClassLinker::Init
@@ -609,8 +615,7 @@ TEST_F(StubTest, LockObject) {
   LockWord::LockState old_state = lock.GetState();
   EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
 
-  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
+  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
 
   LockWord lock_after = obj->GetLockWord(false);
   LockWord::LockState new_state = lock_after.GetState();
@@ -618,8 +623,7 @@ TEST_F(StubTest, LockObject) {
   EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero
 
   for (size_t i = 1; i < kThinLockLoops; ++i) {
-    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
-            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
+    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
 
     // Check we're at lock count i
@@ -635,8 +639,7 @@ TEST_F(StubTest, LockObject) {
   obj2->IdentityHashCode();
 
-  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
+  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
 
   LockWord lock_after2 = obj2->GetLockWord(false);
   LockWord::LockState new_state2 = lock_after2.GetState();
@@ -665,17 +668,15 @@ class RandGen {
 };
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_lock_object(void);
-extern "C" void art_quick_unlock_object(void);
-#endif
-
 // NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
 static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
   static constexpr size_t kThinLockLoops = 100;
 
   Thread* self = Thread::Current();
+
+  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
+  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
+
   // Create an object
   ScopedObjectAccess soa(self);      // garbage is created during ClassLinker::Init
@@ -687,8 +688,7 @@ static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
   LockWord::LockState old_state = lock.GetState();
   EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
 
-  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
-                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
+  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
 
   // This should be an illegal monitor state.
   EXPECT_TRUE(self->IsExceptionPending());
   self->ClearException();
@@ -697,15 +697,13 @@ static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
   LockWord lock_after = obj->GetLockWord(false);
   LockWord::LockState new_state = lock_after.GetState();
   EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
 
-  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
-                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
+  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
 
   LockWord lock_after2 = obj->GetLockWord(false);
   LockWord::LockState new_state2 = lock_after2.GetState();
   EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
 
-  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
-                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
+  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
 
   LockWord lock_after3 = obj->GetLockWord(false);
   LockWord::LockState new_state3 = lock_after3.GetState();
@@ -759,12 +757,12 @@ static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
     }
 
     if (lock) {
-      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
-                    reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
+      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
+                    self);
       counts[index]++;
     } else {
       test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
-                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
+                    art_quick_unlock_object, self);
       counts[index]--;
     }
 
@@ -795,8 +793,8 @@ static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
     size_t index = kNumberOfLocks - 1 - i;
     size_t count = counts[index];
     while (count > 0) {
-      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
-                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
+      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
+                    self);
       count--;
     }
 
@@ -825,6 +823,9 @@ extern "C" void art_quick_check_cast(void);
 TEST_F(StubTest, CheckCast) {
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
   Thread* self = Thread::Current();
+
+  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
+
   // Find some classes.
   ScopedObjectAccess soa(self);      // garbage is created during ClassLinker::Init
@@ -838,24 +839,24 @@ TEST_F(StubTest, CheckCast) {
   EXPECT_FALSE(self->IsExceptionPending());
 
   Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);
+          art_quick_check_cast, self);
 
   EXPECT_FALSE(self->IsExceptionPending());
 
   Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);
+          art_quick_check_cast, self);
 
   EXPECT_FALSE(self->IsExceptionPending());
 
   Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);
+          art_quick_check_cast, self);
 
   EXPECT_FALSE(self->IsExceptionPending());
 
   // TODO: Make the following work. But that would require correct managed frames.
 
   Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
-          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);
+          art_quick_check_cast, self);
 
   EXPECT_TRUE(self->IsExceptionPending());
   self->ClearException();
@@ -868,16 +869,16 @@ TEST_F(StubTest, CheckCast) {
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
-// Do not check non-checked ones, we'd need handlers and stuff...
-#endif
-
 TEST_F(StubTest, APutObj) {
   TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
 
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
   Thread* self = Thread::Current();
+
+  // Do not check non-checked ones, we'd need handlers and stuff...
+  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
+      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
+
   // Create an object
   ScopedObjectAccess soa(self);    // garbage is created during ClassLinker::Init
@@ -907,25 +908,25 @@ TEST_F(StubTest, APutObj) {
   EXPECT_FALSE(self->IsExceptionPending());
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(str_obj.Get(), array->Get(0));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(str_obj.Get(), array->Get(1));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(str_obj.Get(), array->Get(2));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(str_obj.Get(), array->Get(3));
@@ -933,25 +934,25 @@ TEST_F(StubTest, APutObj) {
   // 1.2) Assign null to array[0..3]
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(nullptr, array->Get(0));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(nullptr, array->Get(1));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(nullptr, array->Get(2));
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
   EXPECT_FALSE(self->IsExceptionPending());
   EXPECT_EQ(nullptr, array->Get(3));
@@ -972,7 +973,7 @@ TEST_F(StubTest, APutObj) {
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
           reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
 
   EXPECT_TRUE(self->IsExceptionPending());
   self->ClearException();
@@ -980,7 +981,7 @@ TEST_F(StubTest, APutObj) {
   // 2.3) Index > 0
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
 
   EXPECT_TRUE(self->IsExceptionPending());
   self->ClearException();
@@ -988,7 +989,7 @@ TEST_F(StubTest, APutObj) {
   // 3) Failure cases (obj into str[])
 
   Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
-          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
+          art_quick_aput_obj_with_null_and_bound_check, self);
 
   EXPECT_TRUE(self->IsExceptionPending());
   self->ClearException();
@@ -1024,7 +1025,7 @@ TEST_F(StubTest, AllocObject) {
     size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                             reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                             0U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
+                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                             self);
 
     EXPECT_FALSE(self->IsExceptionPending());
@@ -1038,7 +1039,7 @@ TEST_F(StubTest, AllocObject) {
     // We can use nullptr in the second argument as we do not need a method here (not used in
     // resolved/initialized cases)
     size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
+                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                             self);
 
     EXPECT_FALSE(self->IsExceptionPending());
@@ -1052,7 +1053,7 @@ TEST_F(StubTest, AllocObject) {
     // We can use nullptr in the second argument as we do not need a method here (not used in
     // resolved/initialized cases)
     size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
+                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                             self);
 
     EXPECT_FALSE(self->IsExceptionPending());
@@ -1108,7 +1109,7 @@ TEST_F(StubTest, AllocObject) {
     self->ClearException();
 
     size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
+                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                             self);
     EXPECT_TRUE(self->IsExceptionPending());
     self->ClearException();
@@ -1154,7 +1155,7 @@ TEST_F(StubTest, AllocObjectArray) {
     size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                             reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                             10U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
+                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                             self);
 
     EXPECT_FALSE(self->IsExceptionPending());
@@ -1169,7 +1170,7 @@ TEST_F(StubTest, AllocObjectArray) {
     // We can use nullptr in the second argument as we do not need a method here (not used in
     // resolved/initialized cases)
     size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
+                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                             self);
     EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
     EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
@@ -1188,7 +1189,7 @@ TEST_F(StubTest, AllocObjectArray) {
   {
     size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                             GB,  // that should fail...
-                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
+                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                             self);
 
     EXPECT_TRUE(self->IsExceptionPending());
@@ -1205,10 +1206,6 @@ TEST_F(StubTest, AllocObjectArray) {
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_string_compareto(void);
-#endif
-
 TEST_F(StubTest, StringCompareTo) {
   TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
 
@@ -1216,6 +1213,9 @@ TEST_F(StubTest, StringCompareTo) {
   // TODO: Check the "Unresolved" allocation stubs
 
   Thread* self = Thread::Current();
+
+  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
+
   ScopedObjectAccess soa(self);
   // garbage is created during ClassLinker::Init
@@ -1274,7 +1274,7 @@ TEST_F(StubTest, StringCompareTo) {
       // Test string_compareto x y
       size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                               reinterpret_cast<size_t>(s[y].Get()), 0U,
-                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);
+                              art_quick_string_compareto, self);
 
       EXPECT_FALSE(self->IsExceptionPending());
@@ -1306,11 +1306,6 @@ TEST_F(StubTest, StringCompareTo) {
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_set32_static(void);
-extern "C" void art_quick_get32_static(void);
-#endif
-
 static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1322,13 +1317,13 @@ static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>
     test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               static_cast<size_t>(values[i]),
                               0U,
-                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
+                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                               self,
                               referrer);
 
     size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            0U, 0U,
-                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
+                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                            self,
                                            referrer);
@@ -1342,11 +1337,6 @@ static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_set32_instance(void);
-extern "C" void art_quick_get32_instance(void);
-#endif
-
 static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1358,7 +1348,7 @@ static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtFiel
     test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               reinterpret_cast<size_t>(obj->Get()),
                               static_cast<size_t>(values[i]),
-                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
+                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                               self,
                               referrer);
@@ -1371,7 +1361,7 @@ static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtFiel
     size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                             reinterpret_cast<size_t>(obj->Get()),
                                             0U,
-                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
+                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                             self,
                                             referrer);
     EXPECT_EQ(res, static_cast<int32_t>(res2));
@@ -1385,8 +1375,6 @@ static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtFiel
 
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_set_obj_static(void);
-extern "C" void art_quick_get_obj_static(void);
 
 static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                  mirror::ArtMethod* referrer, StubTest* test)
@@ -1394,13 +1382,13 @@ static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* se
   test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                             reinterpret_cast<size_t>(val),
                             0U,
-                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
+                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                             self,
                             referrer);
 
   size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                          0U, 0U,
-                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
+                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                          self,
                                          referrer);
@@ -1428,9 +1416,6 @@ static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>
 
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_set_obj_instance(void);
-extern "C" void art_quick_get_obj_instance(void);
-
 static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                    mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                    StubTest* test)
@@ -1438,14 +1423,14 @@ static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object*
   test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                             reinterpret_cast<size_t>(trg),
                             reinterpret_cast<size_t>(val),
-                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
+                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                             self,
                             referrer);
 
   size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                          reinterpret_cast<size_t>(trg),
                                          0U,
-                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
+                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                          self,
                                          referrer);
@@ -1476,11 +1461,6 @@ static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtFie
 
 // TODO: Complete these tests for 32b architectures.
 
-#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
-extern "C" void art_quick_set64_static(void);
-extern "C" void art_quick_get64_static(void);
-#endif
-
 static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1491,13 +1471,13 @@ static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>
   for (size_t i = 0; i < num_values; ++i) {
     test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                values[i],
-                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
+                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                                self,
                                referrer);
 
     size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            0U, 0U,
-                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
+                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                            self,
                                            referrer);
@@ -1511,11 +1491,6 @@ static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>
 }
 
-#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
-extern "C" void art_quick_set64_instance(void);
-extern "C" void art_quick_get64_instance(void);
-#endif
-
 static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1527,7 +1502,7 @@ static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtFiel
     test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               reinterpret_cast<size_t>(obj->Get()),
                               static_cast<size_t>(values[i]),
-                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
+                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                               self,
                               referrer);
@@ -1540,7 +1515,7 @@ static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtFiel
     size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                             reinterpret_cast<size_t>(obj->Get()),
                                             0U,
-                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
+                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                             self,
                                             referrer);
     EXPECT_EQ(res, static_cast<int64_t>(res2));
@@ -1683,9 +1658,6 @@ TEST_F(StubTest, Fields64) {
   TestFields(self, this, Primitive::Type::kPrimLong);
 }
 
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_imt_conflict_trampoline(void);
-#endif
 
 TEST_F(StubTest, IMT) {
 #if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
@@ -1716,7 +1688,7 @@ TEST_F(StubTest, IMT) {
   // Patch up ArrayList.contains.
   if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
     contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
-        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
+        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
   }
 
   // List
@@ -1765,7 +1737,7 @@ TEST_F(StubTest, IMT) {
   size_t result =
       Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                    reinterpret_cast<size_t>(obj.Get()),
-                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
+                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                    self, contains_amethod.Get(),
                                    static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
@@ -1782,7 +1754,7 @@ TEST_F(StubTest, IMT) {
   result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                    reinterpret_cast<size_t>(obj.Get()),
-                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
+                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                    self, contains_amethod.Get(),
                                    static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
@@ -1795,10 +1767,6 @@ TEST_F(StubTest, IMT) {
 #endif
 }
 
-#if defined(__arm__) || defined(__aarch64__)
-extern "C" void art_quick_indexof(void);
-#endif
-
 TEST_F(StubTest, StringIndexOf) {
 #if defined(__arm__) || defined(__aarch64__)
   TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
@@ -1848,7 +1816,7 @@ TEST_F(StubTest, StringIndexOf) {
         // Test string_compareto x y
         size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
-                                reinterpret_cast<uintptr_t>(&art_quick_indexof), self);
+                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);
 
         EXPECT_FALSE(self->IsExceptionPending());
diff --git a/runtime/arch/x86/asm_support_x86.S b/runtime/arch/x86/asm_support_x86.S
index 96c2c05e35..a578023c4c 100644
--- a/runtime/arch/x86/asm_support_x86.S
+++ b/runtime/arch/x86/asm_support_x86.S
@@ -112,6 +112,7 @@
     #define PLT_SYMBOL(name) _ ## name
 #endif
 
+// Directive to hide a function symbol.
 #if defined(__APPLE__)
     #define ASM_HIDDEN .private_extern
 #else
@@ -125,6 +126,17 @@ END_MACRO
 
 MACRO1(DEFINE_FUNCTION, c_name)
     FUNCTION_TYPE(\c_name, 0)
+    ASM_HIDDEN VAR(c_name, 0)
+    .globl VAR(c_name, 0)
+    ALIGN_FUNCTION_ENTRY
+VAR(c_name, 0):
+    CFI_STARTPROC
+    // Ensure we get a sane starting CFA.
+    CFI_DEF_CFA(esp, 4)
+END_MACRO
+
+MACRO1(DEFINE_FUNCTION_NO_HIDE, c_name)
+    FUNCTION_TYPE(\c_name, 0)
     .globl VAR(c_name, 0)
     ALIGN_FUNCTION_ENTRY
 VAR(c_name, 0):
diff --git a/runtime/arch/x86/portable_entrypoints_x86.S b/runtime/arch/x86/portable_entrypoints_x86.S
index 5f270f8087..9365795fd6 100644
--- a/runtime/arch/x86/portable_entrypoints_x86.S
+++ b/runtime/arch/x86/portable_entrypoints_x86.S
@@ -115,7 +115,7 @@ DEFINE_FUNCTION art_portable_resolution_trampoline
     ret
 END_FUNCTION art_portable_resolution_trampoline
 
-DEFINE_FUNCTION art_portable_to_interpreter_bridge
+DEFINE_FUNCTION_NO_HIDE art_portable_to_interpreter_bridge
     PUSH ebp                        // Set up frame.
     movl %esp, %ebp
     CFI_DEF_CFA_REGISTER(%ebp)
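The bulk of the x86 changes below replace PLT_SYMBOL/PLT_VAR calls with plain SYMBOL/VAR ones: once the callees are hidden (or the build defaults to protected visibility), the static linker can bind these intra-library calls directly, so routing them through the PLT buys nothing. In C++ terms the effect is roughly as follows (artIsAssignableFromCode is a real ART runtime function; the declaration is only an illustration of the binding change):

    extern "C" int artIsAssignableFromCode(void* klass, void* ref_klass);
    int CheckAssignable(void* a, void* b) {
      // With hidden/protected visibility this compiles to a direct call into
      // libart rather than an indirect jump through a PLT slot.
      return artIsAssignableFromCode(a, b);
    }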
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 084846abf0..2f3e31797d 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -106,7 +106,7 @@ MACRO0(DELIVER_PENDING_EXCEPTION)
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
     SETUP_GOT_NOSAVE               // clobbers ebx (harmless here)
-    call PLT_SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*, SP)
+    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*, SP)
     int3                           // unreached
 END_MACRO
 
@@ -121,7 +121,7 @@ MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
     SETUP_GOT_NOSAVE               // clobbers ebx (harmless here)
-    call PLT_VAR(cxx_name, 1)      // cxx_name(Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(Thread*, SP)
     int3                           // unreached
 END_FUNCTION RAW_VAR(c_name, 0)
 END_MACRO
@@ -137,7 +137,7 @@ MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH eax                       // pass arg1
     SETUP_GOT_NOSAVE               // clobbers ebx (harmless here)
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, Thread*, SP)
     int3                           // unreached
 END_FUNCTION RAW_VAR(c_name, 0)
 END_MACRO
@@ -153,7 +153,7 @@ MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     PUSH ecx                       // pass arg2
     PUSH eax                       // pass arg1
     SETUP_GOT_NOSAVE               // clobbers ebx (harmless here)
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, arg2, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, arg2, Thread*, SP)
     int3                           // unreached
 END_FUNCTION RAW_VAR(c_name, 0)
 END_MACRO
@@ -161,7 +161,6 @@ END_MACRO
     /*
      * Called by managed code to create and deliver a NullPointerException.
      */
-    ASM_HIDDEN art_quick_throw_null_pointer_exception
 NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
 
     /*
@@ -189,7 +188,6 @@ ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFr
      * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
      * index, arg2 holds limit.
      */
-    ASM_HIDDEN art_quick_throw_array_bounds
 TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
 
     /*
@@ -231,7 +229,7 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH ecx                       // pass arg2
     PUSH eax                       // pass arg1
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, arg2, arg3, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, arg2, arg3, Thread*, SP)
     movl %edx, %edi                // save code pointer in EDI
     addl MACRO_LITERAL(36), %esp   // Pop arguments skip eax
     CFI_ADJUST_CFA_OFFSET(-36)
@@ -253,7 +251,6 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
 END_FUNCTION RAW_VAR(c_name, 0)
 END_MACRO
 
-    ASM_HIDDEN art_quick_invoke_interface_trampoline
 INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
 INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
 
@@ -328,7 +325,7 @@ MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     PUSH edx                       // pass SP
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
-    call PLT_VAR(cxx_name, 1)      // cxx_name(Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(Thread*, SP)
     addl MACRO_LITERAL(16), %esp   // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -347,7 +344,7 @@ MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH eax                       // pass arg1
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, Thread*, SP)
     addl MACRO_LITERAL(16), %esp   // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -366,7 +363,7 @@ MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH ecx                       // pass arg2
     PUSH eax                       // pass arg1
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, arg2, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, arg2, Thread*, SP)
     addl MACRO_LITERAL(16), %esp   // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -388,7 +385,7 @@ MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     PUSH ecx                       // pass arg2
     PUSH eax                       // pass arg1
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_VAR(cxx_name, 1)      // cxx_name(arg1, arg2, arg3, Thread*, SP)
+    call VAR(cxx_name, 1)          // cxx_name(arg1, arg2, arg3, Thread*, SP)
     addl MACRO_LITERAL(32), %esp   // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -569,7 +566,7 @@ DEFINE_FUNCTION art_quick_lock_object
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH eax                       // pass object
-    call PLT_SYMBOL(artLockObjectFromCode)  // artLockObjectFromCode(object, Thread*, SP)
+    call SYMBOL(artLockObjectFromCode)  // artLockObjectFromCode(object, Thread*, SP)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -603,7 +600,7 @@ DEFINE_FUNCTION art_quick_unlock_object
     pushl %fs:THREAD_SELF_OFFSET   // pass Thread::Current()
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH eax                       // pass object
-    call PLT_SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*, SP)
+    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*, SP)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -615,7 +612,7 @@ DEFINE_FUNCTION art_quick_is_assignable
     PUSH eax                       // alignment padding
     PUSH ecx                       // pass arg2 - obj->klass
     PUSH eax                       // pass arg1 - checked class
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
+    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
     addl LITERAL(12), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-12)
     ret
@@ -626,7 +623,7 @@ DEFINE_FUNCTION art_quick_check_cast
     PUSH eax                       // alignment padding
     PUSH ecx                       // pass arg2 - obj->klass
     PUSH eax                       // pass arg1 - checked class
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
+    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
     testl %eax, %eax
     jz 1f                          // jump forward if not assignable
     addl LITERAL(12), %esp         // pop arguments
@@ -645,7 +642,7 @@ DEFINE_FUNCTION art_quick_check_cast
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH ecx                       // pass arg2
     PUSH eax                       // pass arg1
-    call PLT_SYMBOL(artThrowClassCastException)  // (Class* a, Class* b, Thread*, SP)
+    call SYMBOL(artThrowClassCastException)  // (Class* a, Class* b, Thread*, SP)
     int3                           // unreached
 END_FUNCTION art_quick_check_cast
 
@@ -660,7 +657,6 @@ DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
     jmp SYMBOL(art_quick_throw_null_pointer_exception)
 END_FUNCTION art_quick_aput_obj_with_null_and_bound_check
 
-    ASM_HIDDEN art_quick_aput_obj_with_bound_check
 DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
     movl ARRAY_LENGTH_OFFSET(%eax), %ebx
     cmpl %ebx, %ecx
@@ -670,7 +666,6 @@ DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
     jmp SYMBOL(art_quick_throw_array_bounds)
 END_FUNCTION art_quick_aput_obj_with_bound_check
 
-    ASM_HIDDEN art_quick_aput_obj
 DEFINE_FUNCTION art_quick_aput_obj
     test %edx, %edx                // store of null
     jz .Ldo_aput_null
@@ -697,7 +692,7 @@ DEFINE_FUNCTION art_quick_aput_obj
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH ebx                       // pass arg1 - component type of the array
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
+    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     testl %eax, %eax
@@ -722,7 +717,7 @@ DEFINE_FUNCTION art_quick_aput_obj
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH edx                       // pass arg2 - value
    PUSH eax                       // pass arg1 - array
-    call PLT_SYMBOL(artThrowArrayStoreException)  // (array, value, Thread*, SP)
+    call SYMBOL(artThrowArrayStoreException)  // (array, value, Thread*, SP)
     int3                           // unreached
 END_FUNCTION art_quick_aput_obj
 
@@ -744,7 +739,7 @@ DEFINE_FUNCTION art_quick_d2l
     PUSH ecx                       // pass arg2 a.hi
     PUSH eax                       // pass arg1 a.lo
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(art_d2l)       // (jdouble a)
+    call SYMBOL(art_d2l)           // (jdouble a)
     addl LITERAL(12), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-12)
     ret
@@ -755,7 +750,7 @@ DEFINE_FUNCTION art_quick_f2l
     CFI_ADJUST_CFA_OFFSET(8)
     SETUP_GOT_NOSAVE               // clobbers EBX
     PUSH eax                       // pass arg1 a
-    call PLT_SYMBOL(art_f2l)       // (jfloat a)
+    call SYMBOL(art_f2l)           // (jfloat a)
    addl LITERAL(12), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-12)
     ret
@@ -769,7 +764,7 @@ DEFINE_FUNCTION art_quick_ldiv
     PUSH ecx                       // pass arg2 a.hi
     PUSH eax                       // pass arg1 a.lo
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artLdiv)       // (jlong a, jlong b)
+    call SYMBOL(artLdiv)           // (jlong a, jlong b)
     addl LITERAL(28), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-28)
     ret
@@ -783,7 +778,7 @@ DEFINE_FUNCTION art_quick_lmod
     PUSH ecx                       // pass arg2 a.hi
     PUSH eax                       // pass arg1 a.lo
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artLmod)       // (jlong a, jlong b)
+    call SYMBOL(artLmod)           // (jlong a, jlong b)
     addl LITERAL(28), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-28)
     ret
@@ -851,7 +846,7 @@ DEFINE_FUNCTION art_quick_set32_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSet32InstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*, SP)
+    call SYMBOL(artSet32InstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -871,7 +866,7 @@ DEFINE_FUNCTION art_quick_set64_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSet64InstanceFromCode)  // (field_idx, Object*, new_val, Thread*, SP)
+    call SYMBOL(artSet64InstanceFromCode)  // (field_idx, Object*, new_val, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -892,7 +887,7 @@ DEFINE_FUNCTION art_quick_set_obj_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSetObjInstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*, SP)
+    call SYMBOL(artSetObjInstanceFromCode)  // (field_idx, Object*, new_val, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -912,7 +907,7 @@ DEFINE_FUNCTION art_quick_get32_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGet32InstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
+    call SYMBOL(artGet32InstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -932,7 +927,7 @@ DEFINE_FUNCTION art_quick_get64_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGet64InstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
+    call SYMBOL(artGet64InstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -952,7 +947,7 @@ DEFINE_FUNCTION art_quick_get_obj_instance
     PUSH ecx                       // pass object
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGetObjInstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
+    call SYMBOL(artGetObjInstanceFromCode)  // (field_idx, Object*, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -972,7 +967,7 @@ DEFINE_FUNCTION art_quick_set32_static
     PUSH ecx                       // pass new_val
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSet32StaticFromCode)  // (field_idx, new_val, referrer, Thread*, SP)
+    call SYMBOL(artSet32StaticFromCode)  // (field_idx, new_val, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -993,7 +988,7 @@ DEFINE_FUNCTION art_quick_set64_static
     PUSH ebx                       // pass referrer
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*, SP)
+    call SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-32)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -1013,7 +1008,7 @@ DEFINE_FUNCTION art_quick_set_obj_static
     PUSH ecx                       // pass new_val
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artSetObjStaticFromCode)  // (field_idx, new_val, referrer, Thread*, SP)
+    call SYMBOL(artSetObjStaticFromCode)  // (field_idx, new_val, referrer, Thread*, SP)
     addl LITERAL(32), %esp         // pop arguments
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO             // return or deliver exception
@@ -1029,7 +1024,7 @@ DEFINE_FUNCTION art_quick_get32_static
     PUSH ecx                       // pass referrer
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGet32StaticFromCode)  // (field_idx, referrer, Thread*, SP)
+    call SYMBOL(artGet32StaticFromCode)  // (field_idx, referrer, Thread*, SP)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -1046,7 +1041,7 @@ DEFINE_FUNCTION art_quick_get64_static
     PUSH ecx                       // pass referrer
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGet64StaticFromCode)  // (field_idx, referrer, Thread*, SP)
+    call SYMBOL(artGet64StaticFromCode)  // (field_idx, referrer, Thread*, SP)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -1063,7 +1058,7 @@ DEFINE_FUNCTION art_quick_get_obj_static
     PUSH ecx                       // pass referrer
     PUSH eax                       // pass field_idx
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artGetObjStaticFromCode)  // (field_idx, referrer, Thread*, SP)
+    call SYMBOL(artGetObjStaticFromCode)  // (field_idx, referrer, Thread*, SP)
     addl LITERAL(16), %esp         // pop arguments
     CFI_ADJUST_CFA_OFFSET(-16)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
@@ -1078,7 +1073,7 @@ DEFINE_FUNCTION art_quick_proxy_invoke_handler
     PUSH ecx                       // pass receiver
     PUSH eax                       // pass proxy method
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artQuickProxyInvokeHandler)  // (proxy method, receiver, Thread*, SP)
+    call SYMBOL(artQuickProxyInvokeHandler)  // (proxy method, receiver, Thread*, SP)
     movd %eax, %xmm0               // place return value also into floating point return value
     movd %edx, %xmm1
     punpckldq %xmm1, %xmm0
@@ -1110,7 +1105,7 @@ DEFINE_FUNCTION art_quick_resolution_trampoline
     PUSH ecx                       // pass receiver
     PUSH eax                       // pass method
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artQuickResolutionTrampoline)  // (Method* called, receiver, Thread*, SP)
+    call SYMBOL(artQuickResolutionTrampoline)  // (Method* called, receiver, Thread*, SP)
     movl %eax, %edi                // remember code pointer in EDI
     addl LITERAL(16), %esp         // pop arguments
     test %eax, %eax                // if code pointer is NULL goto deliver pending exception
@@ -1128,7 +1123,7 @@ DEFINE_FUNCTION art_quick_resolution_trampoline
     DELIVER_PENDING_EXCEPTION
 END_FUNCTION art_quick_resolution_trampoline
 
-DEFINE_FUNCTION art_quick_generic_jni_trampoline
+DEFINE_FUNCTION_NO_HIDE art_quick_generic_jni_trampoline
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     // This also stores the native ArtMethod reference at the bottom of the stack.
 
@@ -1145,7 +1140,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     pushl %ebp                     // Pass SP (to ArtMethod).
     pushl %fs:THREAD_SELF_OFFSET   // Pass Thread::Current().
     SETUP_GOT_NOSAVE               // Clobbers ebx.
-    call PLT_SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
+    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
 
     // The C call will have registered the complete save-frame on success.
     // The result of the call is:
@@ -1175,7 +1170,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     pushl %edx                    // Pass int result.
     pushl %eax
     pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
-    call PLT_SYMBOL(artQuickGenericJniEndTrampoline)
+    call SYMBOL(artQuickGenericJniEndTrampoline)
 
     // Tear down the alloca.
     movl %ebp, %esp
@@ -1209,7 +1204,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     DELIVER_PENDING_EXCEPTION
 END_FUNCTION art_quick_generic_jni_trampoline
 
-DEFINE_FUNCTION art_quick_to_interpreter_bridge
+DEFINE_FUNCTION_NO_HIDE art_quick_to_interpreter_bridge
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // save frame
     mov %esp, %edx                 // remember SP
     PUSH eax                       // alignment padding
@@ -1218,7 +1213,7 @@ DEFINE_FUNCTION art_quick_to_interpreter_bridge
     CFI_ADJUST_CFA_OFFSET(4)
     PUSH eax                       // pass method
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
+    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
     movd %eax, %xmm0               // place return value also into floating point return value
     movd %edx, %xmm1
     punpckldq %xmm1, %xmm0
@@ -1245,7 +1240,7 @@ DEFINE_FUNCTION art_quick_instrumentation_entry
     PUSH ecx                       // Pass receiver.
     PUSH eax                       // Pass Method*.
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artInstrumentationMethodEntryFromCode)  // (Method*, Object*, Thread*, SP, LR)
+    call SYMBOL(artInstrumentationMethodEntryFromCode)  // (Method*, Object*, Thread*, SP, LR)
     addl LITERAL(28), %esp         // Pop arguments upto saved Method*.
     movl 28(%esp), %edi            // Restore edi.
     movl %eax, 28(%esp)            // Place code* over edi, just under return pc.
@@ -1280,7 +1275,7 @@ DEFINE_FUNCTION art_quick_instrumentation_exit
     pushl %fs:THREAD_SELF_OFFSET   // Pass Thread::Current.
     CFI_ADJUST_CFA_OFFSET(4)
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result, fpr_result)
+    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_result, fpr_result)
     mov %eax, %ecx                 // Move returned link register.
     addl LITERAL(32), %esp         // Pop arguments.
     CFI_ADJUST_CFA_OFFSET(-32)
@@ -1310,7 +1305,7 @@ DEFINE_FUNCTION art_quick_deoptimize
     pushl %fs:THREAD_SELF_OFFSET   // Pass Thread::Current().
     CFI_ADJUST_CFA_OFFSET(4)
     SETUP_GOT_NOSAVE               // clobbers EBX
-    call PLT_SYMBOL(artDeoptimize)  // artDeoptimize(Thread*, SP)
+    call SYMBOL(artDeoptimize)     // artDeoptimize(Thread*, SP)
     int3                           // Unreachable.
 END_FUNCTION art_quick_deoptimize
 
diff --git a/runtime/arch/x86_64/asm_support_x86_64.S b/runtime/arch/x86_64/asm_support_x86_64.S
index 682ba430bd..4ae61a2a65 100644
--- a/runtime/arch/x86_64/asm_support_x86_64.S
+++ b/runtime/arch/x86_64/asm_support_x86_64.S
@@ -107,6 +107,13 @@
     #define PLT_SYMBOL(name) _ ## name
 #endif
 
+// Directive to hide a function symbol.
+#if defined(__APPLE__)
+    #define ASM_HIDDEN .private_extern
+#else
+    #define ASM_HIDDEN .hidden
+#endif
+
 /* Cache alignment for function entry */
 MACRO0(ALIGN_FUNCTION_ENTRY)
     .balign 16
 END_MACRO
@@ -116,13 +123,20 @@ END_MACRO
     // for mac builds.
 MACRO1(DEFINE_FUNCTION, c_name)
     FUNCTION_TYPE(\c_name, 0)
+    ASM_HIDDEN VAR(c_name, 0)
+    .globl VAR(c_name, 0)
+    ALIGN_FUNCTION_ENTRY
+VAR(c_name, 0):
+    CFI_STARTPROC
+    // Ensure we get a sane starting CFA.
+    CFI_DEF_CFA(rsp, 8)
+END_MACRO
+
+MACRO1(DEFINE_FUNCTION_NO_HIDE, c_name)
+    FUNCTION_TYPE(\c_name, 0)
     .globl VAR(c_name, 0)
     ALIGN_FUNCTION_ENTRY
 VAR(c_name, 0):
-#if !defined(__APPLE__)
-    // Have a local entrypoint that's not globl
-VAR(c_name, 0)_local:
-#endif
     CFI_STARTPROC
     // Ensure we get a sane starting CFA.
     CFI_DEF_CFA(rsp, 8)
@@ -147,6 +161,19 @@ END_MACRO
 
 MACRO1(UNIMPLEMENTED,name)
     FUNCTION_TYPE(\name, 0)
+    ASM_HIDDEN VAR(c_name, 0)
+    .globl VAR(name, 0)
+    ALIGN_FUNCTION_ENTRY
+VAR(name, 0):
+    CFI_STARTPROC
+    int3
+    int3
+    CFI_ENDPROC
+    SIZE(\name, 0)
+END_MACRO
+
+MACRO1(UNIMPLEMENTED_NO_HIDE,name)
+    FUNCTION_TYPE(\name, 0)
     .globl VAR(name, 0)
     ALIGN_FUNCTION_ENTRY
 VAR(name, 0):
diff --git a/runtime/arch/x86_64/portable_entrypoints_x86_64.S b/runtime/arch/x86_64/portable_entrypoints_x86_64.S
index 2e9d19a899..7b84d178db 100644
--- a/runtime/arch/x86_64/portable_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/portable_entrypoints_x86_64.S
@@ -25,4 +25,4 @@
 UNIMPLEMENTED art_portable_proxy_invoke_handler
 UNIMPLEMENTED art_portable_resolution_trampoline
-UNIMPLEMENTED art_portable_to_interpreter_bridge
+UNIMPLEMENTED_NO_HIDE art_portable_to_interpreter_bridge
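The x86-64 DEFINE_FUNCTION also drops the extra VAR(c_name, 0)_local label, which seems to have existed only so intra-file jumps could avoid the PLT, something hidden symbols make unnecessary. The NO_HIDE variants keep the bridges and trampolines externally visible; for example, code outside libart could presumably still locate one at runtime (hypothetical lookup, assuming a dlopen'ed libart handle):

    #include <dlfcn.h>
    // 'handle' from dlopen("libart.so", RTLD_NOW); a hidden stub would yield
    // nullptr here, but the NO_HIDE bridges stay in the dynamic symbol table.
    void* bridge = dlsym(handle, "art_quick_to_interpreter_bridge");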
+ CFI_DEF_CFA(rsp, 8) +END_MACRO + +MACRO1(DEFINE_FUNCTION_NO_HIDE, c_name) + FUNCTION_TYPE(\c_name, 0) .globl VAR(c_name, 0) ALIGN_FUNCTION_ENTRY VAR(c_name, 0): -#if !defined(__APPLE__) - // Have a local entrypoint that's not globl -VAR(c_name, 0)_local: -#endif CFI_STARTPROC // Ensure we get a sane starting CFA. CFI_DEF_CFA(rsp, 8) @@ -147,6 +161,19 @@ END_MACRO MACRO1(UNIMPLEMENTED,name) FUNCTION_TYPE(\name, 0) + ASM_HIDDEN VAR(c_name, 0) + .globl VAR(name, 0) + ALIGN_FUNCTION_ENTRY +VAR(name, 0): + CFI_STARTPROC + int3 + int3 + CFI_ENDPROC + SIZE(\name, 0) +END_MACRO + +MACRO1(UNIMPLEMENTED_NO_HIDE,name) + FUNCTION_TYPE(\name, 0) .globl VAR(name, 0) ALIGN_FUNCTION_ENTRY VAR(name, 0): diff --git a/runtime/arch/x86_64/portable_entrypoints_x86_64.S b/runtime/arch/x86_64/portable_entrypoints_x86_64.S index 2e9d19a899..7b84d178db 100644 --- a/runtime/arch/x86_64/portable_entrypoints_x86_64.S +++ b/runtime/arch/x86_64/portable_entrypoints_x86_64.S @@ -25,4 +25,4 @@ UNIMPLEMENTED art_portable_proxy_invoke_handler UNIMPLEMENTED art_portable_resolution_trampoline -UNIMPLEMENTED art_portable_to_interpreter_bridge +UNIMPLEMENTED_NO_HIDE art_portable_to_interpreter_bridge diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S index 48bc240dab..f95bd22e9f 100644 --- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S +++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S @@ -232,7 +232,7 @@ MACRO0(DELIVER_PENDING_EXCEPTION) // (Thread*, SP) setup movq %gs:THREAD_SELF_OFFSET, %rdi movq %rsp, %rsi - call PLT_SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*, SP) + call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*, SP) UNREACHABLE END_MACRO @@ -242,7 +242,7 @@ MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) // Outgoing argument set up movq %rsp, %rsi // pass SP movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() - call PLT_VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*, SP) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -253,7 +253,7 @@ MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) // Outgoing argument set up movq %rsp, %rdx // pass SP movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() - call PLT_VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -264,7 +264,7 @@ MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) // Outgoing argument set up movq %rsp, %rcx // pass SP movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() - call PLT_VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*, SP) UNREACHABLE END_FUNCTION VAR(c_name, 0) END_MACRO @@ -329,7 +329,7 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread movq %rsp, %r8 // pass SP - call PLT_VAR(cxx_name, 1) // cxx_name(arg1, arg2, caller method*, Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(arg1, arg2, caller method*, Thread*, SP) // save the code pointer movq %rax, %rdi movq %rdx, %rax @@ -643,7 +643,7 @@ MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro) // Outgoing argument set up movq %rsp, %rsi // pass SP movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() - call PLT_VAR(cxx_name, 1) // cxx_name(Thread*, SP) + call VAR(cxx_name, 1) // cxx_name(Thread*, SP) RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address 
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 48bc240dab..f95bd22e9f 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -232,7 +232,7 @@ MACRO0(DELIVER_PENDING_EXCEPTION)
     // (Thread*, SP) setup
     movq %gs:THREAD_SELF_OFFSET, %rdi
     movq %rsp, %rsi
-    call PLT_SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*, SP)
+    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*, SP)
     UNREACHABLE
 END_MACRO
 
@@ -242,7 +242,7 @@ MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     // Outgoing argument set up
     movq %rsp, %rsi                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rdi  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(Thread*, SP)
     UNREACHABLE
 END_FUNCTION VAR(c_name, 0)
 END_MACRO
 
@@ -253,7 +253,7 @@ MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     // Outgoing argument set up
     movq %rsp, %rdx                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rsi  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg1, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg1, Thread*, SP)
     UNREACHABLE
 END_FUNCTION VAR(c_name, 0)
 END_MACRO
 
@@ -264,7 +264,7 @@ MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
     // Outgoing argument set up
     movq %rsp, %rcx                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg1, arg2, Thread*, SP)
     UNREACHABLE
 END_FUNCTION VAR(c_name, 0)
 END_MACRO
 
@@ -329,7 +329,7 @@ MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
     movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread
     movq %rsp, %r8                     // pass SP
 
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg1, arg2, caller method*, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg1, arg2, caller method*, Thread*, SP)
     // save the code pointer
     movq %rax, %rdi
     movq %rdx, %rax
@@ -643,7 +643,7 @@ MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     // Outgoing argument set up
     movq %rsp, %rsi                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rdi  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)        // return or deliver exception
 END_FUNCTION VAR(c_name, 0)
 
@@ -655,7 +655,7 @@ MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     // Outgoing argument set up
     movq %rsp, %rdx                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rsi  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg0, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg0, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)        // return or deliver exception
 END_FUNCTION VAR(c_name, 0)
 
@@ -667,7 +667,7 @@ MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     // Outgoing argument set up
     movq %rsp, %rcx                    // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg0, arg1, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)        // return or deliver exception
 END_FUNCTION VAR(c_name, 0)
 
@@ -679,7 +679,7 @@ MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
     // Outgoing argument set up
     movq %rsp, %r8                     // pass SP
     movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread::Current()
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg0, arg1, arg2, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, arg2, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)        // return or deliver exception
 END_FUNCTION VAR(c_name, 0)
 
@@ -692,7 +692,7 @@ MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
     // arg0 is in rdi
     movq %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
     movq %rsp, %rcx                    // pass SP
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg0, referrer, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg0, referrer, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)
 END_FUNCTION VAR(c_name, 0)
 
@@ -705,7 +705,7 @@ MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
     // arg0 and arg1 are in rdi/rsi
     movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread::Current()
     movq %rsp, %r8                     // pass SP
-    call PLT_VAR(cxx_name, 1)          // (arg0, arg1, referrer, Thread*, SP)
+    call VAR(cxx_name, 1)              // (arg0, arg1, referrer, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)
 END_FUNCTION VAR(c_name, 0)
 
@@ -718,7 +718,7 @@ MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
     // arg0, arg1, and arg2 are in rdi/rsi/rdx
     movq %gs:THREAD_SELF_OFFSET, %r8   // pass Thread::Current()
     movq %rsp, %r9                     // pass SP
-    call PLT_VAR(cxx_name, 1)          // cxx_name(arg0, arg1, arg2, referrer, Thread*, SP)
+    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, arg2, referrer, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     CALL_MACRO(return_macro, 2)        // return or deliver exception
 END_FUNCTION VAR(c_name, 0)
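All of these downcall macros share one marshalling shape: the Java-visible arguments stay in their SysV AMD64 registers (rdi, rsi, rdx, ...), and the macro appends Thread::Current() and the current SP as trailing arguments for the C++ helper, which uses SP to walk the managed stack. A stripped-down sketch of the ONE_ARG_DOWNCALL shape, with a hypothetical helper artDoThingFromCode and a placeholder THREAD_SELF_OFFSET (the real value comes from the generated asm support header, and the real macros also set up and restore a callee-save frame):

#define THREAD_SELF_OFFSET 0      // placeholder only; not ART's real offset
    .text
    .hidden art_do_thing_stub
    .globl art_do_thing_stub
art_do_thing_stub:
    // arg0 is already in %rdi (SysV argument 1)
    movq %gs:THREAD_SELF_OFFSET, %rsi   // argument 2: Thread::Current()
    movq %rsp, %rdx                     // argument 3: SP, for the stack walker
    call artDoThingFromCode             // artDoThingFromCode(arg0, Thread*, SP)
    ret
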
@@ -887,7 +887,7 @@ DEFINE_FUNCTION art_quick_lock_object
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME
     movq %gs:THREAD_SELF_OFFSET, %rsi  // pass Thread::Current()
     movq %rsp, %rdx                    // pass SP
-    call PLT_SYMBOL(artLockObjectFromCode)  // artLockObjectFromCode(object, Thread*, SP)
+    call SYMBOL(artLockObjectFromCode)  // artLockObjectFromCode(object, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO
 END_FUNCTION art_quick_lock_object
 
@@ -913,7 +913,7 @@ DEFINE_FUNCTION art_quick_unlock_object
     SETUP_REF_ONLY_CALLEE_SAVE_FRAME
     movq %gs:THREAD_SELF_OFFSET, %rsi  // pass Thread::Current()
     movq %rsp, %rdx                    // pass SP
-    call PLT_SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*, SP)
+    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO
 END_FUNCTION art_quick_unlock_object
 
@@ -922,7 +922,7 @@ DEFINE_FUNCTION art_quick_check_cast
     PUSH rdi                      // Save args for exc
     PUSH rsi
     SETUP_FP_CALLEE_SAVE_FRAME
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
+    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
     testq %rax, %rax
     jz 1f                         // jump forward if not assignable
     RESTORE_FP_CALLEE_SAVE_FRAME
@@ -937,7 +937,7 @@ DEFINE_FUNCTION art_quick_check_cast
     SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
     mov %rsp, %rcx                // pass SP
     mov %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
-    call PLT_SYMBOL(artThrowClassCastException)  // (Class* a, Class* b, Thread*, SP)
+    call SYMBOL(artThrowClassCastException)  // (Class* a, Class* b, Thread*, SP)
     int3                          // unreached
 END_FUNCTION art_quick_check_cast
 
@@ -958,8 +958,8 @@ DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
 #else
     testl %edi, %edi
     // testq %rdi, %rdi
-    jnz art_quick_aput_obj_with_bound_check_local
-    jmp art_quick_throw_null_pointer_exception_local
+    jnz art_quick_aput_obj_with_bound_check
+    jmp art_quick_throw_null_pointer_exception
 #endif  // __APPLE__
 END_FUNCTION art_quick_aput_obj_with_null_and_bound_check
 
@@ -972,12 +972,12 @@ DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
     movl ARRAY_LENGTH_OFFSET(%edi), %ecx
     // movl ARRAY_LENGTH_OFFSET(%rdi), %ecx  // This zero-extends, so value(%rcx)=value(%ecx)
     cmpl %ecx, %esi
-    jb art_quick_aput_obj_local
+    jb art_quick_aput_obj
     mov %esi, %edi
     // mov %rsi, %rdi
     mov %ecx, %esi
     // mov %rcx, %rsi
-    jmp art_quick_throw_array_bounds_local
+    jmp art_quick_throw_array_bounds
 #endif  // __APPLE__
 END_FUNCTION art_quick_aput_obj_with_bound_check
 
@@ -1018,7 +1018,7 @@ DEFINE_FUNCTION art_quick_aput_obj
     movl CLASS_OFFSET(%edx), %esi  // Pass arg2 = value's class.
     movq %rcx, %rdi               // Pass arg1 = array's component type.
 
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
+    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
 
     // Exception?
     testq %rax, %rax
@@ -1057,7 +1057,7 @@ DEFINE_FUNCTION art_quick_aput_obj
     movq %gs:THREAD_SELF_OFFSET, %rdx  // Pass arg 3 = Thread::Current().
     // Pass arg 1 = array.
-    call PLT_SYMBOL(artThrowArrayStoreException)  // (array, value, Thread*, SP)
+    call SYMBOL(artThrowArrayStoreException)  // (array, value, Thread*, SP)
     int3                          // unreached
 END_FUNCTION art_quick_aput_obj
 
@@ -1099,7 +1099,7 @@ DEFINE_FUNCTION art_quick_set64_static
     // field_idx is in rdi
     movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread::Current()
     movq %rsp, %r8                // pass SP
-    call PLT_SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*, SP)
+    call SYMBOL(artSet64StaticFromCode)  // (field_idx, referrer, new_val, Thread*, SP)
     RESTORE_REF_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
     RETURN_IF_EAX_ZERO            // return or deliver exception
 END_FUNCTION art_quick_set64_static
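The _local suffixes disappear here for the same visibility reason: a jmp/jnz to a preemptible global in PIC code cannot be a plain direct branch, which is why each entry point used to carry a second, non-exported _local label as an intra-library branch target. With the entry points hidden, the global name itself binds locally and the aliases become redundant. A sketch with hypothetical labels:

    .text
    .hidden target_stub
    .globl target_stub
target_stub:
    ret

    .hidden caller_stub
    .globl caller_stub
caller_stub:
    jmp target_stub               // direct PC-relative branch; no target_stub_local alias needed
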
@@ -1139,7 +1139,7 @@ DEFINE_FUNCTION art_quick_proxy_invoke_handler
     movq %rdi, 0(%rsp)
     movq %gs:THREAD_SELF_OFFSET, %rdx  // Pass Thread::Current().
     movq %rsp, %rcx               // Pass SP.
-    call PLT_SYMBOL(artQuickProxyInvokeHandler)  // (proxy method, receiver, Thread*, SP)
+    call SYMBOL(artQuickProxyInvokeHandler)  // (proxy method, receiver, Thread*, SP)
     movq %rax, %xmm0              // Copy return value in case of float returns.
     addq LITERAL(168 + 4*8), %rsp  // Pop arguments.
     CFI_ADJUST_CFA_OFFSET(-168 - 4*8)
@@ -1158,7 +1158,7 @@ DEFINE_FUNCTION art_quick_imt_conflict_trampoline
     movl 8(%rsp), %edi            // load caller Method*
     movl METHOD_DEX_CACHE_METHODS_OFFSET(%rdi), %edi  // load dex_cache_resolved_methods
     movl OBJECT_ARRAY_DATA_OFFSET(%rdi, %rax, 4), %edi  // load the target method
-    jmp art_quick_invoke_interface_trampoline_local
+    jmp art_quick_invoke_interface_trampoline
 #endif  // __APPLE__
 END_FUNCTION art_quick_imt_conflict_trampoline
 
@@ -1166,7 +1166,7 @@ DEFINE_FUNCTION art_quick_resolution_trampoline
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
     movq %gs:THREAD_SELF_OFFSET, %rdx
     movq %rsp, %rcx
-    call PLT_SYMBOL(artQuickResolutionTrampoline)  // (called, receiver, Thread*, SP)
+    call SYMBOL(artQuickResolutionTrampoline)  // (called, receiver, Thread*, SP)
     movq %rax, %r10               // Remember returned code pointer in R10.
     movq (%rsp), %rdi             // Load called method into RDI.
     RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
@@ -1254,7 +1254,7 @@ END_FUNCTION art_quick_resolution_trampoline
 /*
  * Called to do a generic JNI down-call
  */
-DEFINE_FUNCTION art_quick_generic_jni_trampoline
+DEFINE_FUNCTION_NO_HIDE art_quick_generic_jni_trampoline
     // Save callee and GPR args, mixed together to agree with core spills bitmap.
     PUSH r15                      // Callee save.
     PUSH r14                      // Callee save.
@@ -1310,7 +1310,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     // gs:... rbp <= where they are
     movq %gs:THREAD_SELF_OFFSET, %rdi
     movq %rbp, %rsi
-    call PLT_SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
+    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
 
     // The C call will have registered the complete save-frame on success.
     // The result of the call is:
@@ -1354,7 +1354,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     movq %gs:THREAD_SELF_OFFSET, %rdi
     movq %rax, %rsi
     movq %xmm0, %rdx
-    call PLT_SYMBOL(artQuickGenericJniEndTrampoline)
+    call SYMBOL(artQuickGenericJniEndTrampoline)
 
     // Tear down the alloca.
     movq %rbp, %rsp
@@ -1441,11 +1441,11 @@ END_FUNCTION art_quick_generic_jni_trampoline
  * RDI = method being called / to bridge to.
  * RSI, RDX, RCX, R8, R9 are arguments to that method.
  */
-DEFINE_FUNCTION art_quick_to_interpreter_bridge
+DEFINE_FUNCTION_NO_HIDE art_quick_to_interpreter_bridge
     SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Set up frame and save arguments.
     movq %gs:THREAD_SELF_OFFSET, %rsi  // RSI := Thread::Current()
     movq %rsp, %rdx               // RDX := sp
-    call PLT_SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
+    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
     RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.
     movq %rax, %xmm0              // Place return value also into floating point return value.
     RETURN_OR_DELIVER_PENDING_EXCEPTION  // return or deliver exception
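Note the asymmetry: most entry points above switch to the hidden DEFINE_FUNCTION, but the interpreter bridge and the generic JNI trampoline use DEFINE_FUNCTION_NO_HIDE, presumably because those bridge symbols are referenced from outside the defining object and must remain exported. On ELF the two flavors reduce to a one-directive difference; a sketch with hypothetical names:

    .text
    .hidden hidden_stub           // DEFINE_FUNCTION flavor: local binding, not interposable
    .globl hidden_stub
hidden_stub:
    ret

    .globl exported_stub          // DEFINE_FUNCTION_NO_HIDE flavor: stays exported/preemptible
exported_stub:
    ret
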
@@ -1467,12 +1467,12 @@ DEFINE_FUNCTION art_quick_instrumentation_entry
     movq %rsp, %rcx               // Pass SP.
     movq FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp), %r8  // Pass return PC.
 
-    call PLT_SYMBOL(artInstrumentationMethodEntryFromCode)  // (Method*, Object*, Thread*, SP, LR)
+    call SYMBOL(artInstrumentationMethodEntryFromCode)  // (Method*, Object*, Thread*, SP, LR)
 
     // %rax = result of call.
     movq %r12, %rdi               // Reload method pointer.
 
-    leaq art_quick_instrumentation_exit_local(%rip), %r12  // Set up return through instrumentation
+    leaq art_quick_instrumentation_exit(%rip), %r12  // Set up return through instrumentation
     movq %r12, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp)  // exit.
 
     RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
@@ -1501,7 +1501,7 @@ DEFINE_FUNCTION art_quick_instrumentation_exit
     movq %rax, %rdx               // Pass integer result.
     movq %xmm0, %rcx              // Pass floating-point result.
 
-    call PLT_SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_res, fpr_res)
+    call SYMBOL(artInstrumentationMethodExitFromCode)  // (Thread*, SP, gpr_res, fpr_res)
 
     movq %rax, %rdi               // Store return PC
     movq %rdx, %rsi               // Store second return PC in hidden arg.
@@ -1526,7 +1526,7 @@ DEFINE_FUNCTION art_quick_deoptimize
     // Stack should be aligned now.
     movq %rsp, %rsi               // Pass SP.
     movq %gs:THREAD_SELF_OFFSET, %rdi  // Pass Thread.
-    call PLT_SYMBOL(artDeoptimize)  // artDeoptimize(Thread*, SP)
+    call SYMBOL(artDeoptimize)    // artDeoptimize(Thread*, SP)
     int3                          // Unreachable.
 END_FUNCTION art_quick_deoptimize
 
@@ -1577,7 +1577,7 @@ UNIMPLEMENTED art_quick_memcmp16
 
 DEFINE_FUNCTION art_quick_assignable_from_code
     SETUP_FP_CALLEE_SAVE_FRAME
-    call PLT_SYMBOL(artIsAssignableFromCode)  // (const mirror::Class*, const mirror::Class*)
+    call SYMBOL(artIsAssignableFromCode)  // (const mirror::Class*, const mirror::Class*)
     RESTORE_FP_CALLEE_SAVE_FRAME
     ret
 END_FUNCTION art_quick_assignable_from_code
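In art_quick_instrumentation_entry above, the return PC is rewritten with leaq art_quick_instrumentation_exit(%rip): taking the address of a now-hidden symbol this way is a link-time-constant, PC-relative computation with no GOT load, which is exactly what the dropped _local alias used to provide. A self-contained sketch of the same address-materialization, with hypothetical exit_stub/get_exit_stub names:

    .text
    .hidden exit_stub
    .globl exit_stub
exit_stub:
    ret

    .globl get_exit_stub
get_exit_stub:
    leaq exit_stub(%rip), %rax    // address materialized as a constant offset from RIP
    ret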