author Vladimir Marko <vmarko@google.com> 2023-03-07 15:33:29 +0100
committer Vladimír Marko <vmarko@google.com> 2023-03-09 08:22:13 +0000
commit c554a91c2a35313df1f9a75375e8761da503386a (patch)
tree 763dcbd91afed29ccfc003c1a980464a0645eb74
parent bdbd707a8fe4e39912b64c83c91af1a65d3a8c5a (diff)
Remove obsolete functions from JNI macro assemblers.
Remove obsolete methods and related T32 tests. Change some functions to
private functions of implementing classes, unavailable through the JNI
macro assembler interface. Add some T32 tests for `MoveArguments()`.
Add a TODO for using LDRD which should improve some @CriticalNative stubs.
Remove unnecessary `#include`s of `jni_macro_assembler.h`.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 271573990
Change-Id: I65c14ba1cf8a50db80243aaaa96e85eb6a28477f
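For context on the LDRD TODO referenced above: the register-pair case of the now-private
`Load()` helper in the arm assembler issues two separate `LDR` instructions (see the hunk in
jni_macro_assembler_arm_vixl.cc below). A minimal sketch of the intended follow-up, assuming
VIXL's aarch32 `Ldrd` macro instruction and the pair-register helpers already used in that
file; illustrative only, not part of this commit:

    } else if (dest.IsRegisterPair()) {
      CHECK_EQ(8u, size) << dest;
      // Hypothetical follow-up: one LDRD instead of two LDRs for the pair.
      // Assumes the offset is encodable for LDRD; otherwise keep the two-LDR fallback.
      ___ Ldrd(AsVIXLRegisterPairLow(dest),
               AsVIXLRegisterPairHigh(dest),
               MemOperand(base, offset));
    }

This could shave an instruction from @CriticalNative stubs whose signatures pass several
long arguments on the stack.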
-rw-r--r--  compiler/utils/arm/assembler_arm_vixl.h                 1
-rw-r--r--  compiler/utils/arm/jni_macro_assembler_arm_vixl.cc    175
-rw-r--r--  compiler/utils/arm/jni_macro_assembler_arm_vixl.h     100
-rw-r--r--  compiler/utils/arm64/jni_macro_assembler_arm64.cc     201
-rw-r--r--  compiler/utils/arm64/jni_macro_assembler_arm64.h       81
-rw-r--r--  compiler/utils/assembler_thumb_test.cc                 63
-rw-r--r--  compiler/utils/assembler_thumb_test_expected.cc.inc   263
-rw-r--r--  compiler/utils/jni_macro_assembler.h                   86
-rw-r--r--  compiler/utils/x86/jni_macro_assembler_x86.cc         171
-rw-r--r--  compiler/utils/x86/jni_macro_assembler_x86.h           78
-rw-r--r--  compiler/utils/x86_64/assembler_x86_64.h                1
-rw-r--r--  compiler/utils/x86_64/jni_macro_assembler_x86_64.cc   178
-rw-r--r--  compiler/utils/x86_64/jni_macro_assembler_x86_64.h     97
13 files changed, 253 insertions(+), 1242 deletions(-)
diff --git a/compiler/utils/arm/assembler_arm_vixl.h b/compiler/utils/arm/assembler_arm_vixl.h
index f2c0b76b66..93775316b3 100644
--- a/compiler/utils/arm/assembler_arm_vixl.h
+++ b/compiler/utils/arm/assembler_arm_vixl.h
@@ -27,7 +27,6 @@
#include "utils/arm/assembler_arm_shared.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
-#include "utils/jni_macro_assembler.h"
// TODO(VIXL): Make VIXL compile with -Wshadow and remove pragmas.
#pragma GCC diagnostic push
diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
index 699df80096..394575c1a2 100644
--- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
+++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
@@ -27,7 +27,6 @@ using namespace vixl::aarch32; // NOLINT(build/namespaces)
namespace vixl32 = vixl::aarch32;
using vixl::ExactAssemblyScope;
-using vixl::CodeBufferCheckScope;
namespace art HIDDEN {
namespace arm {
@@ -307,13 +306,6 @@ void ArmVIXLJNIMacroAssembler::Store(ManagedRegister m_base,
}
}
-void ArmVIXLJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
- vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- temps.Exclude(src);
- asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
-}
-
void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
@@ -321,70 +313,6 @@ void ArmVIXLJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msr
asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
}
-void ArmVIXLJNIMacroAssembler::StoreSpanning(FrameOffset dest,
- ManagedRegister msrc,
- FrameOffset in_off) {
- vixl::aarch32::Register src = AsVIXLRegister(msrc.AsArm());
- asm_.StoreToOffset(kStoreWord, src, sp, dest.Int32Value());
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value());
- asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4);
-}
-
-void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value());
- asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
-}
-
-void ArmVIXLJNIMacroAssembler::CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.LoadFromOffset(kLoadWord, scratch, AsVIXLRegister(base.AsArm()), offs.Int32Value());
- if (unpoison_reference) {
- asm_.MaybeUnpoisonHeapReference(scratch);
- }
- asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
-}
-
-void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister mdest,
- ManagedRegister mbase,
- MemberOffset offs,
- bool unpoison_reference) {
- vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
- vixl::aarch32::Register base = AsVIXLRegister(mbase.AsArm());
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- temps.Exclude(dest, base);
- asm_.LoadFromOffset(kLoadWord, dest, base, offs.Int32Value());
-
- if (unpoison_reference) {
- asm_.MaybeUnpoisonHeapReference(dest);
- }
-}
-
-void ArmVIXLJNIMacroAssembler::LoadRef(ManagedRegister dest ATTRIBUTE_UNUSED,
- FrameOffset src ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::LoadRawPtr(ManagedRegister dest ATTRIBUTE_UNUSED,
- ManagedRegister base ATTRIBUTE_UNUSED,
- Offset offs ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.LoadImmediate(scratch, imm);
- asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value());
-}
-
void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
return Load(m_dst.AsArm(), sp, src.Int32Value(), size);
}
@@ -396,11 +324,6 @@ void ArmVIXLJNIMacroAssembler::Load(ManagedRegister m_dst,
return Load(m_dst.AsArm(), AsVIXLRegister(m_base.AsArm()), offs.Int32Value(), size);
}
-void ArmVIXLJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
- ThreadOffset32 src,
- size_t size) {
- return Load(m_dst.AsArm(), tr, src.Int32Value(), size);
-}
void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
vixl::aarch32::Register dest = AsVIXLRegister(mdest.AsArm());
@@ -409,27 +332,6 @@ void ArmVIXLJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, Threa
asm_.LoadFromOffset(kLoadWord, dest, tr, offs.Int32Value());
}
-void ArmVIXLJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.LoadFromOffset(kLoadWord, scratch, tr, thr_offs.Int32Value());
- asm_.StoreToOffset(kStoreWord, scratch, sp, fr_offs.Int32Value());
-}
-
-void ArmVIXLJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs ATTRIBUTE_UNUSED,
- FrameOffset fr_offs ATTRIBUTE_UNUSED,
- ManagedRegister mscratch ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
- FrameOffset fr_offs) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- asm_.AddConstant(scratch, sp, fr_offs.Int32Value());
- asm_.StoreToOffset(kStoreWord, scratch, tr, thr_offs.Int32Value());
-}
-
void ArmVIXLJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) {
if (tag_sp) {
UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
@@ -898,48 +800,6 @@ void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t si
}
}
-void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dest ATTRIBUTE_UNUSED,
- ManagedRegister src_base ATTRIBUTE_UNUSED,
- Offset src_offset ATTRIBUTE_UNUSED,
- ManagedRegister mscratch ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest_base ATTRIBUTE_UNUSED,
- Offset dest_offset ATTRIBUTE_UNUSED,
- FrameOffset src ATTRIBUTE_UNUSED,
- ManagedRegister mscratch ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
- FrameOffset src_base ATTRIBUTE_UNUSED,
- Offset src_offset ATTRIBUTE_UNUSED,
- ManagedRegister mscratch ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::Copy(ManagedRegister dest ATTRIBUTE_UNUSED,
- Offset dest_offset ATTRIBUTE_UNUSED,
- ManagedRegister src ATTRIBUTE_UNUSED,
- Offset src_offset ATTRIBUTE_UNUSED,
- ManagedRegister mscratch ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
-void ArmVIXLJNIMacroAssembler::Copy(FrameOffset dst ATTRIBUTE_UNUSED,
- Offset dest_offset ATTRIBUTE_UNUSED,
- FrameOffset src ATTRIBUTE_UNUSED,
- Offset src_offset ATTRIBUTE_UNUSED,
- ManagedRegister scratch ATTRIBUTE_UNUSED,
- size_t size ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
void ArmVIXLJNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
FrameOffset spilled_reference_offset,
ManagedRegister min_reg,
@@ -985,35 +845,6 @@ void ArmVIXLJNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
}
}
-void ArmVIXLJNIMacroAssembler::CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- vixl32::Register scratch = temps.Acquire();
- if (null_allowed) {
- asm_.LoadFromOffset(kLoadWord, scratch, sp, spilled_reference_offset.Int32Value());
- // Null values get a jobject value null. Otherwise, the jobject is
- // the address of the spilled reference.
- // e.g. scratch = (scratch == 0) ? 0 : (SP+spilled_reference_offset)
- ___ Cmp(scratch, 0);
-
- // FIXME: Using 32-bit T32 instruction in IT-block is deprecated.
- if (asm_.ShifterOperandCanHold(ADD, spilled_reference_offset.Int32Value())) {
- ExactAssemblyScope guard(asm_.GetVIXLAssembler(),
- 2 * vixl32::kMaxInstructionSizeInBytes,
- CodeBufferCheckScope::kMaximumSize);
- ___ it(ne, 0x8);
- asm_.AddConstantInIt(scratch, sp, spilled_reference_offset.Int32Value(), ne);
- } else {
- // TODO: Implement this (old arm assembler would have crashed here).
- UNIMPLEMENTED(FATAL);
- }
- } else {
- asm_.AddConstant(scratch, sp, spilled_reference_offset.Int32Value());
- }
- asm_.StoreToOffset(kStoreWord, scratch, sp, out_off.Int32Value());
-}
-
void ArmVIXLJNIMacroAssembler::VerifyObject(ManagedRegister src ATTRIBUTE_UNUSED,
bool could_be_null ATTRIBUTE_UNUSED) {
// TODO: not validating references.
@@ -1240,10 +1071,6 @@ void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
}
-void ArmVIXLJNIMacroAssembler::MemoryBarrier(ManagedRegister scratch ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL);
-}
-
void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
vixl32::Register base,
int32_t offset,
@@ -1265,6 +1092,8 @@ void ArmVIXLJNIMacroAssembler::Load(ArmManagedRegister dest,
}
} else if (dest.IsRegisterPair()) {
CHECK_EQ(8u, size) << dest;
+ // TODO: Use LDRD to improve stubs for @CriticalNative methods with parameters
+ // (long, long, ...). A single 32-bit LDRD is presumably faster than two 16-bit LDRs.
___ Ldr(AsVIXLRegisterPairLow(dest), MemOperand(base, offset));
___ Ldr(AsVIXLRegisterPairHigh(dest), MemOperand(base, offset + 4));
} else if (dest.IsSRegister()) {
diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.h b/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
index df14d0403d..bcc40c4016 100644
--- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
+++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
@@ -63,34 +63,14 @@ class ArmVIXLJNIMacroAssembler final
// Store routines.
void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) override;
- void StoreRef(FrameOffset dest, ManagedRegister src) override;
void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
- void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
-
- void StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) override;
-
void StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) override;
- void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
-
// Load routines.
void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) override;
- void LoadFromThread(ManagedRegister dest,
- ThreadOffset32 src,
- size_t size) override;
-
- void LoadRef(ManagedRegister dest, FrameOffset src) override;
-
- void LoadRef(ManagedRegister dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
-
- void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
-
void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset32 offs) override;
// Copying routines.
@@ -102,52 +82,6 @@ class ArmVIXLJNIMacroAssembler final
void Move(ManagedRegister dest, size_t value) override;
- void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) override;
-
- void CopyRawPtrToThread(ThreadOffset32 thr_offs,
- FrameOffset fr_offs,
- ManagedRegister scratch) override;
-
- void CopyRef(FrameOffset dest, FrameOffset src) override;
- void CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
-
- void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
-
- void Copy(FrameOffset dest,
- ManagedRegister src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
// Sign extension.
void SignExtend(ManagedRegister mreg, size_t size) override;
@@ -158,21 +92,6 @@ class ArmVIXLJNIMacroAssembler final
void GetCurrentThread(ManagedRegister dest) override;
void GetCurrentThread(FrameOffset dest_offset) override;
- // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
- // stale reference that can be used to avoid loading the spilled value to
- // see if the value is null.
- void CreateJObject(ManagedRegister out_reg,
- FrameOffset spilled_reference_offset,
- ManagedRegister in_reg,
- bool null_allowed) override;
-
- // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`.
- void CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) override;
-
// Heap::VerifyObject on src. In some cases (such as a reference to this) we
// know that src may not be null.
void VerifyObject(ManagedRegister src, bool could_be_null) override;
@@ -220,14 +139,23 @@ class ArmVIXLJNIMacroAssembler final
// Code at this offset will serve as the target for the Jump call.
void Bind(JNIMacroLabel* label) override;
- void MemoryBarrier(ManagedRegister scratch) override;
-
+ private:
+ void Copy(FrameOffset dest, FrameOffset src, size_t size);
void Load(ArmManagedRegister dest, vixl32::Register base, int32_t offset, size_t size);
- private:
+ // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
+ // stale reference that can be used to avoid loading the spilled value to
+ // see if the value is null.
+ void CreateJObject(ManagedRegister out_reg,
+ FrameOffset spilled_reference_offset,
+ ManagedRegister in_reg,
+ bool null_allowed);
+
// Used for testing.
- friend class ArmVIXLAssemblerTest_VixlLoadFromOffset_Test;
- friend class ArmVIXLAssemblerTest_VixlStoreToOffset_Test;
+ ART_FRIEND_TEST(ArmVIXLAssemblerTest, VixlJniHelpers);
+ ART_FRIEND_TEST(ArmVIXLAssemblerTest, VixlLoadFromOffset);
+ ART_FRIEND_TEST(ArmVIXLAssemblerTest, VixlStoreToOffset);
};
class ArmVIXLJNIMacroLabel final
diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.cc b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
index 27d275fe61..807f493d2c 100644
--- a/compiler/utils/arm64/jni_macro_assembler_arm64.cc
+++ b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
@@ -191,33 +191,12 @@ void Arm64JNIMacroAssembler::Store(ManagedRegister m_base,
}
}
-void Arm64JNIMacroAssembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
- Arm64ManagedRegister src = m_src.AsArm64();
- CHECK(src.IsXRegister()) << src;
- StoreWToOffset(kStoreWord, src.AsOverlappingWRegister(), SP,
- offs.Int32Value());
-}
-
void Arm64JNIMacroAssembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
Arm64ManagedRegister src = m_src.AsArm64();
CHECK(src.IsXRegister()) << src;
StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
}
-void Arm64JNIMacroAssembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- Register scratch = temps.AcquireW();
- ___ Mov(scratch, imm);
- ___ Str(scratch, MEM_OP(reg_x(SP), offs.Int32Value()));
-}
-
-void Arm64JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset64 tr_offs, FrameOffset fr_offs) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- Register scratch = temps.AcquireX();
- ___ Add(scratch, reg_x(SP), fr_offs.Int32Value());
- ___ Str(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
-}
-
void Arm64JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset64 tr_offs, bool tag_sp) {
UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
Register scratch = temps.AcquireX();
@@ -228,12 +207,6 @@ void Arm64JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset64 tr_offs, b
___ Str(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
}
-void Arm64JNIMacroAssembler::StoreSpanning(FrameOffset dest_off ATTRIBUTE_UNUSED,
- ManagedRegister m_source ATTRIBUTE_UNUSED,
- FrameOffset in_off ATTRIBUTE_UNUSED) {
- UNIMPLEMENTED(FATAL); // This case is not applicable to ARM64.
-}
-
// Load routines.
void Arm64JNIMacroAssembler::LoadImmediate(XRegister dest, int32_t value, Condition cond) {
if ((cond == al) || (cond == nv)) {
@@ -332,45 +305,6 @@ void Arm64JNIMacroAssembler::Load(ManagedRegister m_dst,
return Load(m_dst.AsArm64(), m_base.AsArm64().AsXRegister(), offs.Int32Value(), size);
}
-void Arm64JNIMacroAssembler::LoadFromThread(ManagedRegister m_dst,
- ThreadOffset64 src,
- size_t size) {
- return Load(m_dst.AsArm64(), TR, src.Int32Value(), size);
-}
-
-void Arm64JNIMacroAssembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
- Arm64ManagedRegister dst = m_dst.AsArm64();
- CHECK(dst.IsXRegister()) << dst;
- LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), SP, offs.Int32Value());
-}
-
-void Arm64JNIMacroAssembler::LoadRef(ManagedRegister m_dst,
- ManagedRegister m_base,
- MemberOffset offs,
- bool unpoison_reference) {
- Arm64ManagedRegister dst = m_dst.AsArm64();
- Arm64ManagedRegister base = m_base.AsArm64();
- CHECK(dst.IsXRegister() && base.IsXRegister());
- LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
- offs.Int32Value());
- if (unpoison_reference) {
- WRegister ref_reg = dst.AsOverlappingWRegister();
- asm_.MaybeUnpoisonHeapReference(reg_w(ref_reg));
- }
-}
-
-void Arm64JNIMacroAssembler::LoadRawPtr(ManagedRegister m_dst,
- ManagedRegister m_base,
- Offset offs) {
- Arm64ManagedRegister dst = m_dst.AsArm64();
- Arm64ManagedRegister base = m_base.AsArm64();
- CHECK(dst.IsXRegister() && base.IsXRegister());
- // Remove dst and base form the temp list - higher level API uses IP1, IP0.
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
- ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
-}
-
void Arm64JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset64 offs) {
Arm64ManagedRegister dst = m_dst.AsArm64();
CHECK(dst.IsXRegister()) << dst;
@@ -649,42 +583,6 @@ void Arm64JNIMacroAssembler::Move(ManagedRegister m_dst, size_t value) {
___ Mov(reg_x(dst.AsXRegister()), value);
}
-void Arm64JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset64 tr_offs) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- Register scratch = temps.AcquireX();
- ___ Ldr(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
- ___ Str(scratch, MEM_OP(sp, fr_offs.Int32Value()));
-}
-
-void Arm64JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset64 tr_offs,
- FrameOffset fr_offs,
- ManagedRegister m_scratch) {
- Arm64ManagedRegister scratch = m_scratch.AsArm64();
- CHECK(scratch.IsXRegister()) << scratch;
- LoadFromOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
- StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
-}
-
-void Arm64JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- Register scratch = temps.AcquireW();
- ___ Ldr(scratch, MEM_OP(reg_x(SP), src.Int32Value()));
- ___ Str(scratch, MEM_OP(reg_x(SP), dest.Int32Value()));
-}
-
-void Arm64JNIMacroAssembler::CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) {
- UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
- Register scratch = temps.AcquireW();
- ___ Ldr(scratch, MEM_OP(reg_x(base.AsArm64().AsXRegister()), offs.Int32Value()));
- if (unpoison_reference) {
- asm_.MaybeUnpoisonHeapReference(scratch);
- }
- ___ Str(scratch, MEM_OP(reg_x(SP), dest.Int32Value()));
-}
-
void Arm64JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
DCHECK(size == 4 || size == 8) << size;
UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
@@ -693,105 +591,6 @@ void Arm64JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size
___ Str(scratch, MEM_OP(reg_x(SP), dest.Int32Value()));
}
-void Arm64JNIMacroAssembler::Copy(FrameOffset dest,
- ManagedRegister src_base,
- Offset src_offset,
- ManagedRegister m_scratch,
- size_t size) {
- Arm64ManagedRegister scratch = m_scratch.AsArm64();
- Arm64ManagedRegister base = src_base.AsArm64();
- CHECK(base.IsXRegister()) << base;
- CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
- CHECK(size == 4 || size == 8) << size;
- if (size == 4) {
- LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsXRegister(),
- src_offset.Int32Value());
- StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
- } else if (size == 8) {
- LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), src_offset.Int32Value());
- StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
- } else {
- UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
- }
-}
-
-void Arm64JNIMacroAssembler::Copy(ManagedRegister m_dest_base,
- Offset dest_offs,
- FrameOffset src,
- ManagedRegister m_scratch,
- size_t size) {
- Arm64ManagedRegister scratch = m_scratch.AsArm64();
- Arm64ManagedRegister base = m_dest_base.AsArm64();
- CHECK(base.IsXRegister()) << base;
- CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
- CHECK(size == 4 || size == 8) << size;
- if (size == 4) {
- LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
- StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsXRegister(),
- dest_offs.Int32Value());
- } else if (size == 8) {
- LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
- StoreToOffset(scratch.AsXRegister(), base.AsXRegister(), dest_offs.Int32Value());
- } else {
- UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
- }
-}
-
-void Arm64JNIMacroAssembler::Copy(FrameOffset /*dst*/,
- FrameOffset /*src_base*/,
- Offset /*src_offset*/,
- ManagedRegister /*mscratch*/,
- size_t /*size*/) {
- UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
-}
-
-void Arm64JNIMacroAssembler::Copy(ManagedRegister m_dest,
- Offset dest_offset,
- ManagedRegister m_src,
- Offset src_offset,
- ManagedRegister m_scratch,
- size_t size) {
- Arm64ManagedRegister scratch = m_scratch.AsArm64();
- Arm64ManagedRegister src = m_src.AsArm64();
- Arm64ManagedRegister dest = m_dest.AsArm64();
- CHECK(dest.IsXRegister()) << dest;
- CHECK(src.IsXRegister()) << src;
- CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
- CHECK(size == 4 || size == 8) << size;
- if (size == 4) {
- if (scratch.IsWRegister()) {
- LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsXRegister(),
- src_offset.Int32Value());
- StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(),
- dest_offset.Int32Value());
- } else {
- LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), src.AsXRegister(),
- src_offset.Int32Value());
- StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(),
- dest_offset.Int32Value());
- }
- } else if (size == 8) {
- LoadFromOffset(scratch.AsXRegister(), src.AsXRegister(), src_offset.Int32Value());
- StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value());
- } else {
- UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
- }
-}
-
-void Arm64JNIMacroAssembler::Copy(FrameOffset /*dst*/,
- Offset /*dest_offset*/,
- FrameOffset /*src*/,
- Offset /*src_offset*/,
- ManagedRegister /*scratch*/,
- size_t /*size*/) {
- UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
-}
-
-void Arm64JNIMacroAssembler::MemoryBarrier(ManagedRegister m_scratch ATTRIBUTE_UNUSED) {
- // TODO: Should we check that m_scratch is IP? - see arm.
- ___ Dmb(InnerShareable, BarrierAll);
-}
-
void Arm64JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
Arm64ManagedRegister reg = mreg.AsArm64();
CHECK(size == 1 || size == 2) << size;
diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.h b/compiler/utils/arm64/jni_macro_assembler_arm64.h
index 3f6a4fb30d..3e6a23de44 100644
--- a/compiler/utils/arm64/jni_macro_assembler_arm64.h
+++ b/compiler/utils/arm64/jni_macro_assembler_arm64.h
@@ -68,23 +68,12 @@ class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler,
// Store routines.
void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) override;
- void StoreRef(FrameOffset dest, ManagedRegister src) override;
void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
- void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
- void StoreStackOffsetToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs) override;
void StoreStackPointerToThread(ThreadOffset64 thr_offs, bool tag_sp) override;
- void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
// Load routines.
void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) override;
- void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
- void LoadRef(ManagedRegister dest, FrameOffset src) override;
- void LoadRef(ManagedRegister dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
- void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) override;
// Copying routines.
@@ -93,43 +82,6 @@ class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler,
ArrayRef<FrameOffset> refs) override;
void Move(ManagedRegister dest, ManagedRegister src, size_t size) override;
void Move(ManagedRegister dest, size_t value) override;
- void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset64 thr_offs) override;
- void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
- override;
- void CopyRef(FrameOffset dest, FrameOffset src) override;
- void CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
- void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
- void Copy(FrameOffset dest,
- ManagedRegister src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
- void Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) override;
- void Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
- void Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
- void Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
- void MemoryBarrier(ManagedRegister scratch) override;
// Sign extension.
void SignExtend(ManagedRegister mreg, size_t size) override;
@@ -141,21 +93,6 @@ class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler,
void GetCurrentThread(ManagedRegister dest) override;
void GetCurrentThread(FrameOffset dest_offset) override;
- // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
- // stale reference that can be used to avoid loading the spilled value to
- // see if the value is null.
- void CreateJObject(ManagedRegister out_reg,
- FrameOffset spilled_reference_offset,
- ManagedRegister in_reg,
- bool null_allowed) override;
-
- // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`.
- void CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) override;
-
// Heap::VerifyObject on src. In some cases (such as a reference to this) we
// know that src may not be null.
void VerifyObject(ManagedRegister src, bool could_be_null) override;
@@ -223,6 +160,24 @@ class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler,
void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
+
+ void Copy(FrameOffset dest, FrameOffset src, size_t size);
+
+ // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
+ // stale reference that can be used to avoid loading the spilled value to
+ // see if the value is null.
+ void CreateJObject(ManagedRegister out_reg,
+ FrameOffset spilled_reference_offset,
+ ManagedRegister in_reg,
+ bool null_allowed);
+
+ // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`.
+ void CreateJObject(FrameOffset out_off,
+ FrameOffset spilled_reference_offset,
+ bool null_allowed);
+
void AddConstant(XRegister rd,
int32_t value,
vixl::aarch64::Condition cond = vixl::aarch64::al);
diff --git a/compiler/utils/assembler_thumb_test.cc b/compiler/utils/assembler_thumb_test.cc
index 7d0842629b..672cd3d10f 100644
--- a/compiler/utils/assembler_thumb_test.cc
+++ b/compiler/utils/assembler_thumb_test.cc
@@ -144,7 +144,6 @@ TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) {
__ Load(scratch_register, FrameOffset(4092), 4);
__ Load(scratch_register, FrameOffset(4096), 4);
__ LoadRawPtrFromThread(scratch_register, ThreadOffset32(512));
- __ LoadRef(method_register, scratch_register, MemberOffset(128), /* unpoison_reference= */ false);
// Stores
__ Store(FrameOffset(32), method_register, 4);
@@ -154,20 +153,67 @@ TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) {
__ Store(FrameOffset(1024), method_register, 4);
__ Store(FrameOffset(4092), scratch_register, 4);
__ Store(FrameOffset(4096), scratch_register, 4);
- __ StoreImmediateToFrame(FrameOffset(48), 0xFF);
- __ StoreImmediateToFrame(FrameOffset(48), 0xFFFFFF);
__ StoreRawPtr(FrameOffset(48), scratch_register);
- __ StoreRef(FrameOffset(48), scratch_register);
- __ StoreSpanning(FrameOffset(48), method_register, FrameOffset(48));
- __ StoreStackOffsetToThread(ThreadOffset32(512), FrameOffset(4096));
__ StoreStackPointerToThread(ThreadOffset32(512), false);
__ StoreStackPointerToThread(ThreadOffset32(512), true);
+ // MoveArguments
+ static constexpr FrameOffset kInvalidReferenceOffset =
+ JNIMacroAssembler<kArmPointerSize>::kInvalidReferenceOffset;
+ static constexpr size_t kNativePointerSize = static_cast<size_t>(kArmPointerSize);
+ // Normal or @FastNative with parameters (Object, long, long, int, Object).
+ // Note: This shall not spill the reference R1 to [sp, #36]. The JNI compiler spills
+ // references in a separate initial pass before moving arguments and creating `jobject`s.
+ ArgumentLocation move_dests1[] = {
+ ArgumentLocation(ArmManagedRegister::FromCoreRegister(R2), kNativePointerSize),
+ ArgumentLocation(FrameOffset(0), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(8), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(16), kVRegSize),
+ ArgumentLocation(FrameOffset(20), kNativePointerSize),
+ };
+ ArgumentLocation move_srcs1[] = {
+ ArgumentLocation(ArmManagedRegister::FromCoreRegister(R1), kVRegSize),
+ ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(48), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(56), kVRegSize),
+ ArgumentLocation(FrameOffset(60), kVRegSize),
+ };
+ FrameOffset move_refs1[] {
+ FrameOffset(36),
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(60),
+ };
+ __ MoveArguments(ArrayRef<ArgumentLocation>(move_dests1),
+ ArrayRef<ArgumentLocation>(move_srcs1),
+ ArrayRef<FrameOffset>(move_refs1));
+ // @CriticalNative with parameters (long, long, long, int).
+ ArgumentLocation move_dests2[] = {
+ ArgumentLocation(ArmManagedRegister::FromRegisterPair(R0_R1), 2 * kVRegSize),
+ ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(0), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(8), kVRegSize),
+ };
+ ArgumentLocation move_srcs2[] = {
+ ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(28), kVRegSize),
+ ArgumentLocation(FrameOffset(32), 2 * kVRegSize),
+ ArgumentLocation(FrameOffset(40), kVRegSize),
+ };
+ FrameOffset move_refs2[] {
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(kInvalidReferenceOffset),
+ FrameOffset(kInvalidReferenceOffset),
+ };
+ __ MoveArguments(ArrayRef<ArgumentLocation>(move_dests2),
+ ArrayRef<ArgumentLocation>(move_srcs2),
+ ArrayRef<FrameOffset>(move_refs2));
+
// Other
__ Call(method_register, FrameOffset(48));
__ Copy(FrameOffset(48), FrameOffset(44), 4);
- __ CopyRawPtrFromThread(FrameOffset(44), ThreadOffset32(512));
- __ CopyRef(FrameOffset(48), FrameOffset(44));
__ GetCurrentThread(method_register);
__ GetCurrentThread(FrameOffset(48));
__ Move(hidden_arg_register, method_register, 4);
@@ -178,7 +224,6 @@ TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) {
__ CreateJObject(high_register, FrameOffset(48), high_register, true);
__ CreateJObject(high_register, FrameOffset(48), high_register, false);
__ CreateJObject(method_register, FrameOffset(48), high_register, true);
- __ CreateJObject(FrameOffset(48), FrameOffset(64), true);
__ CreateJObject(method_register, FrameOffset(0), high_register, true);
__ CreateJObject(method_register, FrameOffset(1028), high_register, true);
__ CreateJObject(high_register, FrameOffset(1028), high_register, true);
diff --git a/compiler/utils/assembler_thumb_test_expected.cc.inc b/compiler/utils/assembler_thumb_test_expected.cc.inc
index 79cf029b79..aea7f14762 100644
--- a/compiler/utils/assembler_thumb_test_expected.cc.inc
+++ b/compiler/utils/assembler_thumb_test_expected.cc.inc
@@ -18,145 +18,144 @@ const char* const VixlJniHelpersResults = {
" 2c: f50d 5c80 add.w r12, sp, #4096\n"
" 30: f8dc c000 ldr.w r12, [r12]\n"
" 34: f8d9 c200 ldr.w r12, [r9, #512]\n"
- " 38: f8dc 0080 ldr.w r0, [r12, #128]\n"
- " 3c: 9008 str r0, [sp, #32]\n"
- " 3e: 901f str r0, [sp, #124]\n"
- " 40: 9021 str r0, [sp, #132]\n"
- " 42: 90ff str r0, [sp, #1020]\n"
- " 44: f8cd 0400 str.w r0, [sp, #1024]\n"
- " 48: f8cd cffc str.w r12, [sp, #4092]\n"
- " 4c: f84d 5d04 str r5, [sp, #-4]!\n"
- " 50: f50d 5580 add.w r5, sp, #4096\n"
- " 54: f8c5 c004 str.w r12, [r5, #4]\n"
- " 58: f85d 5b04 ldr r5, [sp], #4\n"
- " 5c: f04f 0cff mov.w r12, #255\n"
- " 60: f8cd c030 str.w r12, [sp, #48]\n"
- " 64: f06f 4c7f mvn r12, #4278190080\n"
- " 68: f8cd c030 str.w r12, [sp, #48]\n"
- " 6c: f8cd c030 str.w r12, [sp, #48]\n"
- " 70: f8cd c030 str.w r12, [sp, #48]\n"
- " 74: 900c str r0, [sp, #48]\n"
- " 76: f8dd c030 ldr.w r12, [sp, #48]\n"
- " 7a: f8cd c034 str.w r12, [sp, #52]\n"
- " 7e: f50d 5c80 add.w r12, sp, #4096\n"
- " 82: f8c9 c200 str.w r12, [r9, #512]\n"
- " 86: f8c9 d200 str.w sp, [r9, #512]\n"
- " 8a: f04d 0c02 orr r12, sp, #2\n"
- " 8e: f8c9 c200 str.w r12, [r9, #512]\n"
- " 92: f8d0 e030 ldr.w lr, [r0, #48]\n"
- " 96: 47f0 blx lr\n"
- " 98: f8dd c02c ldr.w r12, [sp, #44]\n"
- " 9c: f8cd c030 str.w r12, [sp, #48]\n"
- " a0: f8d9 c200 ldr.w r12, [r9, #512]\n"
- " a4: f8cd c02c str.w r12, [sp, #44]\n"
- " a8: f8dd c02c ldr.w r12, [sp, #44]\n"
- " ac: f8cd c030 str.w r12, [sp, #48]\n"
- " b0: 4648 mov r0, r9\n"
- " b2: f8cd 9030 str.w r9, [sp, #48]\n"
- " b6: 4604 mov r4, r0\n"
- " b8: f10d 0c30 add.w r12, sp, #48\n"
- " bc: f1bb 0f00 cmp.w r11, #0\n"
- " c0: bf18 it ne\n"
- " c2: 46e3 movne r11, r12\n"
- " c4: f10d 0b30 add.w r11, sp, #48\n"
- " c8: ea5f 000b movs.w r0, r11\n"
- " cc: bf18 it ne\n"
- " ce: a80c addne r0, sp, #48\n"
- " d0: f8dd c040 ldr.w r12, [sp, #64]\n"
- " d4: f1bc 0f00 cmp.w r12, #0\n"
- " d8: bf18 it ne\n"
- " da: f10d 0c40 addne.w r12, sp, #64\n"
- " de: f8cd c030 str.w r12, [sp, #48]\n"
- " e2: ea5f 000b movs.w r0, r11\n"
- " e6: bf18 it ne\n"
- " e8: a800 addne r0, sp, #0\n"
- " ea: f20d 4004 addw r0, sp, #1028\n"
- " ee: f1bb 0f00 cmp.w r11, #0\n"
- " f2: bf08 it eq\n"
- " f4: 4658 moveq r0, r11\n"
- " f6: f20d 4c04 addw r12, sp, #1028\n"
- " fa: f1bb 0f00 cmp.w r11, #0\n"
- " fe: bf18 it ne\n"
- " 100: 46e3 movne r11, r12\n"
- " 102: f8d9 c09c ldr.w r12, [r9, #156]\n"
- " 106: f1bc 0f00 cmp.w r12, #0\n"
- " 10a: d171 bne 0x1f0 @ imm = #226\n"
- " 10c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 110: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 114: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 118: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 11c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 120: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 124: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 128: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 12c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 130: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 134: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 138: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 13c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 140: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 144: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 148: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 14c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 150: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 154: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 158: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 15c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 160: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 164: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 168: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 16c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 170: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 174: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 178: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 17c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 180: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 184: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 188: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 18c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 190: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 194: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 198: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 19c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1a0: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1a4: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1a8: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1ac: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1b0: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1b4: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1b8: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1bc: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1c0: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1c4: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1c8: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1cc: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1d0: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1d4: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1d8: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 38: 9008 str r0, [sp, #32]\n"
+ " 3a: 901f str r0, [sp, #124]\n"
+ " 3c: 9021 str r0, [sp, #132]\n"
+ " 3e: 90ff str r0, [sp, #1020]\n"
+ " 40: f8cd 0400 str.w r0, [sp, #1024]\n"
+ " 44: f8cd cffc str.w r12, [sp, #4092]\n"
+ " 48: f84d 5d04 str r5, [sp, #-4]!\n"
+ " 4c: f50d 5580 add.w r5, sp, #4096\n"
+ " 50: f8c5 c004 str.w r12, [r5, #4]\n"
+ " 54: f85d 5b04 ldr r5, [sp], #4\n"
+ " 58: f8cd c030 str.w r12, [sp, #48]\n"
+ " 5c: f8c9 d200 str.w sp, [r9, #512]\n"
+ " 60: f04d 0c02 orr r12, sp, #2\n"
+ " 64: f8c9 c200 str.w r12, [r9, #512]\n"
+ " 68: a909 add r1, sp, #36\n"
+ " 6a: e9cd 2300 strd r2, r3, [sp]\n"
+ " 6e: e9dd 020c ldrd r0, r2, [sp, #48]\n"
+ " 72: e9cd 0202 strd r0, r2, [sp, #8]\n"
+ " 76: e9dd 020e ldrd r0, r2, [sp, #56]\n"
+ " 7a: 2a00 cmp r2, #0\n"
+ " 7c: bf18 it ne\n"
+ " 7e: aa0f addne r2, sp, #60\n"
+ " 80: e9cd 0204 strd r0, r2, [sp, #16]\n"
+ " 84: 460a mov r2, r1\n"
+ " 86: e9dd 0108 ldrd r0, r1, [sp, #32]\n"
+ " 8a: e9cd 0100 strd r0, r1, [sp]\n"
+ " 8e: f8dd c028 ldr.w r12, [sp, #40]\n"
+ " 92: f8cd c008 str.w r12, [sp, #8]\n"
+ " 96: 4610 mov r0, r2\n"
+ " 98: 4619 mov r1, r3\n"
+ " 9a: 9a07 ldr r2, [sp, #28]\n"
+ " 9c: 9b08 ldr r3, [sp, #32]\n"
+ " 9e: f8d0 e030 ldr.w lr, [r0, #48]\n"
+ " a2: 47f0 blx lr\n"
+ " a4: f8dd c02c ldr.w r12, [sp, #44]\n"
+ " a8: f8cd c030 str.w r12, [sp, #48]\n"
+ " ac: 4648 mov r0, r9\n"
+ " ae: f8cd 9030 str.w r9, [sp, #48]\n"
+ " b2: 4604 mov r4, r0\n"
+ " b4: f10d 0c30 add.w r12, sp, #48\n"
+ " b8: f1bb 0f00 cmp.w r11, #0\n"
+ " bc: bf18 it ne\n"
+ " be: 46e3 movne r11, r12\n"
+ " c0: f10d 0b30 add.w r11, sp, #48\n"
+ " c4: ea5f 000b movs.w r0, r11\n"
+ " c8: bf18 it ne\n"
+ " ca: a80c addne r0, sp, #48\n"
+ " cc: ea5f 000b movs.w r0, r11\n"
+ " d0: bf18 it ne\n"
+ " d2: a800 addne r0, sp, #0\n"
+ " d4: f20d 4004 addw r0, sp, #1028\n"
+ " d8: f1bb 0f00 cmp.w r11, #0\n"
+ " dc: bf08 it eq\n"
+ " de: 4658 moveq r0, r11\n"
+ " e0: f20d 4c04 addw r12, sp, #1028\n"
+ " e4: f1bb 0f00 cmp.w r11, #0\n"
+ " e8: bf18 it ne\n"
+ " ea: 46e3 movne r11, r12\n"
+ " ec: f8d9 c09c ldr.w r12, [r9, #156]\n"
+ " f0: f1bc 0f00 cmp.w r12, #0\n"
+ " f4: d16f bne 0x1d6 @ imm = #222\n"
+ " f6: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " fa: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " fe: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 102: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 106: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 10a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 10e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 112: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 116: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 11a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 11e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 122: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 126: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 12a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 12e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 132: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 136: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 13a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 13e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 142: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 146: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 14a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 14e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 152: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 156: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 15a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 15e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 162: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 166: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 16a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 16e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 172: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 176: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 17a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 17e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 182: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 186: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 18a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 18e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 192: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 196: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 19a: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 19e: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1a2: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1a6: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1aa: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1ae: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1b2: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1b6: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1ba: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1be: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1c2: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1c6: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1ca: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1ce: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1d2: f000 b803 b.w 0x1dc @ imm = #6\n"
+ " 1d6: f000 b81e b.w 0x216 @ imm = #60\n"
+ " 1da: 0000 movs r0, r0\n"
" 1dc: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1e0: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1e4: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1e8: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 1ec: f000 b802 b.w 0x1f4 @ imm = #4\n"
- " 1f0: f000 b81b b.w 0x22a @ imm = #54\n"
+ " 1ec: f8cd c7ff str.w r12, [sp, #2047]\n"
+ " 1f0: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1f4: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1f8: f8cd c7ff str.w r12, [sp, #2047]\n"
" 1fc: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 200: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 204: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 208: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 20c: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 210: f8cd c7ff str.w r12, [sp, #2047]\n"
- " 214: f50d 5d80 add.w sp, sp, #4096\n"
- " 218: b008 add sp, #32\n"
- " 21a: b001 add sp, #4\n"
- " 21c: ecbd 8a10 vpop {s16, s17, s18, s19, s20, s21, s22, s23, s24, s25, s26, s27, s28, s29, s30, s31}\n"
- " 220: e8bd 4de0 pop.w {r5, r6, r7, r8, r10, r11, lr}\n"
- " 224: f8d9 8024 ldr.w r8, [r9, #36]\n"
- " 228: 4770 bx lr\n"
- " 22a: f8d9 009c ldr.w r0, [r9, #156]\n"
- " 22e: f8d9 e2d0 ldr.w lr, [r9, #720]\n"
- " 232: 47f0 blx lr\n"
+ " 200: f50d 5d80 add.w sp, sp, #4096\n"
+ " 204: b008 add sp, #32\n"
+ " 206: b001 add sp, #4\n"
+ " 208: ecbd 8a10 vpop {s16, s17, s18, s19, s20, s21, s22, s23, s24, s25, s26, s27, s28, s29, s30, s31}\n"
+ " 20c: e8bd 4de0 pop.w {r5, r6, r7, r8, r10, r11, lr}\n"
+ " 210: f8d9 8024 ldr.w r8, [r9, #36]\n"
+ " 214: 4770 bx lr\n"
+ " 216: f8d9 009c ldr.w r0, [r9, #156]\n"
+ " 21a: f8d9 e2d0 ldr.w lr, [r9, #720]\n"
+ " 21e: 47f0 blx lr\n"
};
const char* const VixlLoadFromOffsetResults = {
diff --git a/compiler/utils/jni_macro_assembler.h b/compiler/utils/jni_macro_assembler.h
index 79dd987081..15a4c3fe67 100644
--- a/compiler/utils/jni_macro_assembler.h
+++ b/compiler/utils/jni_macro_assembler.h
@@ -118,41 +118,18 @@ class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
// Store routines
virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
virtual void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) = 0;
- virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;
- virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) = 0;
-
- virtual void StoreStackOffsetToThread(ThreadOffset<kPointerSize> thr_offs,
- FrameOffset fr_offs) = 0;
-
// Stores stack pointer by tagging it if required so we can walk the stack. In debuggable runtimes
// we use tag to tell if we are using JITed code or AOT code. In non-debuggable runtimes we never
// use JITed code when AOT code is present. So checking for AOT code is sufficient to detect which
// code is being executed. We avoid tagging in non-debuggable runtimes to reduce instructions.
virtual void StoreStackPointerToThread(ThreadOffset<kPointerSize> thr_offs, bool tag_sp) = 0;
- virtual void StoreSpanning(FrameOffset dest,
- ManagedRegister src,
- FrameOffset in_off) = 0;
-
// Load routines
virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
virtual void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) = 0;
- virtual void LoadFromThread(ManagedRegister dest,
- ThreadOffset<kPointerSize> src,
- size_t size) = 0;
-
- virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
- // If unpoison_reference is true and kPoisonReference is true, then we negate the read reference.
- virtual void LoadRef(ManagedRegister dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) = 0;
-
- virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;
-
virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;
// Copying routines
@@ -169,56 +146,8 @@ class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;
- virtual void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset<kPointerSize> thr_offs) = 0;
-
- virtual void CopyRawPtrToThread(ThreadOffset<kPointerSize> thr_offs,
- FrameOffset fr_offs,
- ManagedRegister scratch) = 0;
-
- virtual void CopyRef(FrameOffset dest, FrameOffset src) = 0;
- virtual void CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) = 0;
-
- virtual void Copy(FrameOffset dest, FrameOffset src, size_t size) = 0;
-
- virtual void Copy(FrameOffset dest,
- ManagedRegister src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) = 0;
-
- virtual void Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) = 0;
-
- virtual void Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) = 0;
-
- virtual void Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) = 0;
-
- virtual void Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) = 0;
-
virtual void Move(ManagedRegister dst, size_t value) = 0;
- virtual void MemoryBarrier(ManagedRegister scratch) = 0;
-
// Sign extension
virtual void SignExtend(ManagedRegister mreg, size_t size) = 0;
@@ -229,21 +158,6 @@ class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
virtual void GetCurrentThread(ManagedRegister dest) = 0;
virtual void GetCurrentThread(FrameOffset dest_offset) = 0;
- // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
- // stale reference that can be used to avoid loading the spilled value to
- // see if the value is null.
- virtual void CreateJObject(ManagedRegister out_reg,
- FrameOffset spilled_reference_offset,
- ManagedRegister in_reg,
- bool null_allowed) = 0;
-
- // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`.
- virtual void CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) = 0;
-
// Heap::VerifyObject on src. In some cases (such as a reference to this) we
// know that src may not be null.
virtual void VerifyObject(ManagedRegister src, bool could_be_null) = 0;
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.cc b/compiler/utils/x86/jni_macro_assembler_x86.cc
index 7bb167cdf4..40fdc50f67 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.cc
+++ b/compiler/utils/x86/jni_macro_assembler_x86.cc
@@ -165,28 +165,12 @@ void X86JNIMacroAssembler::Store(ManagedRegister mbase,
}
}
-void X86JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
- X86ManagedRegister src = msrc.AsX86();
- CHECK(src.IsCpuRegister());
- __ movl(Address(ESP, dest), src.AsCpuRegister());
-}
-
void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
X86ManagedRegister src = msrc.AsX86();
CHECK(src.IsCpuRegister());
__ movl(Address(ESP, dest), src.AsCpuRegister());
}
-void X86JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
- __ movl(Address(ESP, dest), Immediate(imm));
-}
-
-void X86JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) {
- Register scratch = GetScratchRegister();
- __ leal(scratch, Address(ESP, fr_offs));
- __ fs()->movl(Address::Absolute(thr_offs), scratch);
-}
-
void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) {
if (tag_sp) {
// There is no free register, store contents onto stack and restore back later.
@@ -201,12 +185,6 @@ void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs, bo
}
}
-void X86JNIMacroAssembler::StoreSpanning(FrameOffset /*dst*/,
- ManagedRegister /*src*/,
- FrameOffset /*in_off*/) {
- UNIMPLEMENTED(FATAL); // this case only currently exists for ARM
-}
-
void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
Load(mdest, X86ManagedRegister::FromCpuRegister(ESP), MemberOffset(src.Int32Value()), size);
}
@@ -243,61 +221,6 @@ void X86JNIMacroAssembler::Load(ManagedRegister mdest,
}
}
-void X86JNIMacroAssembler::LoadFromThread(ManagedRegister mdest, ThreadOffset32 src, size_t size) {
- X86ManagedRegister dest = mdest.AsX86();
- if (dest.IsNoRegister()) {
- CHECK_EQ(0u, size);
- } else if (dest.IsCpuRegister()) {
- if (size == 1u) {
- __ fs()->movzxb(dest.AsCpuRegister(), Address::Absolute(src));
- } else {
- CHECK_EQ(4u, size);
- __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
- }
- } else if (dest.IsRegisterPair()) {
- CHECK_EQ(8u, size);
- __ fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
- __ fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset32(src.Int32Value()+4)));
- } else if (dest.IsX87Register()) {
- if (size == 4) {
- __ fs()->flds(Address::Absolute(src));
- } else {
- __ fs()->fldl(Address::Absolute(src));
- }
- } else {
- CHECK(dest.IsXmmRegister());
- if (size == 4) {
- __ fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
- } else {
- __ fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
- }
- }
-}
-
-void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
- X86ManagedRegister dest = mdest.AsX86();
- CHECK(dest.IsCpuRegister());
- __ movl(dest.AsCpuRegister(), Address(ESP, src));
-}
-
-void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool unpoison_reference) {
- X86ManagedRegister dest = mdest.AsX86();
- CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
- __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
- if (unpoison_reference) {
- __ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
- }
-}
-
-void X86JNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
- ManagedRegister base,
- Offset offs) {
- X86ManagedRegister dest = mdest.AsX86();
- CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
- __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
-}
-
void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
X86ManagedRegister dest = mdest.AsX86();
CHECK(dest.IsCpuRegister());
@@ -417,39 +340,6 @@ void X86JNIMacroAssembler::Move(ManagedRegister mdest, size_t value) {
__ movl(dest.AsCpuRegister(), Immediate(value));
}
-void X86JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
- Register scratch = GetScratchRegister();
- __ movl(scratch, Address(ESP, src));
- __ movl(Address(ESP, dest), scratch);
-}
-
-void X86JNIMacroAssembler::CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) {
- Register scratch = GetScratchRegister();
- __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));
- if (unpoison_reference) {
- __ MaybeUnpoisonHeapReference(scratch);
- }
- __ movl(Address(ESP, dest), scratch);
-}
-
-void X86JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) {
- Register scratch = GetScratchRegister();
- __ fs()->movl(scratch, Address::Absolute(thr_offs));
- __ movl(Address(ESP, fr_offs), scratch);
-}
-
-void X86JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs,
- FrameOffset fr_offs,
- ManagedRegister mscratch) {
- X86ManagedRegister scratch = mscratch.AsX86();
- CHECK(scratch.IsCpuRegister());
- Load(scratch, fr_offs, 4);
- __ fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
-}
-
void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
DCHECK(size == 4 || size == 8) << size;
Register scratch = GetScratchRegister();
@@ -461,67 +351,6 @@ void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size)
}
}
-void X86JNIMacroAssembler::Copy(FrameOffset /*dst*/,
- ManagedRegister /*src_base*/,
- Offset /*src_offset*/,
- ManagedRegister /*scratch*/,
- size_t /*size*/) {
- UNIMPLEMENTED(FATAL);
-}
-
-void X86JNIMacroAssembler::Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) {
- CHECK(scratch.IsNoRegister());
- CHECK_EQ(size, 4u);
- __ pushl(Address(ESP, src));
- __ popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
-}
-
-void X86JNIMacroAssembler::Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister mscratch,
- size_t size) {
- Register scratch = mscratch.AsX86().AsCpuRegister();
- CHECK_EQ(size, 4u);
- __ movl(scratch, Address(ESP, src_base));
- __ movl(scratch, Address(scratch, src_offset));
- __ movl(Address(ESP, dest), scratch);
-}
-
-void X86JNIMacroAssembler::Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) {
- CHECK_EQ(size, 4u);
- CHECK(scratch.IsNoRegister());
- __ pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
- __ popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
-}
-
-void X86JNIMacroAssembler::Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister mscratch,
- size_t size) {
- Register scratch = mscratch.AsX86().AsCpuRegister();
- CHECK_EQ(size, 4u);
- CHECK_EQ(dest.Int32Value(), src.Int32Value());
- __ movl(scratch, Address(ESP, src));
- __ pushl(Address(scratch, src_offset));
- __ popl(Address(scratch, dest_offset));
-}
-
-void X86JNIMacroAssembler::MemoryBarrier(ManagedRegister) {
- __ mfence();
-}
-
void X86JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
FrameOffset spilled_reference_offset,
ManagedRegister min_reg,
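For reference, the frame-to-frame Copy() kept in the hunk above still routes every slot through a scratch register, since x86 has no memory-to-memory mov. A self-contained sketch of that behavior (illustrative only, not the ART source; it assumes the 8-byte case is handled as two 4-byte moves through the same scratch register, which the hunk cuts off):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Models the retained private Copy(FrameOffset, FrameOffset, size_t):
    // copy a 4- or 8-byte stack slot through a 32-bit "scratch register".
    void CopyFrameSlot(uint8_t* stack, size_t dest_off, size_t src_off, size_t size) {
      for (size_t done = 0; done < size; done += 4u) {
        uint32_t scratch;  // stands in for the register from GetScratchRegister()
        std::memcpy(&scratch, stack + src_off + done, sizeof(scratch));
        std::memcpy(stack + dest_off + done, &scratch, sizeof(scratch));
      }
    }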
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.h b/compiler/utils/x86/jni_macro_assembler_x86.h
index eba4b99b8d..c5e8ad578c 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.h
+++ b/compiler/utils/x86/jni_macro_assembler_x86.h
@@ -59,30 +59,14 @@ class X86JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86Assembler, Poi
// Store routines
void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) override;
- void StoreRef(FrameOffset dest, ManagedRegister src) override;
void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
- void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
-
- void StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) override;
-
void StoreStackPointerToThread(ThreadOffset32 thr_offs, bool tag_sp) override;
- void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
-
// Load routines
void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) override;
- void LoadFromThread(ManagedRegister dest, ThreadOffset32 src, size_t size) override;
-
- void LoadRef(ManagedRegister dest, FrameOffset src) override;
-
- void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool unpoison_reference) override;
-
- void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
-
void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset32 offs) override;
// Copying routines
@@ -94,36 +78,6 @@ class X86JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86Assembler, Poi
void Move(ManagedRegister dest, size_t value) override;
- void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) override;
-
- void CopyRawPtrToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
- override;
-
- void CopyRef(FrameOffset dest, FrameOffset src) override;
- void CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
-
- void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
-
- void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
- size_t size) override;
-
- void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest, Offset dest_offset, ManagedRegister src, Offset src_offset,
- ManagedRegister scratch, size_t size) override;
-
- void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
- ManagedRegister scratch, size_t size) override;
-
- void MemoryBarrier(ManagedRegister) override;
-
// Sign extension
void SignExtend(ManagedRegister mreg, size_t size) override;
@@ -134,21 +88,6 @@ class X86JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86Assembler, Poi
void GetCurrentThread(ManagedRegister dest) override;
void GetCurrentThread(FrameOffset dest_offset) override;
- // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
- // stale reference that can be used to avoid loading the spilled value to
- // see if the value is null.
- void CreateJObject(ManagedRegister out_reg,
- FrameOffset spilled_reference_offset,
- ManagedRegister in_reg,
- bool null_allowed) override;
-
- // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`.
- void CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) override;
-
// Heap::VerifyObject on src. In some cases (such as a reference to this) we
// know that src may not be null.
void VerifyObject(ManagedRegister src, bool could_be_null) override;
@@ -197,6 +136,23 @@ class X86JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86Assembler, Poi
void Bind(JNIMacroLabel* label) override;
private:
+ void Copy(FrameOffset dest, FrameOffset src, size_t size);
+
+ // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
+ // stale reference that can be used to avoid loading the spilled value to
+ // see if the value is null.
+ void CreateJObject(ManagedRegister out_reg,
+ FrameOffset spilled_reference_offset,
+ ManagedRegister in_reg,
+ bool null_allowed);
+
+ // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`.
+ void CreateJObject(FrameOffset out_off,
+ FrameOffset spilled_reference_offset,
+ bool null_allowed);
+
DISALLOW_COPY_AND_ASSIGN(X86JNIMacroAssembler);
};
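The comments kept on the now-private CreateJObject() overloads describe a simple contract: the output is either the address of the spilled StackReference<Object> slot or null. A minimal sketch of that contract in plain C++ (illustrative; the names are hypothetical and no instruction emission is shown):

    #include <cstdint>

    // The possibly stale value of `in_reg` is only compared against zero,
    // never dereferenced, so it can be used to detect a null reference
    // without reloading the spilled slot.
    uintptr_t CreateJObjectValue(uintptr_t spilled_slot_address,
                                 uintptr_t in_reg_value,
                                 bool null_allowed) {
      if (null_allowed && in_reg_value == 0u) {
        return 0u;  // Null reference: pass a null jobject.
      }
      return spilled_slot_address;  // Otherwise pass a pointer to the spilled slot.
    }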
diff --git a/compiler/utils/x86_64/assembler_x86_64.h b/compiler/utils/x86_64/assembler_x86_64.h
index 9402fe3b44..235ea03e2b 100644
--- a/compiler/utils/x86_64/assembler_x86_64.h
+++ b/compiler/utils/x86_64/assembler_x86_64.h
@@ -30,7 +30,6 @@
#include "managed_register_x86_64.h"
#include "offsets.h"
#include "utils/assembler.h"
-#include "utils/jni_macro_assembler.h"
namespace art HIDDEN {
namespace x86_64 {
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
index 044027aaca..e552d29ee3 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
@@ -194,29 +194,12 @@ void X86_64JNIMacroAssembler::Store(ManagedRegister mbase,
}
}
-void X86_64JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
- X86_64ManagedRegister src = msrc.AsX86_64();
- CHECK(src.IsCpuRegister());
- __ movl(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
-}
-
void X86_64JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
X86_64ManagedRegister src = msrc.AsX86_64();
CHECK(src.IsCpuRegister());
__ movq(Address(CpuRegister(RSP), dest), src.AsCpuRegister());
}
-void X86_64JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
- __ movl(Address(CpuRegister(RSP), dest), Immediate(imm)); // TODO(64) movq?
-}
-
-void X86_64JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset64 thr_offs,
- FrameOffset fr_offs) {
- CpuRegister scratch = GetScratchRegister();
- __ leaq(scratch, Address(CpuRegister(RSP), fr_offs));
- __ gs()->movq(Address::Absolute(thr_offs, true), scratch);
-}
-
void X86_64JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset64 thr_offs, bool tag_sp) {
if (tag_sp) {
CpuRegister reg = GetScratchRegister();
@@ -228,12 +211,6 @@ void X86_64JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset64 thr_offs,
}
}
-void X86_64JNIMacroAssembler::StoreSpanning(FrameOffset /*dst*/,
- ManagedRegister /*src*/,
- FrameOffset /*in_off*/) {
- UNIMPLEMENTED(FATAL); // this case only currently exists for ARM
-}
-
void X86_64JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
Load(mdest, X86_64ManagedRegister::FromCpuRegister(RSP), MemberOffset(src.Int32Value()), size);
}
@@ -270,67 +247,6 @@ void X86_64JNIMacroAssembler::Load(ManagedRegister mdest,
}
}
-void X86_64JNIMacroAssembler::LoadFromThread(ManagedRegister mdest,
- ThreadOffset64 src, size_t size) {
- X86_64ManagedRegister dest = mdest.AsX86_64();
- if (dest.IsNoRegister()) {
- CHECK_EQ(0u, size);
- } else if (dest.IsCpuRegister()) {
- if (size == 1u) {
- __ gs()->movzxb(dest.AsCpuRegister(), Address::Absolute(src, true));
- } else {
- CHECK_EQ(4u, size);
- __ gs()->movl(dest.AsCpuRegister(), Address::Absolute(src, true));
- }
- } else if (dest.IsRegisterPair()) {
- CHECK_EQ(8u, size);
- __ gs()->movq(dest.AsRegisterPairLow(), Address::Absolute(src, true));
- } else if (dest.IsX87Register()) {
- if (size == 4) {
- __ gs()->flds(Address::Absolute(src, true));
- } else {
- __ gs()->fldl(Address::Absolute(src, true));
- }
- } else {
- CHECK(dest.IsXmmRegister());
- if (size == 4) {
- __ gs()->movss(dest.AsXmmRegister(), Address::Absolute(src, true));
- } else {
- __ gs()->movsd(dest.AsXmmRegister(), Address::Absolute(src, true));
- }
- }
-}
-
-void X86_64JNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
- X86_64ManagedRegister dest = mdest.AsX86_64();
- CHECK(dest.IsCpuRegister());
- __ movq(dest.AsCpuRegister(), Address(CpuRegister(RSP), src));
-}
-
-void X86_64JNIMacroAssembler::LoadRef(ManagedRegister mdest,
- ManagedRegister mbase,
- MemberOffset offs,
- bool unpoison_reference) {
- X86_64ManagedRegister base = mbase.AsX86_64();
- X86_64ManagedRegister dest = mdest.AsX86_64();
- CHECK(base.IsCpuRegister());
- CHECK(dest.IsCpuRegister());
- __ movl(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));
- if (unpoison_reference) {
- __ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
- }
-}
-
-void X86_64JNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
- ManagedRegister mbase,
- Offset offs) {
- X86_64ManagedRegister base = mbase.AsX86_64();
- X86_64ManagedRegister dest = mdest.AsX86_64();
- CHECK(base.IsCpuRegister());
- CHECK(dest.IsCpuRegister());
- __ movq(dest.AsCpuRegister(), Address(base.AsCpuRegister(), offs));
-}
-
void X86_64JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset64 offs) {
X86_64ManagedRegister dest = mdest.AsX86_64();
CHECK(dest.IsCpuRegister());
@@ -490,39 +406,6 @@ void X86_64JNIMacroAssembler::Move(ManagedRegister mdest, size_t value) {
__ movq(dest.AsCpuRegister(), Immediate(value));
}
-void X86_64JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
- CpuRegister scratch = GetScratchRegister();
- __ movl(scratch, Address(CpuRegister(RSP), src));
- __ movl(Address(CpuRegister(RSP), dest), scratch);
-}
-
-void X86_64JNIMacroAssembler::CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) {
- CpuRegister scratch = GetScratchRegister();
- __ movl(scratch, Address(base.AsX86_64().AsCpuRegister(), offs));
- if (unpoison_reference) {
- __ MaybeUnpoisonHeapReference(scratch);
- }
- __ movl(Address(CpuRegister(RSP), dest), scratch);
-}
-
-void X86_64JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset64 thr_offs) {
- CpuRegister scratch = GetScratchRegister();
- __ gs()->movq(scratch, Address::Absolute(thr_offs, true));
- __ movq(Address(CpuRegister(RSP), fr_offs), scratch);
-}
-
-void X86_64JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset64 thr_offs,
- FrameOffset fr_offs,
- ManagedRegister mscratch) {
- X86_64ManagedRegister scratch = mscratch.AsX86_64();
- CHECK(scratch.IsCpuRegister());
- Load(scratch, fr_offs, 8);
- __ gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister());
-}
-
void X86_64JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
DCHECK(size == 4 || size == 8) << size;
CpuRegister scratch = GetScratchRegister();
@@ -535,67 +418,6 @@ void X86_64JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t siz
}
}
-void X86_64JNIMacroAssembler::Copy(FrameOffset /*dst*/,
- ManagedRegister /*src_base*/,
- Offset /*src_offset*/,
- ManagedRegister /*scratch*/,
- size_t /*size*/) {
- UNIMPLEMENTED(FATAL);
-}
-
-void X86_64JNIMacroAssembler::Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) {
- CHECK(scratch.IsNoRegister());
- CHECK_EQ(size, 4u);
- __ pushq(Address(CpuRegister(RSP), src));
- __ popq(Address(dest_base.AsX86_64().AsCpuRegister(), dest_offset));
-}
-
-void X86_64JNIMacroAssembler::Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister mscratch,
- size_t size) {
- CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
- CHECK_EQ(size, 4u);
- __ movq(scratch, Address(CpuRegister(RSP), src_base));
- __ movq(scratch, Address(scratch, src_offset));
- __ movq(Address(CpuRegister(RSP), dest), scratch);
-}
-
-void X86_64JNIMacroAssembler::Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) {
- CHECK_EQ(size, 4u);
- CHECK(scratch.IsNoRegister());
- __ pushq(Address(src.AsX86_64().AsCpuRegister(), src_offset));
- __ popq(Address(dest.AsX86_64().AsCpuRegister(), dest_offset));
-}
-
-void X86_64JNIMacroAssembler::Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister mscratch,
- size_t size) {
- CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();
- CHECK_EQ(size, 4u);
- CHECK_EQ(dest.Int32Value(), src.Int32Value());
- __ movq(scratch, Address(CpuRegister(RSP), src));
- __ pushq(Address(scratch, src_offset));
- __ popq(Address(scratch, dest_offset));
-}
-
-void X86_64JNIMacroAssembler::MemoryBarrier(ManagedRegister) {
- __ mfence();
-}
-
void X86_64JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
FrameOffset spilled_reference_offset,
ManagedRegister min_reg,
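In the tag_sp branch of StoreStackPointerToThread() retained above, a marked copy of RSP is published to the thread rather than the raw value. A minimal sketch of the pointer-tagging idea, assuming the tag occupies the low bit of the (aligned) stack pointer — the exact bit is not shown in this hunk:

    #include <cstdint>

    // Tag the stack-pointer value before it is written to the Thread field;
    // an aligned pointer has free low bits, so setting one loses no
    // information (assumption: bit 0 is the tag).
    uint64_t TagStackPointer(uint64_t sp, bool tag_sp) {
      return tag_sp ? (sp | 1u) : sp;
    }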
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.h b/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
index 3308c7ebd3..2c1fc3588d 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
@@ -60,32 +60,14 @@ class X86_64JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86_64Assemble
// Store routines
void Store(FrameOffset offs, ManagedRegister src, size_t size) override;
void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) override;
- void StoreRef(FrameOffset dest, ManagedRegister src) override;
void StoreRawPtr(FrameOffset dest, ManagedRegister src) override;
- void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) override;
-
- void StoreStackOffsetToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs) override;
-
void StoreStackPointerToThread(ThreadOffset64 thr_offs, bool tag_sp) override;
- void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off) override;
-
// Load routines
void Load(ManagedRegister dest, FrameOffset src, size_t size) override;
void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) override;
- void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) override;
-
- void LoadRef(ManagedRegister dest, FrameOffset src) override;
-
- void LoadRef(ManagedRegister dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
-
- void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) override;
-
void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) override;
// Copying routines
@@ -97,53 +79,6 @@ class X86_64JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86_64Assemble
void Move(ManagedRegister dest, size_t value) override;
- void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset64 thr_offs) override;
-
- void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
- override;
-
- void CopyRef(FrameOffset dest, FrameOffset src) override;
- void CopyRef(FrameOffset dest,
- ManagedRegister base,
- MemberOffset offs,
- bool unpoison_reference) override;
-
- void Copy(FrameOffset dest, FrameOffset src, size_t size) override;
-
- void Copy(FrameOffset dest,
- ManagedRegister src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest_base,
- Offset dest_offset,
- FrameOffset src,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(FrameOffset dest,
- FrameOffset src_base,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(ManagedRegister dest,
- Offset dest_offset,
- ManagedRegister src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void Copy(FrameOffset dest,
- Offset dest_offset,
- FrameOffset src,
- Offset src_offset,
- ManagedRegister scratch,
- size_t size) override;
-
- void MemoryBarrier(ManagedRegister) override;
-
// Sign extension
void SignExtend(ManagedRegister mreg, size_t size) override;
@@ -154,21 +89,6 @@ class X86_64JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86_64Assemble
void GetCurrentThread(ManagedRegister dest) override;
void GetCurrentThread(FrameOffset dest_offset) override;
- // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
- // stale reference that can be used to avoid loading the spilled value to
- // see if the value is null.
- void CreateJObject(ManagedRegister out_reg,
- FrameOffset spilled_reference_offset,
- ManagedRegister in_reg,
- bool null_allowed) override;
-
- // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
- // or to be null if the value is null and `null_allowed`.
- void CreateJObject(FrameOffset out_off,
- FrameOffset spilled_reference_offset,
- bool null_allowed) override;
-
// Heap::VerifyObject on src. In some cases (such as a reference to this) we
// know that src may not be null.
void VerifyObject(ManagedRegister src, bool could_be_null) override;
@@ -217,6 +137,23 @@ class X86_64JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86_64Assemble
void Bind(JNIMacroLabel* label) override;
private:
+ void Copy(FrameOffset dest, FrameOffset src, size_t size);
+
+ // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
+ // stale reference that can be used to avoid loading the spilled value to
+ // see if the value is null.
+ void CreateJObject(ManagedRegister out_reg,
+ FrameOffset spilled_reference_offset,
+ ManagedRegister in_reg,
+ bool null_allowed);
+
+ // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
+ // or to be null if the value is null and `null_allowed`.
+ void CreateJObject(FrameOffset out_off,
+ FrameOffset spilled_reference_offset,
+ bool null_allowed);
+
DISALLOW_COPY_AND_ASSIGN(X86_64JNIMacroAssembler);
};
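Both header diffs follow the same shape: CreateJObject() and the FrameOffset-to-FrameOffset Copy() lose their `override` declarations and reappear under `private:` as plain member functions, so they remain reachable only from inside the implementing assembler class. A minimal C++ illustration of that pattern, using hypothetical names:

    // Hypothetical stand-ins: Iface for the macro-assembler interface,
    // X86Impl for an implementing class such as X86JNIMacroAssembler.
    class Iface {
     public:
      virtual ~Iface() = default;
      virtual void Store() = 0;   // still part of the public interface
      // CreateJObject() is no longer declared here.
    };

    class X86Impl final : public Iface {
     public:
      void Store() override { CreateJObject(); }  // the implementation may still call the helper internally
     private:
      void CreateJObject() {}     // unreachable through an Iface pointer
    };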