summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--compiler/optimizing/optimizing_compiler.cc13
-rw-r--r--runtime/arch/arm64/quick_entrypoints_arm64.S2
-rw-r--r--runtime/arch/riscv64/entrypoints_init_riscv64.cc34
-rw-r--r--runtime/arch/riscv64/quick_entrypoints_riscv64.S81
-rw-r--r--runtime/entrypoints/quick/quick_trampoline_entrypoints.cc5
5 files changed, 125 insertions, 10 deletions
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index bc0a66f7db..6062d25eb8 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -771,6 +771,8 @@ static bool CanAssembleGraphForRiscv64(HGraph* graph) {
case HInstruction::kGoto:
case HInstruction::kPackedSwitch:
case HInstruction::kSelect:
+ case HInstruction::kThrow:
+ case HInstruction::kNop:
case HInstruction::kTryBoundary:
case HInstruction::kClearException:
case HInstruction::kLoadException:
@@ -790,6 +792,8 @@ static bool CanAssembleGraphForRiscv64(HGraph* graph) {
case HInstruction::kLoadMethodType:
case HInstruction::kNewArray:
case HInstruction::kNewInstance:
+ case HInstruction::kConstructorFence:
+ case HInstruction::kMemoryBarrier:
case HInstruction::kInstanceFieldGet:
case HInstruction::kInstanceFieldSet:
case HInstruction::kStaticFieldGet:
@@ -821,16 +825,25 @@ static bool CanAssembleGraphForRiscv64(HGraph* graph) {
case HInstruction::kUShr:
case HInstruction::kAbs:
case HInstruction::kBooleanNot:
+ case HInstruction::kDiv:
+ case HInstruction::kRem:
case HInstruction::kMul:
case HInstruction::kNeg:
case HInstruction::kNot:
case HInstruction::kMin:
case HInstruction::kMax:
+ case HInstruction::kMonitorOperation:
+ case HInstruction::kStringBuilderAppend:
case HInstruction::kInvokeStaticOrDirect:
case HInstruction::kInvokeVirtual:
case HInstruction::kInvokeInterface:
case HInstruction::kCurrentMethod:
case HInstruction::kNullCheck:
+ case HInstruction::kDeoptimize:
+ case HInstruction::kDivZeroCheck:
+ case HInstruction::kCheckCast:
+ case HInstruction::kInstanceOf:
+ case HInstruction::kBoundType:
break;
default:
// Unimplemented instruction.
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index abb09be54d..7ac2bbe0e9 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -903,6 +903,8 @@ ENTRY art_quick_check_instance_of
cbz x0, .Lthrow_class_cast_exception
// Restore and return
+ // TODO: We do not need to restore X0 and X1 on success. We also do not need
+ // to record CFI for them as the information is not very useful.
RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32
ret
diff --git a/runtime/arch/riscv64/entrypoints_init_riscv64.cc b/runtime/arch/riscv64/entrypoints_init_riscv64.cc
index 3bf38b056d..98ae040ad6 100644
--- a/runtime/arch/riscv64/entrypoints_init_riscv64.cc
+++ b/runtime/arch/riscv64/entrypoints_init_riscv64.cc
@@ -14,15 +14,20 @@
* limitations under the License.
*/
+#include <math.h>
+
#include "entrypoints/quick/quick_default_init_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
namespace art {
+// Cast entrypoints.
+extern "C" size_t artInstanceOfFromCode(mirror::Object* obj, mirror::Class* ref_class);
+
+// Read barrier entrypoints.
// art_quick_read_barrier_mark_regX uses a non-standard calling convention: it
// expects its input in register X and returns its result in that same register,
// and saves and restores all other registers.
-
// No read barrier for X0 (Zero), X1 (RA), X2 (SP), X3 (GP) and X4 (TP).
extern "C" mirror::Object* art_quick_read_barrier_mark_reg05(mirror::Object*); // t0/x5
extern "C" mirror::Object* art_quick_read_barrier_mark_reg06(mirror::Object*); // t1/x6
@@ -90,6 +95,33 @@ void InitEntryPoints(JniEntryPoints* jpoints,
QuickEntryPoints* qpoints,
bool monitor_jni_entry_exit) {
DefaultInitEntryPoints(jpoints, qpoints, monitor_jni_entry_exit);
+
+ // Cast
+ qpoints->SetInstanceofNonTrivial(artInstanceOfFromCode);
+ qpoints->SetCheckInstanceOf(art_quick_check_instance_of);
+
+ // Math
+ // TODO(riscv64): null entrypoints not needed for riscv64 - using generated code.
+ qpoints->SetCmpgDouble(nullptr);
+ qpoints->SetCmpgFloat(nullptr);
+ qpoints->SetCmplDouble(nullptr);
+ qpoints->SetCmplFloat(nullptr);
+ qpoints->SetFmod(fmod);
+ qpoints->SetL2d(nullptr);
+ qpoints->SetFmodf(fmodf);
+ qpoints->SetL2f(nullptr);
+ qpoints->SetD2iz(nullptr);
+ qpoints->SetF2iz(nullptr);
+ qpoints->SetIdivmod(nullptr);
+ qpoints->SetD2l(nullptr);
+ qpoints->SetF2l(nullptr);
+ qpoints->SetLdiv(nullptr);
+ qpoints->SetLmod(nullptr);
+ qpoints->SetLmul(nullptr);
+ qpoints->SetShlLong(nullptr);
+ qpoints->SetShrLong(nullptr);
+ qpoints->SetUshrLong(nullptr);
+
// TODO(riscv64): add other entrypoints
}
diff --git a/runtime/arch/riscv64/quick_entrypoints_riscv64.S b/runtime/arch/riscv64/quick_entrypoints_riscv64.S
index 1bac627b2d..e4edd60df4 100644
--- a/runtime/arch/riscv64/quick_entrypoints_riscv64.S
+++ b/runtime/arch/riscv64/quick_entrypoints_riscv64.S
@@ -667,6 +667,72 @@ ENTRY art_quick_proxy_invoke_handler
END art_quick_proxy_invoke_handler
+// Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+// will long jump to the upcall with a special exception of -1.
+ .extern artDeoptimizeFromCompiledCode
+ENTRY art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_EVERYTHING_FRAME
+ mv a1, xSELF // Pass Thread::Current().
+ call artDeoptimizeFromCompiledCode // (DeoptimizationKind, Thread*)
+ unimp
+END art_quick_deoptimize_from_compiled_code
+
+
+ .extern artStringBuilderAppend
+ENTRY art_quick_string_builder_append
+ SETUP_SAVE_REFS_ONLY_FRAME // Save callee saves in case of GC.
+ addi a1, sp, (FRAME_SIZE_SAVE_REFS_ONLY + __SIZEOF_POINTER__) // Pass args.
+ mv a2, xSELF // Pass Thread::Current().
+    call artStringBuilderAppend           // (uint32_t, const uint32_t*, Thread*)
+ RESTORE_SAVE_REFS_ONLY_FRAME
+ RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER
+END art_quick_string_builder_append
+
+
+// Entry from managed code that calls artInstanceOfFromCode and on failure calls
+// artThrowClassCastExceptionForObject.
+ .extern artInstanceOfFromCode
+ .extern artThrowClassCastExceptionForObject
+ENTRY art_quick_check_instance_of
+ // Type check using the bit string passes null as the target class. In that case just throw.
+ beqz a1, .Lthrow_class_cast_exception_for_bitstring_check
+
+ // Store arguments and return address register.
+ // Stack needs to be 16B aligned on calls.
+ INCREASE_FRAME 32
+ sd a0, 0*8(sp)
+ sd a1, 1*8(sp)
+ SAVE_GPR ra, 3*8
+
+ // Call runtime code.
+ call artInstanceOfFromCode
+
+ // Restore RA.
+ RESTORE_GPR ra, 3*8
+
+ // Check for exception.
+ CFI_REMEMBER_STATE
+ beqz a0, .Lthrow_class_cast_exception
+
+ // Remove spill area and return (no need to restore A0 and A1).
+ DECREASE_FRAME 32
+ ret
+
+.Lthrow_class_cast_exception:
+ CFI_RESTORE_STATE_AND_DEF_CFA sp, 32
+ // Restore A0 and remove spill area.
+ ld a0, 0*8(sp)
+ ld a1, 1*8(sp)
+ DECREASE_FRAME 32
+
+.Lthrow_class_cast_exception_for_bitstring_check:
+ SETUP_SAVE_ALL_CALLEE_SAVES_FRAME // Save all registers as basis for long jump context.
+ mv a2, xSELF // Pass Thread::Current().
+ call artThrowClassCastExceptionForObject // (Object*, Class*, Thread*)
+ unimp // We should not return here...
+END art_quick_check_instance_of
+
+
.macro N_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING n, c_name, cxx_name
.extern \cxx_name
ENTRY \c_name
@@ -1169,7 +1235,7 @@ ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromC
RESTORE_GPR ra, 5*8
DECREASE_FRAME 48
.endm
-#endif // USE_BAKER_READ_BARRIER
+#endif // USE_BAKER_READ_BARRIER
#endif // USE_READ_BARRIER
ENTRY art_quick_aput_obj
@@ -1764,25 +1830,26 @@ END \name
.macro COMPUTE_ARRAY_SIZE_8 class, count, temp0, temp1, temp2
// Add array data offset and alignment adjustment to the `\count`.
- addi \temp1, \count, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
+ li \temp1, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
+ add.uw \temp1, \count, \temp1
.endm
.macro COMPUTE_ARRAY_SIZE_16 class, count, temp0, temp1, temp2
// Add array data offset and alignment adjustment to the shifted `\count`.
li \temp1, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
- sh1add \temp1, \count, \temp1
+ sh1add.uw \temp1, \count, \temp1
.endm
.macro COMPUTE_ARRAY_SIZE_32 class, count, temp0, temp1, temp2
// Add array data offset and alignment adjustment to the shifted `\count`.
li \temp1, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
- sh2add \temp1, \count, \temp1
+ sh2add.uw \temp1, \count, \temp1
.endm
.macro COMPUTE_ARRAY_SIZE_64 class, count, temp0, temp1, temp2
// Add array data offset and alignment adjustment to the shifted `\count`.
li \temp1, (MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
- sh3add \temp1, \count, \temp1
+ sh3add.uw \temp1, \count, \temp1
.endm
// TODO(ngeoffray): art_quick_alloc_array_resolved_region_tlab is not used for arm64, remove
@@ -1819,10 +1886,6 @@ GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, \
COMPUTE_ARRAY_SIZE_64
-UNDEFINED art_quick_deoptimize_from_compiled_code
-UNDEFINED art_quick_string_builder_append
-UNDEFINED art_quick_check_instance_of
-
UNDEFINED art_quick_set8_instance
UNDEFINED art_quick_set8_static
UNDEFINED art_quick_set16_instance
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 9c5fe5f096..ef9c2d2068 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -899,6 +899,11 @@ extern "C" uint64_t artQuickProxyInvokeHandler(
{},
result);
}
+
+ if (QuickArgumentVisitor::NaNBoxing() && shorty[0] == 'F') {
+ result.SetJ(result.GetJ() | UINT64_C(0xffffffff00000000));
+ }
+
return result.GetJ();
}