author Vladimir Marko <vmarko@google.com> 2023-10-13 10:28:36 +0000
committer Vladimír Marko <vmarko@google.com> 2023-10-17 11:56:00 +0000
commit d5c097bcda44e237ecabcdba9b3dca2348289138 (patch)
tree 46be9f64c07e806c8c1f68e19ac09b0d41f8a635 /compiler/optimizing/intrinsics.h
parent f7bd87edf3b80ce3bbd6e571fd119c878cb79992 (diff)
riscv64: Implement `CriticalNativeAbiFixupRiscv64`.
And pass integral stack args sign-extended to 64 bits for direct
@CriticalNative calls.

Enable direct @CriticalNative call codegen unconditionally and also
enable `HClinitCheck` codegen and extend the 178-app-image-native-method
run-test to properly test these use cases.

Test: # Edit `run-test` to disable checker, then
      testrunner.py --target --64 --ndebug --optimizing
      # Ignore 6 pre-existing failures (down from 7).
Bug: 283082089
Change-Id: Ia514c62006c7079b04182cc39e413eb2deb089c1
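To make the stack-argument case concrete, here is a minimal Java sketch of a direct @CriticalNative call; the class and method names are hypothetical, while the annotation itself is libcore's dalvik.annotation.optimization.CriticalNative, which applies only to static native methods that take and return no reference types.

import dalvik.annotation.optimization.CriticalNative;

public class CriticalNativeDemo {
    // Hypothetical example: with more int parameters than the eight riscv64
    // integer argument registers (a0-a7), the trailing ones are passed on the
    // native stack, where this change now sign-extends them to 64 bits.
    @CriticalNative
    public static native int sumInts(int a, int b, int c, int d, int e,
                                     int f, int g, int h, int i, int j);
}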
Diffstat (limited to 'compiler/optimizing/intrinsics.h')
-rw-r--r--  compiler/optimizing/intrinsics.h  5
1 file changed, 5 insertions, 0 deletions
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index b6c7e1b997..a16b93dfc6 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -328,6 +328,11 @@ bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
return false;
}
+// Insert a `Float.floatToRawIntBits()` or `Double.doubleToRawLongBits()` intrinsic for a
+// given input. These fake calls are needed on arm and riscv64 to satisfy type consistency
+// checks while passing certain FP args in core registers for direct @CriticalNative calls.
+void InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect* invoke, size_t input_index);
+
} // namespace art
#endif // ART_COMPILER_OPTIMIZING_INTRINSICS_H_
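For the new `InsertFpToIntegralIntrinsic()` declaration, a hedged Java sketch of the riscv64 situation it targets (names hypothetical): once the eight FP argument registers fa0-fa7 are used up, the standard RISC-V calling convention passes further float arguments in core registers, which is why the compiler inserts the fake `Float.floatToRawIntBits()` calls described in the comment above to keep the graph's types consistent.

import dalvik.annotation.optimization.CriticalNative;

public class FpArgsDemo {
    // Hypothetical example: f8 and f9 do not fit in fa0-fa7, so for a direct
    // @CriticalNative call they are passed in core registers, and the compiler
    // models that with a fake Float.floatToRawIntBits() intrinsic on each.
    @CriticalNative
    public static native float sumFloats(float f0, float f1, float f2, float f3,
                                         float f4, float f5, float f6, float f7,
                                         float f8, float f9);
}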