path: root/compiler/optimizing/intrinsics_riscv64.cc
author Vladimir Marko <vmarko@google.com> 2024-04-18 13:21:15 +0000
committer Vladimír Marko <vmarko@google.com> 2024-04-22 07:28:48 +0000
commit 4a79b17f374df876803e34edb60476fe33ab1671 (patch)
tree   12c07faf384d12d7ae535300f8218527628144b1 /compiler/optimizing/intrinsics_riscv64.cc
parent 55e99bd1c5a403c4bddc023403593c9199af56f2 (diff)
Optimizing: Treat app image objects as non-movable.
Treat app image objects similarly to boot image objects and avoid
unnecessary read barriers for app image `HLoadClass` and `HInstanceOf`
checks with app image `HLoadClass` input. Extend other optimizations to
treat app image classes the same way as boot image classes, even though
this remains mostly dormant for now: we currently do not initialize app
image classes with class initializers, and the experimental flag
`--initialize-app-image-classes` is false by default.

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing --speed-profile
Bug: 38313278
Change-Id: I359dd8897f6d128213602f5731d40edace298ab8
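The effect of the change can be illustrated with a small, self-contained
C++ sketch. This is a simplified model, not ART's actual API: `ImageKind`
and `CanSkipReadBarrier` are invented names standing in for the logic this
CL extends from boot image objects to app image objects.

#include <cstdio>

// Hypothetical classification of where a reference lives; ART's real
// distinction involves image spaces and heap regions.
enum class ImageKind { kNone, kBootImage, kAppImage };

// Image objects are treated as non-movable, so loading a reference to
// one needs no read barrier. Before this CL only boot image objects
// qualified; with it, app image objects are handled the same way.
bool CanSkipReadBarrier(ImageKind kind) {
  return kind == ImageKind::kBootImage || kind == ImageKind::kAppImage;
}

int main() {
  std::printf("boot image: %d\n", CanSkipReadBarrier(ImageKind::kBootImage));
  std::printf("app image:  %d\n", CanSkipReadBarrier(ImageKind::kAppImage));
  std::printf("other heap: %d\n", CanSkipReadBarrier(ImageKind::kNone));
}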
Diffstat (limited to 'compiler/optimizing/intrinsics_riscv64.cc')
-rw-r--r--  compiler/optimizing/intrinsics_riscv64.cc | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 3ce6d73fae..4e248a2b7c 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -3198,7 +3198,7 @@ static void GenerateVarHandleInstanceFieldChecks(HInvoke* invoke,
     __ Beqz(object, slow_path->GetEntryLabel());
   }
-  if (!optimizations.GetUseKnownBootImageVarHandle()) {
+  if (!optimizations.GetUseKnownImageVarHandle()) {
     ScratchRegisterScope srs(assembler);
     XRegister temp = srs.AllocateXRegister();
@@ -3321,7 +3321,7 @@ static VarHandleSlowPathRISCV64* GenerateVarHandleChecks(HInvoke* invoke,
                                                          DataType::Type type) {
   size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
   VarHandleOptimizations optimizations(invoke);
-  if (optimizations.GetUseKnownBootImageVarHandle()) {
+  if (optimizations.GetUseKnownImageVarHandle()) {
     DCHECK_NE(expected_coordinates_count, 2u);
     if (expected_coordinates_count == 0u || optimizations.GetSkipObjectNullCheck()) {
       return nullptr;
@@ -3332,7 +3332,7 @@ static VarHandleSlowPathRISCV64* GenerateVarHandleChecks(HInvoke* invoke,
       new (codegen->GetScopedAllocator()) VarHandleSlowPathRISCV64(invoke, order);
   codegen->AddSlowPath(slow_path);
-  if (!optimizations.GetUseKnownBootImageVarHandle()) {
+  if (!optimizations.GetUseKnownImageVarHandle()) {
     GenerateVarHandleAccessModeAndVarTypeChecks(invoke, codegen, slow_path, type);
   }
   GenerateVarHandleCoordinateChecks(invoke, codegen, slow_path);
@@ -3368,7 +3368,7 @@ static void GenerateVarHandleTarget(HInvoke* invoke,
   size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
   if (expected_coordinates_count <= 1u) {
-    if (VarHandleOptimizations(invoke).GetUseKnownBootImageVarHandle()) {
+    if (VarHandleOptimizations(invoke).GetUseKnownImageVarHandle()) {
      ScopedObjectAccess soa(Thread::Current());
      ArtField* target_field = GetBootImageVarHandleField(invoke);
      if (expected_coordinates_count == 0u) {
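The gating that these hunks rename can be summarized in a standalone
model. This is a sketch under assumed names (`Optimizations` and
`NeedsSlowPath` below are illustrative, not ART's types): with a known
image VarHandle, the access-mode/var-type checks are skipped, and if
there are no coordinates or the object is already proven non-null, no
slow path is needed at all.

#include <cstdio>

struct Optimizations {
  bool use_known_image_var_handle;  // VarHandle is a known image object
  bool skip_object_null_check;      // object already proven non-null
};

// Mirrors the early-out in GenerateVarHandleChecks: a known image
// VarHandle with no coordinates (static field) or a proven non-null
// object needs no runtime checks, hence no slow path.
bool NeedsSlowPath(const Optimizations& opt, size_t coordinate_count) {
  if (opt.use_known_image_var_handle &&
      (coordinate_count == 0u || opt.skip_object_null_check)) {
    return false;
  }
  return true;
}

int main() {
  Optimizations opt{/*use_known_image_var_handle=*/true,
                    /*skip_object_null_check=*/false};
  std::printf("static field (0 coords):  %d\n", NeedsSlowPath(opt, 0u));
  std::printf("instance field (1 coord): %d\n", NeedsSlowPath(opt, 1u));
}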