summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author Serguei Katkov <serguei.i.katkov@intel.com> 2014-09-04 15:21:32 +0700
committer Serguei Katkov <serguei.i.katkov@intel.com> 2014-09-11 09:14:10 +0700
commit 9863daf4fdc1a08339edac794452dbc719aef4f1 (patch)
tree 6870b147ea60f095ef43fa37f35d193fe41fd33f
parent b9620f305c79914f5159cf9279a7ccd173af1186 (diff)
AddIntrinsicSlowPath with resume requires clobbering
AddIntrinsicSlowPath with resume results in a call, so all temps must be clobbered at the point where AddIntrinsicSlowPath returns. Change-Id: If9eb887e295ff5e59920f4da1cef63258ad490b0 Signed-off-by: Serguei Katkov <serguei.i.katkov@intel.com>
-rw-r--r--compiler/dex/quick/arm/int_arm.cc1
-rw-r--r--compiler/dex/quick/arm64/int_arm64.cc1
-rwxr-xr-xcompiler/dex/quick/gen_invoke.cc3
-rwxr-xr-xcompiler/dex/quick/x86/target_x86.cc2
4 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/compiler/dex/quick/arm/int_arm.cc b/compiler/dex/quick/arm/int_arm.cc
index b9a17cceb9..0de2a445d0 100644
--- a/compiler/dex/quick/arm/int_arm.cc
+++ b/compiler/dex/quick/arm/int_arm.cc
@@ -1039,6 +1039,7 @@ bool ArmMir2Lir::GenInlinedArrayCopyCharArray(CallInfo* info) {
jmp_to_ret->target = return_point;
AddIntrinsicSlowPath(info, launchpad_branch, return_point);
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
return true;
}
diff --git a/compiler/dex/quick/arm64/int_arm64.cc b/compiler/dex/quick/arm64/int_arm64.cc
index 1777e98fae..094db4cac3 100644
--- a/compiler/dex/quick/arm64/int_arm64.cc
+++ b/compiler/dex/quick/arm64/int_arm64.cc
@@ -918,6 +918,7 @@ bool Arm64Mir2Lir::GenInlinedArrayCopyCharArray(CallInfo* info) {
loop_finished->target = return_point;
AddIntrinsicSlowPath(info, launchpad_branch, return_point);
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
return true;
}
diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc
index 8ce696ca15..960f21790b 100755
--- a/compiler/dex/quick/gen_invoke.cc
+++ b/compiler/dex/quick/gen_invoke.cc
@@ -1193,7 +1193,7 @@ bool Mir2Lir::GenInlinedReferenceGetReferent(CallInfo* info) {
LIR* intrinsic_finish = NewLIR0(kPseudoTargetLabel);
AddIntrinsicSlowPath(info, slow_path_branch, intrinsic_finish);
-
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
return true;
}
@@ -1492,6 +1492,7 @@ bool Mir2Lir::GenInlinedIndexOf(CallInfo* info, bool zero_based) {
LIR* resume_tgt = NewLIR0(kPseudoTargetLabel);
info->opt_flags |= MIR_IGNORE_NULL_CHECK; // Record that we've null checked.
AddIntrinsicSlowPath(info, high_code_point_branch, resume_tgt);
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
} else {
DCHECK_EQ(mir_graph_->ConstantValue(rl_char) & ~0xFFFF, 0);
DCHECK(high_code_point_branch == nullptr);
diff --git a/compiler/dex/quick/x86/target_x86.cc b/compiler/dex/quick/x86/target_x86.cc
index aadb41a37a..91caa9b448 100755
--- a/compiler/dex/quick/x86/target_x86.cc
+++ b/compiler/dex/quick/x86/target_x86.cc
@@ -1206,6 +1206,7 @@ bool X86Mir2Lir::GenInlinedArrayCopyCharArray(CallInfo* info) {
if (dst_bad_len != nullptr)
dst_bad_len->target = check_failed;
AddIntrinsicSlowPath(info, launchpad_branch, return_point);
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
return true;
}
@@ -1384,6 +1385,7 @@ bool X86Mir2Lir::GenInlinedIndexOf(CallInfo* info, bool zero_based) {
if (slowpath_branch != nullptr) {
LIR *return_point = NewLIR0(kPseudoTargetLabel);
AddIntrinsicSlowPath(info, slowpath_branch, return_point);
+ ClobberCallerSave(); // We must clobber everything because slow path will return here
}
StoreValue(rl_dest, rl_return);