path: root/compiler/dex/quick/gen_invoke.cc
author Dave Allison <dallison@google.com> 2014-05-29 08:20:04 -0700
committer Dave Allison <dallison@google.com> 2014-07-09 16:19:59 -0700
commit  34e826ccc80dc1cf7c4c045de6b7f8360d504ccf (patch)
tree    76901cff2cddd6d30cb7a4e83ad4e0c9bb673fe1 /compiler/dex/quick/gen_invoke.cc
parent  c21dc06adc8c8447561208a3fb72ccf6d0443613 (diff)
Add implicit null and stack checks for x86
This adds compiler and runtime changes for x86 implicit checks, 32-bit only.
Both host and target are supported. By default, on the host, the implicit
checks are null pointer and stack overflow. Suspend is implemented but not
switched on.

Change-Id: I88a609e98d6bf32f283eaa4e6ec8bbf8dc1df78a
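For readers unfamiliar with the technique: an implicit null check replaces the
explicit compare-and-branch against null with an ordinary load from the object,
and relies on the hardware fault raised by a null dereference, which the
runtime's fault handler converts into a NullPointerException. The standalone
C++ sketch below only illustrates that idea; it is not ART code, and the names
(FaultHandler, LoadLengthWithImplicitNullCheck) are invented for illustration.
Compile without optimizations, since dereferencing null is undefined behavior
that an optimizer may rearrange.

  #include <csetjmp>
  #include <cstdio>
  #include <signal.h>

  static sigjmp_buf g_fault_return;

  // On a null dereference the load faults; the handler jumps back out.
  // A real runtime would instead redirect the faulting thread to an
  // entrypoint that throws NullPointerException.
  static void FaultHandler(int) {
    siglongjmp(g_fault_return, 1);
  }

  struct Object { int length; };

  int LoadLengthWithImplicitNullCheck(Object* obj) {
    if (sigsetjmp(g_fault_return, 1) != 0) {
      std::puts("fault caught: would throw NullPointerException here");
      return -1;
    }
    // The null "check" is the load itself: no compare, no branch emitted.
    return obj->length;
  }

  int main() {
    struct sigaction sa = {};
    sigemptyset(&sa.sa_mask);
    sa.sa_handler = FaultHandler;
    sigaction(SIGSEGV, &sa, nullptr);

    Object o{42};
    std::printf("%d\n", LoadLengthWithImplicitNullCheck(&o));       // 42
    std::printf("%d\n", LoadLengthWithImplicitNullCheck(nullptr));  // -1
    return 0;
  }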
Diffstat (limited to 'compiler/dex/quick/gen_invoke.cc')
-rwxr-xr-x  compiler/dex/quick/gen_invoke.cc  38
1 file changed, 28 insertions, 10 deletions
diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc
index 6c0dfe80a6..55b68e66b2 100755
--- a/compiler/dex/quick/gen_invoke.cc
+++ b/compiler/dex/quick/gen_invoke.cc
@@ -985,17 +985,31 @@ int Mir2Lir::GenDalvikArgsNoRange(CallInfo* info,
*pcrLabel = GenExplicitNullCheck(TargetRefReg(kArg1), info->opt_flags);
} else {
*pcrLabel = nullptr;
+ if (!(cu_->disable_opt & (1 << kNullCheckElimination)) &&
+ (info->opt_flags & MIR_IGNORE_NULL_CHECK)) {
+ return call_state;
+ }
// In lieu of generating a check for kArg1 being null, we need to
// perform a load when doing implicit checks.
- RegStorage tmp = AllocTemp();
- Load32Disp(TargetRefReg(kArg1), 0, tmp);
- MarkPossibleNullPointerException(info->opt_flags);
- FreeTemp(tmp);
+ GenImplicitNullCheck(TargetReg(kArg1, false), info->opt_flags);
}
}
return call_state;
}
+// Default implementation of implicit null pointer check.
+// Overridden by arch specific as necessary.
+void Mir2Lir::GenImplicitNullCheck(RegStorage reg, int opt_flags) {
+ if (!(cu_->disable_opt & (1 << kNullCheckElimination)) && (opt_flags & MIR_IGNORE_NULL_CHECK)) {
+ return;
+ }
+ RegStorage tmp = AllocTemp();
+ Load32Disp(reg, 0, tmp);
+ MarkPossibleNullPointerException(opt_flags);
+ FreeTemp(tmp);
+}
+
+
/*
* May have 0+ arguments (also used for jumbo). Note that
* source virtual registers may be in physical registers, so may
@@ -1212,12 +1226,13 @@ int Mir2Lir::GenDalvikArgsRange(CallInfo* info, int call_state,
*pcrLabel = GenExplicitNullCheck(TargetRefReg(kArg1), info->opt_flags);
} else {
*pcrLabel = nullptr;
+ if (!(cu_->disable_opt & (1 << kNullCheckElimination)) &&
+ (info->opt_flags & MIR_IGNORE_NULL_CHECK)) {
+ return call_state;
+ }
// In lieu of generating a check for kArg1 being null, we need to
// perform a load when doing implicit checks.
- RegStorage tmp = AllocTemp();
- Load32Disp(TargetRefReg(kArg1), 0, tmp);
- MarkPossibleNullPointerException(info->opt_flags);
- FreeTemp(tmp);
+ GenImplicitNullCheck(TargetReg(kArg1, false), info->opt_flags);
}
}
return call_state;
@@ -1293,11 +1308,14 @@ bool Mir2Lir::GenInlinedCharAt(CallInfo* info) {
// On x86, we can compare to memory directly
// Set up a launch pad to allow retry in case of bounds violation
if (rl_idx.is_const) {
+ LIR* comparison;
range_check_branch = OpCmpMemImmBranch(
kCondUlt, RegStorage::InvalidReg(), rl_obj.reg, count_offset,
- mir_graph_->ConstantValue(rl_idx.orig_sreg), nullptr);
- } else {
+ mir_graph_->ConstantValue(rl_idx.orig_sreg), nullptr, &comparison);
+ MarkPossibleNullPointerExceptionAfter(0, comparison);
+ } else {
OpRegMem(kOpCmp, rl_idx.reg, rl_obj.reg, count_offset);
+ MarkPossibleNullPointerException(0);
range_check_branch = OpCondBranch(kCondUge, nullptr);
}
}
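The GenInlinedCharAt hunk above records the comparison LIR so that
MarkPossibleNullPointerExceptionAfter can associate a possible fault with the
exact instruction that reads the object's count field. The precise instruction
matters because a fault handler can only turn a segmentation fault into a
NullPointerException if it can map the faulting native PC back to the dex
instruction that performed the access. The sketch below is a hypothetical
illustration of such a side table, not ART's actual mechanism;
RecordImplicitCheck and IsImplicitNullCheck are invented names.

  #include <cstdint>
  #include <map>

  struct NullCheckInfo { uint32_t dex_pc; };

  // Hypothetical side table populated at code-emission time: one entry per
  // instruction that may fault as an implicit null check.
  static std::map<uintptr_t, NullCheckInfo> g_implicit_checks;

  void RecordImplicitCheck(uintptr_t native_pc, uint32_t dex_pc) {
    g_implicit_checks[native_pc] = NullCheckInfo{dex_pc};
  }

  // Called from a (hypothetical) fault handler with the faulting PC.
  // Returns true if the fault corresponds to an implicit null check and
  // reports the dex pc at which a NullPointerException should be thrown.
  bool IsImplicitNullCheck(uintptr_t faulting_pc, uint32_t* dex_pc_out) {
    auto it = g_implicit_checks.find(faulting_pc);
    if (it == g_implicit_checks.end()) {
      return false;  // not one of ours: a genuine crash
    }
    *dex_pc_out = it->second.dex_pc;
    return true;
  }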