Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/intrinsics_arm.cc    | 15 +++++++++++----
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc  | 14 ++++++++++----
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc | 17 ++++++++++-------
3 files changed, 31 insertions(+), 15 deletions(-)
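
All three backends receive the same restructuring of the position checks in VisitSystemArrayCopy: the src/dest comparison that used to be emitted unconditionally above the if-cascade is dropped, each branch that consumes the flags now emits its own comparison, and the constant/constant case, which previously emitted no aliasing check at all, now bails out to the slow path when src_pos < dest_pos and the two references turn out to be the same array. Below is a minimal C++ sketch of that control flow only; the Emit* helpers are hypothetical stand-ins for the per-backend assembler macros (__ cmp/__ b, __ Cmp/__ B, __ cmpl/__ j) and are not ART's actual API.

// Minimal sketch, not the ART code generator: it models the control flow this
// patch establishes for the SystemArrayCopy position checks. The Emit* helpers
// are hypothetical stand-ins for the per-backend assembler macros.
#include <cstdint>
#include <cstdio>
#include <optional>

static void EmitCompareSrcDest()       { std::puts("  cmp   src, dest"); }
static void EmitBranchEqSlowPath()     { std::puts("  b.eq  slow_path"); }
static void EmitBranchNeValidated()    { std::puts("  b.ne  conditions_on_positions_validated"); }
static void EmitRuntimePositionCheck() { std::puts("  cmp/branch on dest_pos vs. src_pos"); }

// destination_is_source models optimizations.GetDestinationIsSource().
static void EmitPositionChecks(std::optional<int32_t> src_pos,
                               std::optional<int32_t> dest_pos,
                               bool destination_is_source) {
  if (src_pos && dest_pos) {
    if (destination_is_source) {
      // Checked when building locations: src_pos >= dest_pos, nothing to emit.
    } else if (*src_pos < *dest_pos) {
      // Only this case needs to know whether the two references alias; if they
      // do, the intrinsic's copy loop would be wrong, so take the slow path.
      EmitCompareSrcDest();
      EmitBranchEqSlowPath();
    }
    // src_pos >= dest_pos with both constant: no code is emitted at all.
  } else {
    if (!destination_is_source) {
      // The comparison now sits directly before the branch that consumes its
      // flags instead of being hoisted above the whole cascade.
      EmitCompareSrcDest();
      EmitBranchNeValidated();
    }
    EmitRuntimePositionCheck();
  }
}

int main() {
  std::puts("constant positions, src_pos < dest_pos:");
  EmitPositionChecks(0, 4, /*destination_is_source=*/ false);
  std::puts("non-constant dest_pos:");
  EmitPositionChecks(0, std::nullopt, /*destination_is_source=*/ false);
}
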
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index f43f8edf06..bfe4956692 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -1451,20 +1451,26 @@ void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
Label conditions_on_positions_validated;
SystemArrayCopyOptimizations optimizations(invoke);
- if (!optimizations.GetDestinationIsSource() &&
- (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
- __ cmp(src, ShifterOperand(dest));
- }
// If source and destination are the same, we go to slow path if we need to do
// forward copying.
if (src_pos.IsConstant()) {
int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
if (dest_pos.IsConstant()) {
+ int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
+ if (optimizations.GetDestinationIsSource()) {
+ // Checked when building locations.
+ DCHECK_GE(src_pos_constant, dest_pos_constant);
+ } else if (src_pos_constant < dest_pos_constant) {
+ __ cmp(src, ShifterOperand(dest));
+ __ b(slow_path->GetEntryLabel(), EQ);
+ }
+
// Checked when building locations.
DCHECK(!optimizations.GetDestinationIsSource()
|| (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ cmp(src, ShifterOperand(dest));
__ b(&conditions_on_positions_validated, NE);
}
__ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
@@ -1472,6 +1478,7 @@ void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
}
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ cmp(src, ShifterOperand(dest));
__ b(&conditions_on_positions_validated, NE);
}
if (dest_pos.IsConstant()) {
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 1685cf9c3c..549407e1e7 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -2101,20 +2101,25 @@ void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
vixl::Label conditions_on_positions_validated;
SystemArrayCopyOptimizations optimizations(invoke);
- if (!optimizations.GetDestinationIsSource() &&
- (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
- __ Cmp(src, dest);
- }
// If source and destination are the same, we go to slow path if we need to do
// forward copying.
if (src_pos.IsConstant()) {
int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
if (dest_pos.IsConstant()) {
+ int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
+ if (optimizations.GetDestinationIsSource()) {
+ // Checked when building locations.
+ DCHECK_GE(src_pos_constant, dest_pos_constant);
+ } else if (src_pos_constant < dest_pos_constant) {
+ __ Cmp(src, dest);
+ __ B(slow_path->GetEntryLabel(), eq);
+ }
// Checked when building locations.
DCHECK(!optimizations.GetDestinationIsSource()
|| (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ Cmp(src, dest);
__ B(&conditions_on_positions_validated, ne);
}
__ Cmp(WRegisterFrom(dest_pos), src_pos_constant);
@@ -2122,6 +2127,7 @@ void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
}
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ Cmp(src, dest);
__ B(&conditions_on_positions_validated, ne);
}
__ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()),
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index c5b44d4f5c..5bd49647be 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1103,20 +1103,22 @@ void IntrinsicCodeGeneratorX86_64::VisitSystemArrayCopy(HInvoke* invoke) {
NearLabel conditions_on_positions_validated;
SystemArrayCopyOptimizations optimizations(invoke);
- if (!optimizations.GetDestinationIsSource() &&
- (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
- __ cmpl(src, dest);
- }
// If source and destination are the same, we go to slow path if we need to do
// forward copying.
if (src_pos.IsConstant()) {
int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
if (dest_pos.IsConstant()) {
- // Checked when building locations.
- DCHECK(!optimizations.GetDestinationIsSource()
- || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
+ int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
+ if (optimizations.GetDestinationIsSource()) {
+ // Checked when building locations.
+ DCHECK_GE(src_pos_constant, dest_pos_constant);
+ } else if (src_pos_constant < dest_pos_constant) {
+ __ cmpl(src, dest);
+ __ j(kEqual, slow_path->GetEntryLabel());
+ }
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ cmpl(src, dest);
__ j(kNotEqual, &conditions_on_positions_validated);
}
__ cmpl(dest_pos.AsRegister<CpuRegister>(), Immediate(src_pos_constant));
@@ -1124,6 +1126,7 @@ void IntrinsicCodeGeneratorX86_64::VisitSystemArrayCopy(HInvoke* invoke) {
}
} else {
if (!optimizations.GetDestinationIsSource()) {
+ __ cmpl(src, dest);
__ j(kNotEqual, &conditions_on_positions_validated);
}
if (dest_pos.IsConstant()) {
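
The new check in the constant/constant case guards the classic overlapping-copy hazard: when the two references alias and src_pos < dest_pos, an element-by-element copy in ascending order reads slots it has already overwritten, so the code branches to the slow path, which falls back to the full arraycopy implementation and handles the overlap. A standalone illustration of that hazard, in plain C++ and independent of the ART code above:

// Overlapping copy with src_pos = 0, dest_pos = 1, length = 3 on the same
// backing array: an ascending copy corrupts the data, a backward copy
// (as-if-via-a-temporary semantics) produces the intended result.
#include <algorithm>
#include <array>
#include <cassert>

int main() {
  std::array<int, 4> naive{1, 2, 3, 4};
  for (int i = 0; i < 3; ++i) {
    naive[1 + i] = naive[0 + i];  // later iterations read already-clobbered slots
  }
  assert((naive == std::array<int, 4>{1, 1, 1, 1}));  // corrupted

  std::array<int, 4> safe{1, 2, 3, 4};
  std::copy_backward(safe.begin(), safe.begin() + 3, safe.end());
  assert((safe == std::array<int, 4>{1, 1, 2, 3}));   // the intended result
}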