summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author Lena Djokic <Lena.Djokic@mips.com> 2018-03-02 12:45:35 +0000
committer android-build-merger <android-build-merger@google.com> 2018-03-02 12:45:35 +0000
commit f1f925ae7c4cc4be270fbeb203780282e04778f2 (patch)
tree d701847dab8c56c98989e035d37f827d40490963
parent bdc03c9c27ddbb80c78c468b56f3d53084dfff16 (diff)
parent de4b2fe1279bff2f8ac42b09cc127c1f93a12475 (diff)
Merge "MIPS: Use sltiu instead of LoadConst32() + sltu" am: 540ca50846 am: f99a64c761
am: de4b2fe127 Change-Id: I5cb0fd01bb62c14310456909e0e54b5679faffd1
-rw-r--r-- compiler/optimizing/code_generator_mips.cc   | 11
-rw-r--r-- compiler/optimizing/code_generator_mips64.cc |  4
2 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index d01b895446..87e6d6834b 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -1929,8 +1929,8 @@ void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCode
enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
__ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
- __ LoadConst32(AT, shifted_initialized_value);
- __ Bltu(TMP, AT, slow_path->GetEntryLabel());
+ __ Sltiu(TMP, TMP, shifted_initialized_value);
+ __ Bnez(TMP, slow_path->GetEntryLabel());
// Even if the initialized flag is set, we need to ensure consistent memory ordering.
__ Sync(0);
__ Bind(slow_path->GetExitLabel());
@@ -7635,10 +7635,6 @@ void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
- // Set the hidden argument.
- __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
- invoke->GetDexMethodIndex());
-
// temp = object->GetClass();
if (receiver.IsStackSlot()) {
__ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
@@ -7663,6 +7659,9 @@ void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke
__ LoadFromOffset(kLoadWord, temp, temp, method_offset);
// T9 = temp->GetEntryPoint();
__ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
+ // Set the hidden argument.
+ __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
+ invoke->GetDexMethodIndex());
// T9();
__ Jalr(T9);
__ NopIfNoReordering();
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index e3529f178a..985ac2ca55 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -1773,8 +1773,8 @@ void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCo
enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
__ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
- __ LoadConst32(AT, shifted_initialized_value);
- __ Bltuc(TMP, AT, slow_path->GetEntryLabel());
+ __ Sltiu(TMP, TMP, shifted_initialized_value);
+ __ Bnezc(TMP, slow_path->GetEntryLabel());
// Even if the initialized flag is set, we need to ensure consistent memory ordering.
__ Sync(0);
__ Bind(slow_path->GetExitLabel());