Small optimization for recursive calls: avoid dex cache.
Change-Id: I044757a2f06e535cdc1480c4fc8182b89635baf6
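
Background (editor's sketch, not part of the patch): the old call path
loaded dex_cache_resolved_methods_ out of the current method, indexed it
by the callee's dex method index, loaded the entry point, and branched
indirectly through LR. When the callee is the method being compiled, all
of that is unnecessary: the patch binds a label at the frame entry and
emits a direct `bl` to it, turning three dependent loads plus an indirect
`blx` into a single direct branch. The self-contained C++ model below
illustrates the decision; every name in it (Method,
dex_cache_resolved_methods, kFactorialIndex, CallViaCache) is invented
for illustration and is not an ART API.

  // Editor's model of the dispatch decision; all names are hypothetical.
  #include <cstdint>
  #include <cstdio>

  using Method = uint64_t (*)(uint64_t);

  // Toy stand-in for ArtMethod::dex_cache_resolved_methods_: resolved
  // call targets, indexed by dex method index.
  constexpr uint32_t kFactorialIndex = 0;
  Method dex_cache_resolved_methods[4];

  uint64_t Factorial(uint64_t n) {
    // Recursive case (the added path): the callee is the method being
    // compiled, so call it directly; no cache lookup, no indirect branch.
    return n <= 1 ? 1 : n * Factorial(n - 1);
  }

  // Generic case (the removed path, still used for non-recursive calls):
  // fetch the target from the cache, then branch indirectly through it.
  uint64_t CallViaCache(uint32_t method_index, uint64_t n) {
    Method target = dex_cache_resolved_methods[method_index];
    return target(n);
  }

  int main() {
    dex_cache_resolved_methods[kFactorialIndex] = Factorial;
    std::printf("%llu\n",
                static_cast<unsigned long long>(CallViaCache(kFactorialIndex, 10)));
    return 0;
  }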
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 0fe28e8..7731a10 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -527,6 +527,8 @@
   bool skip_overflow_check =
       IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
   DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
+  __ Bind(&frame_entry_label_);
+
   if (!skip_overflow_check) {
     __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
     __ LoadFromOffset(kLoadWord, IP, IP, 0);
@@ -1185,18 +1187,22 @@
   // temp = method;
   codegen_->LoadCurrentMethod(temp);
-  // temp = temp->dex_cache_resolved_methods_;
-  __ LoadFromOffset(
-      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
-  // temp = temp[index_in_cache]
-  __ LoadFromOffset(
-      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
-  // LR = temp[offset_of_quick_compiled_code]
-  __ LoadFromOffset(kLoadWord, LR, temp,
-                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
-                        kArmWordSize).Int32Value());
-  // LR()
-  __ blx(LR);
+  if (!invoke->IsRecursive()) {
+    // temp = temp->dex_cache_resolved_methods_;
+    __ LoadFromOffset(
+        kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
+    // temp = temp[index_in_cache]
+    __ LoadFromOffset(
+        kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
+    // LR = temp[offset_of_quick_compiled_code]
+    __ LoadFromOffset(kLoadWord, LR, temp,
+                      mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
+                          kArmWordSize).Int32Value());
+    // LR()
+    __ blx(LR);
+  } else {
+    __ bl(codegen_->GetFrameEntryLabel());
+  }
   codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
   DCHECK(!codegen_->IsLeafMethod());
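
Not shown in this excerpt: both hunks rely on a frame_entry_label_ member
and a GetFrameEntryLabel() accessor on CodeGeneratorARM, presumably
declared in code_generator_arm.h. A minimal sketch of the assumed shape,
with Label stubbed so the snippet compiles standalone (the real
declarations may differ):

  // Editor's sketch of the assumed header-side change.
  struct Label {};  // stand-in for the assembler's art::Label

  class CodeGeneratorARM {
   public:
    // Bound at the top of GenerateFrameEntry() (first hunk); recursive
    // call sites branch to it with `bl` (second hunk).
    Label* GetFrameEntryLabel() { return &frame_entry_label_; }

   private:
    Label frame_entry_label_;
  };

Note also that codegen_->RecordPcInfo(invoke, invoke->GetDexPc()) stays
outside the new branch, so the recursive `bl` records PC information just
like the indirect call it replaces.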