Optimizing: Introduce {Increase,Decrease}Frame().
And use it to clean up code generators.
Also fix CFI in MaybeIncrementHotness() for arm/arm64/x86.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: testrunner.py --host --debuggable --ndebuggable \
--optimizing --jit --jit-on-first-use -t 178
Test: aosp_cf_x86_phone-userdebug boots.
Test: aosp_cf_x86_phone-userdebug/jitzygote boots.
Test: # On blueline:
testrunner.py --target --debuggable --ndebuggable \
--optimizing --jit --jit-on-first-use -t 178
Bug: 112189621
Change-Id: I524e6c3054ffe1b05e2860fd7988cd9995df2963
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index d108623..b7f519b 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1157,9 +1157,9 @@
__ B(ne, &done);
if (is_frame_entry) {
if (HasEmptyFrame()) {
- // The entyrpoint expects the method at the bottom of the stack. We
+ // The entrypoint expects the method at the bottom of the stack. We
// claim stack space necessary for alignment.
- __ Claim(kStackAlignment);
+ IncreaseFrame(kStackAlignment);
__ Stp(kArtMethodRegister, lr, MemOperand(sp, 0));
} else if (!RequiresCurrentMethod()) {
__ Str(kArtMethodRegister, MemOperand(sp, 0));
@@ -1176,7 +1176,7 @@
if (HasEmptyFrame()) {
CHECK(is_frame_entry);
__ Ldr(lr, MemOperand(sp, 8));
- __ Drop(kStackAlignment);
+ DecreaseFrame(kStackAlignment);
}
__ Bind(&done);
}
@@ -3654,6 +3654,16 @@
// MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
+void CodeGeneratorARM64::IncreaseFrame(size_t adjustment) {
+ __ Claim(adjustment);
+ GetAssembler()->cfi().AdjustCFAOffset(adjustment);
+}
+
+void CodeGeneratorARM64::DecreaseFrame(size_t adjustment) {
+ __ Drop(adjustment);
+ GetAssembler()->cfi().AdjustCFAOffset(-adjustment);
+}
+
void CodeGeneratorARM64::GenerateNop() {
__ Nop();
}
@@ -4448,16 +4458,10 @@
}
break;
case HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative: {
- HParallelMove parallel_move(GetGraph()->GetAllocator());
size_t out_frame_size =
PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorARM64,
kAapcs64StackAlignment,
- GetCriticalNativeDirectCallFrameSize>(invoke, &parallel_move);
- if (out_frame_size != 0u) {
- __ Claim(out_frame_size);
- GetAssembler()->cfi().AdjustCFAOffset(out_frame_size);
- GetMoveResolver()->EmitNativeCode(&parallel_move);
- }
+ GetCriticalNativeDirectCallFrameSize>(invoke);
call_code_pointer_member(ArtMethod::EntryPointFromJniOffset(kArm64PointerSize));
// Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
switch (invoke->GetType()) {
@@ -4484,8 +4488,7 @@
break;
}
if (out_frame_size != 0u) {
- __ Drop(out_frame_size);
- GetAssembler()->cfi().AdjustCFAOffset(-out_frame_size);
+ DecreaseFrame(out_frame_size);
}
break;
}