author     2024-08-08 15:33:56 +0000
committer  2024-08-13 08:09:37 +0000
commit     3e75615ad25b6af1842b194e78b429b0f585b46a
tree       cd74101af342d0798827aa853f1f5e34cc1c01a6 /compiler/optimizing
parent     ccbbe37bb19c714be0beac4d21bbe7abc214738c
Calculate the number of out vregs.
Determine the number of out vregs needed by invokes that
actually make a call, and by `HStringBuilderAppend`s.
This can yield smaller frame sizes for compiled methods when
some calls are inlined or fully intrinsified.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 358519867
Change-Id: I4930a9bd811b1de14658f5ef44e65eadea6a7961
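
For illustration, a minimal sketch (not taken from the patch) of how an invoke's outgoing
vreg count follows from its argument types: 32-bit and reference arguments need one vreg
each, while `long` and `double` need two. This is the value now passed to the `HInvoke`
constructors as `number_of_out_vregs`; the `ArgType` enum and `CountOutVRegs` helper below
are hypothetical names used only for this sketch.

    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for the argument types of a single call site.
    enum class ArgType { kInt, kReference, kFloat, kLong, kDouble };

    // One vreg per 32-bit or reference argument, two per long/double argument.
    uint32_t CountOutVRegs(const std::vector<ArgType>& args) {
      uint32_t vregs = 0u;
      for (ArgType arg : args) {
        vregs += (arg == ArgType::kLong || arg == ArgType::kDouble) ? 2u : 1u;
      }
      return vregs;
    }

    // Example: a call taking (int, long, Object) needs 1 + 2 + 1 = 4 out vregs.

This mirrors how the test helper in optimizing_unit_test.h below derives the count as
`args.size() + num_64bit_args`.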
Diffstat (limited to 'compiler/optimizing')
 compiler/optimizing/builder.cc                        |  2
 compiler/optimizing/code_generator.cc                 |  7
 compiler/optimizing/inliner.cc                        |  2
 compiler/optimizing/instruction_builder.cc            |  8
 compiler/optimizing/instruction_simplifier.cc         | 20
 compiler/optimizing/intrinsics.cc                     |  1
 compiler/optimizing/nodes.cc                          |  1
 compiler/optimizing/nodes.h                           | 59
 compiler/optimizing/optimizing_unit_test.h            |  4
 compiler/optimizing/register_allocator_linear_scan.cc | 20
10 files changed, 83 insertions, 41 deletions
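
The register allocator change in the diff below can be summarized with a small model:
the reserved outgoing area now starts from the ArtMethod* slots and grows to the maximum
out-vreg count over instructions that can still make a call, so call sites that were
inlined or fully intrinsified no longer contribute. This is a sketch under assumed names
(`CallSite`, `ReservedOutSlots`), not ART code.

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical summary of one instruction as seen by the register allocator.
    struct CallSite {
      bool can_call;       // Still performs a call (possibly only on the slow path).
      uint32_t out_vregs;  // Outgoing vregs needed when it does call.
    };

    // Reserved outgoing stack slots: ArtMethod* vregs plus the largest per-call need.
    size_t ReservedOutSlots(size_t art_method_vregs, const std::vector<CallSite>& sites) {
      size_t max_out_vregs = 0u;
      for (const CallSite& site : sites) {
        if (site.can_call) {
          max_out_vregs = std::max<size_t>(max_out_vregs, site.out_vregs);
        }
      }
      return art_method_vregs + max_out_vregs;  // e.g. 2 vregs for ArtMethod* on 64-bit.
    }

When every call in a method is inlined or intrinsified, the maximum is zero and only the
ArtMethod* slots remain, which is where the smaller frames come from.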
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 1dea39626c..a1318c917a 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -93,7 +93,6 @@ GraphAnalysisResult HGraphBuilder::BuildGraph() {
   graph_->SetNumberOfVRegs(code_item_accessor_.RegistersSize());
   graph_->SetNumberOfInVRegs(code_item_accessor_.InsSize());
-  graph_->SetMaximumNumberOfOutVRegs(code_item_accessor_.OutsSize());
 
   // Use ScopedArenaAllocator for all local allocations.
   ScopedArenaAllocator local_allocator(graph_->GetArenaStack());
@@ -157,7 +156,6 @@ void HGraphBuilder::BuildIntrinsicGraph(ArtMethod* method) {
   size_t return_vregs = 2u;
   graph_->SetNumberOfVRegs(return_vregs + num_arg_vregs);
   graph_->SetNumberOfInVRegs(num_arg_vregs);
-  graph_->SetMaximumNumberOfOutVRegs(num_arg_vregs);
 
   // Use ScopedArenaAllocator for all local allocations.
   ScopedArenaAllocator local_allocator(graph_->GetArenaStack());
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 51714ef548..019aede361 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -632,11 +632,8 @@ void CodeGenerator::CreateStringBuilderAppendLocations(HStringBuilderAppend* ins
     stack_offset += sizeof(uint32_t);
   }
   DCHECK_EQ(f, 0u);
-
-  size_t param_size = stack_offset - static_cast<size_t>(pointer_size);
-  DCHECK_ALIGNED(param_size, kVRegSize);
-  size_t num_vregs = param_size / kVRegSize;
-  graph_->UpdateMaximumNumberOfOutVRegs(num_vregs);
+  DCHECK_EQ(stack_offset,
+            static_cast<size_t>(pointer_size) + kVRegSize * instruction->GetNumberOfOutVRegs());
 }
 
 void CodeGenerator::CreateUnresolvedFieldLocationSummary(
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 8a25e82816..68e75fb5d5 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -1378,6 +1378,7 @@ bool HInliner::TryDevirtualize(HInvoke* invoke_instruction,
   HInvokeStaticOrDirect* new_invoke = new (graph_->GetAllocator()) HInvokeStaticOrDirect(
       graph_->GetAllocator(),
       invoke_instruction->GetNumberOfArguments(),
+      invoke_instruction->GetNumberOfOutVRegs(),
       invoke_instruction->GetType(),
       invoke_instruction->GetDexPc(),
       MethodReference(invoke_instruction->GetMethodReference().dex_file, dex_method_index),
@@ -1597,6 +1598,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
   HInvokeVirtual* new_invoke = new (graph_->GetAllocator()) HInvokeVirtual(
       graph_->GetAllocator(),
       invoke_instruction->GetNumberOfArguments(),
+      invoke_instruction->GetNumberOfOutVRegs(),
       invoke_instruction->GetType(),
       invoke_instruction->GetDexPc(),
       invoke_instruction->GetMethodReference(),  // Use existing invoke's method's reference.
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index c97c78ca17..5cdefd9717 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -501,6 +501,7 @@ void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
   HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
       allocator_,
       number_of_arguments,
+      /* number_of_out_vregs= */ in_vregs,
       return_type_,
       kNoDexPc,
       target_method,
@@ -1066,6 +1067,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                     MethodCompilationStat::kUnresolvedMethod);
     HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
                                                          number_of_arguments,
+                                                         operands.GetNumberOfOperands(),
                                                          return_type,
                                                          dex_pc,
                                                          method_reference,
@@ -1086,6 +1088,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
     HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
         allocator_,
        number_of_arguments - 1,
+        operands.GetNumberOfOperands() - 1,
        /* return_type= */ DataType::Type::kReference,
        dex_pc,
        method_reference,
@@ -1151,6 +1154,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
    }
    invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
                                                    number_of_arguments,
+                                                    operands.GetNumberOfOperands(),
                                                    return_type,
                                                    dex_pc,
                                                    method_reference,
@@ -1170,6 +1174,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
  } else if (invoke_type == kVirtual) {
    invoke = new (allocator_) HInvokeVirtual(allocator_,
                                             number_of_arguments,
+                                             operands.GetNumberOfOperands(),
                                             return_type,
                                             dex_pc,
                                             method_reference,
@@ -1191,6 +1196,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                                         .method_load_kind;
    invoke = new (allocator_) HInvokeInterface(allocator_,
                                               number_of_arguments,
+                                               operands.GetNumberOfOperands(),
                                               return_type,
                                               dex_pc,
                                               method_reference,
@@ -1392,6 +1398,7 @@ bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
  MethodReference method_reference(&graph_->GetDexFile(), method_idx);
  HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
                                                        number_of_arguments,
+                                                        operands.GetNumberOfOperands(),
                                                        return_type,
                                                        dex_pc,
                                                        method_reference,
@@ -1433,6 +1440,7 @@ bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
  MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
  HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
                                                   number_of_arguments,
+                                                   operands.GetNumberOfOperands(),
                                                   call_site_idx,
                                                   return_type,
                                                   dex_pc,
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index e972e4795e..cafa83bece 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -2789,7 +2789,7 @@ static bool NoEscapeForStringBufferReference(HInstruction* reference, HInstructi
   return false;
 }
 
-static bool TryReplaceStringBuilderAppend(HInvoke* invoke) {
+static bool TryReplaceStringBuilderAppend(CodeGenerator* codegen, HInvoke* invoke) {
   DCHECK_EQ(invoke->GetIntrinsic(), Intrinsics::kStringBuilderToString);
   if (invoke->CanThrowIntoCatchBlock()) {
     return false;
   }
@@ -2946,11 +2946,25 @@ static bool TryReplaceStringBuilderAppend(HInvoke* invoke) {
     }
   }
 
+  // Calculate outgoing vregs, including padding for 64-bit arg alignment.
+  const PointerSize pointer_size = InstructionSetPointerSize(codegen->GetInstructionSet());
+  const size_t method_vregs = static_cast<size_t>(pointer_size) / kVRegSize;
+  uint32_t number_of_out_vregs = method_vregs;  // For correct alignment padding; subtracted below.
+  for (uint32_t f = format; f != 0u; f >>= StringBuilderAppend::kBitsPerArg) {
+    auto a = enum_cast<StringBuilderAppend::Argument>(f & StringBuilderAppend::kArgMask);
+    if (a == StringBuilderAppend::Argument::kLong || a == StringBuilderAppend::Argument::kDouble) {
+      number_of_out_vregs += /* alignment */ ((number_of_out_vregs) & 1u) + /* vregs */ 2u;
+    } else {
+      number_of_out_vregs += /* vregs */ 1u;
+    }
+  }
+  number_of_out_vregs -= method_vregs;
+
   // Create replacement instruction.
   HIntConstant* fmt = block->GetGraph()->GetIntConstant(static_cast<int32_t>(format));
   ArenaAllocator* allocator = block->GetGraph()->GetAllocator();
   HStringBuilderAppend* append = new (allocator) HStringBuilderAppend(
-      fmt, num_args, has_fp_args, allocator, invoke->GetDexPc());
+      fmt, num_args, number_of_out_vregs, has_fp_args, allocator, invoke->GetDexPc());
   append->SetReferenceTypeInfoIfValid(invoke->GetReferenceTypeInfo());
   for (size_t i = 0; i != num_args; ++i) {
     append->SetArgumentAt(i, args[num_args - 1u - i]);
@@ -2995,7 +3009,7 @@ void InstructionSimplifierVisitor::SimplifyAllocationIntrinsic(HInvoke* invoke)
       RecordSimplification();
     }
   } else if (invoke->GetIntrinsic() == Intrinsics::kStringBuilderToString &&
-             TryReplaceStringBuilderAppend(invoke)) {
+             TryReplaceStringBuilderAppend(codegen_, invoke)) {
     RecordSimplification();
   }
 }
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 06ea1c6ffb..b87f6f3975 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -254,6 +254,7 @@ void InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect* invoke, size_t input_ind
   HInvokeStaticOrDirect* new_input = new (allocator) HInvokeStaticOrDirect(
       allocator,
       /*number_of_arguments=*/ 1u,
+      /*number_of_out_vregs=*/ is_double ? 2u : 1u,
       converted_type,
       invoke->GetDexPc(),
       /*method_reference=*/ MethodReference(nullptr, dex::kDexNoIndex),
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index f2cac19786..38711c074b 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2673,7 +2673,6 @@ HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
       }
     }
   }
-  outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());
 
   if (HasBoundsChecks()) {
     outer_graph->SetHasBoundsChecks(true);
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 825134497d..99bb5f8478 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -397,7 +397,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
         linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
         entry_block_(nullptr),
         exit_block_(nullptr),
-        maximum_number_of_out_vregs_(0),
         number_of_vregs_(0),
         number_of_in_vregs_(0),
         temporaries_vreg_slots_(0),
@@ -549,18 +548,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
     current_instruction_id_ = id;
   }
 
-  uint16_t GetMaximumNumberOfOutVRegs() const {
-    return maximum_number_of_out_vregs_;
-  }
-
-  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
-    maximum_number_of_out_vregs_ = new_value;
-  }
-
-  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
-    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
-  }
-
   void UpdateTemporariesVRegSlots(size_t slots) {
     temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
   }
@@ -800,9 +787,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
   HBasicBlock* entry_block_;
   HBasicBlock* exit_block_;
 
-  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
-  uint16_t maximum_number_of_out_vregs_;
-
   // The number of virtual registers in this method. Contains the parameters.
   uint16_t number_of_vregs_;
@@ -4173,8 +4157,7 @@ class HEqual final : public HCondition {
 
 class HNotEqual final : public HCondition {
  public:
-  HNotEqual(HInstruction* first, HInstruction* second,
-            uint32_t dex_pc = kNoDexPc)
+  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kNotEqual, first, second, dex_pc) {
   }
 
@@ -4219,8 +4202,7 @@ class HNotEqual final : public HCondition {
 
 class HLessThan final : public HCondition {
  public:
-  HLessThan(HInstruction* first, HInstruction* second,
-            uint32_t dex_pc = kNoDexPc)
+  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kLessThan, first, second, dex_pc) {
   }
 
@@ -4259,8 +4241,7 @@ class HLessThan final : public HCondition {
 
 class HLessThanOrEqual final : public HCondition {
  public:
-  HLessThanOrEqual(HInstruction* first, HInstruction* second,
-                   uint32_t dex_pc = kNoDexPc)
+  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kLessThanOrEqual, first, second, dex_pc) {
   }
 
@@ -4758,6 +4739,9 @@ class HInvoke : public HVariableInputSizeInstruction {
   // inputs at the end of their list of inputs.
   uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
 
+  // Return the number of outgoing vregs.
+  uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
+
   InvokeType GetInvokeType() const {
     return GetPackedField<InvokeTypeField>();
   }
@@ -4823,6 +4807,7 @@ class HInvoke : public HVariableInputSizeInstruction {
   HInvoke(InstructionKind kind,
           ArenaAllocator* allocator,
           uint32_t number_of_arguments,
+          uint32_t number_of_out_vregs,
          uint32_t number_of_other_inputs,
          DataType::Type return_type,
          uint32_t dex_pc,
@@ -4839,9 +4824,10 @@ class HInvoke : public HVariableInputSizeInstruction {
            allocator,
            number_of_arguments + number_of_other_inputs,
            kArenaAllocInvokeInputs),
-        number_of_arguments_(number_of_arguments),
        method_reference_(method_reference),
        resolved_method_reference_(resolved_method_reference),
+        number_of_arguments_(dchecked_integral_cast<uint16_t>(number_of_arguments)),
+        number_of_out_vregs_(dchecked_integral_cast<uint16_t>(number_of_out_vregs)),
        intrinsic_(Intrinsics::kNone),
        intrinsic_optimizations_(0) {
     SetPackedField<InvokeTypeField>(invoke_type);
@@ -4851,11 +4837,14 @@ class HInvoke : public HVariableInputSizeInstruction {
 
   DEFAULT_COPY_CONSTRUCTOR(Invoke);
 
-  uint32_t number_of_arguments_;
   ArtMethod* resolved_method_;
   const MethodReference method_reference_;
   // Cached values of the resolved method, to avoid needing the mutator lock.
   const MethodReference resolved_method_reference_;
+
+  uint16_t number_of_arguments_;
+  uint16_t number_of_out_vregs_;
+
   Intrinsics intrinsic_;
 
   // A magic word holding optimizations for intrinsics. See intrinsics.h.
@@ -4866,6 +4855,7 @@ class HInvokeUnresolved final : public HInvoke {
  public:
   HInvokeUnresolved(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
+                    uint32_t number_of_out_vregs,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     MethodReference method_reference,
@@ -4873,6 +4863,7 @@ class HInvokeUnresolved final : public HInvoke {
       : HInvoke(kInvokeUnresolved,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
@@ -4895,6 +4886,7 @@ class HInvokePolymorphic final : public HInvoke {
  public:
   HInvokePolymorphic(ArenaAllocator* allocator,
                      uint32_t number_of_arguments,
+                     uint32_t number_of_out_vregs,
                      DataType::Type return_type,
                      uint32_t dex_pc,
                      MethodReference method_reference,
@@ -4907,6 +4899,7 @@ class HInvokePolymorphic final : public HInvoke {
       : HInvoke(kInvokePolymorphic,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
@@ -4932,6 +4925,7 @@ class HInvokeCustom final : public HInvoke {
  public:
   HInvokeCustom(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
+                uint32_t number_of_out_vregs,
                 uint32_t call_site_index,
                 DataType::Type return_type,
                 uint32_t dex_pc,
@@ -4940,6 +4934,7 @@ class HInvokeCustom final : public HInvoke {
       : HInvoke(kInvokeCustom,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                /* number_of_other_inputs= */ 0u,
                return_type,
                dex_pc,
@@ -4987,6 +4982,7 @@ class HInvokeStaticOrDirect final : public HInvoke {
 
   HInvokeStaticOrDirect(ArenaAllocator* allocator,
                         uint32_t number_of_arguments,
+                        uint32_t number_of_out_vregs,
                         DataType::Type return_type,
                         uint32_t dex_pc,
                         MethodReference method_reference,
@@ -4999,6 +4995,7 @@ class HInvokeStaticOrDirect final : public HInvoke {
       : HInvoke(kInvokeStaticOrDirect,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                // There is potentially one extra argument for the HCurrentMethod input,
                // and one other if the clinit check is explicit. These can be removed later.
                (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
@@ -5214,6 +5211,7 @@ class HInvokeVirtual final : public HInvoke {
  public:
   HInvokeVirtual(ArenaAllocator* allocator,
                  uint32_t number_of_arguments,
+                 uint32_t number_of_out_vregs,
                  DataType::Type return_type,
                  uint32_t dex_pc,
                  MethodReference method_reference,
@@ -5224,6 +5222,7 @@ class HInvokeVirtual final : public HInvoke {
       : HInvoke(kInvokeVirtual,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                0u,
                return_type,
                dex_pc,
@@ -5277,6 +5276,7 @@ class HInvokeInterface final : public HInvoke {
  public:
   HInvokeInterface(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
+                   uint32_t number_of_out_vregs,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    MethodReference method_reference,
@@ -5288,6 +5288,7 @@ class HInvokeInterface final : public HInvoke {
       : HInvoke(kInvokeInterface,
                 allocator,
                 number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
+                number_of_out_vregs,
                0u,
                return_type,
                dex_pc,
@@ -7476,6 +7477,7 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
  public:
   HStringBuilderAppend(HIntConstant* format,
                        uint32_t number_of_arguments,
+                       uint32_t number_of_out_vregs,
                        bool has_fp_args,
                        ArenaAllocator* allocator,
                        uint32_t dex_pc)
@@ -7490,7 +7492,8 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
            dex_pc,
            allocator,
            number_of_arguments + /* format */ 1u,
-           kArenaAllocInvokeInputs) {
+           kArenaAllocInvokeInputs),
+        number_of_out_vregs_(number_of_out_vregs) {
     DCHECK_GE(number_of_arguments, 1u);  // There must be something to append.
     SetRawInputAt(FormatIndex(), format);
   }
@@ -7506,6 +7509,9 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
     return InputCount() - 1u;
   }
 
+  // Return the number of outgoing vregs.
+  uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
+
   size_t FormatIndex() const {
     return GetNumberOfArguments();
   }
@@ -7524,6 +7530,9 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
 
  protected:
   DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
+
+ private:
+  uint32_t number_of_out_vregs_;
 };
 
 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h
index b256d439fb..c8412b876b 100644
--- a/compiler/optimizing/optimizing_unit_test.h
+++ b/compiler/optimizing/optimizing_unit_test.h
@@ -542,9 +542,13 @@ class OptimizingUnitTestHelper {
                              const std::vector<HInstruction*>& args,
                              uint32_t dex_pc = kNoDexPc) {
     MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
+    size_t num_64bit_args = std::count_if(args.begin(), args.end(), [](HInstruction* insn) {
+      return DataType::Is64BitType(insn->GetType());
+    });
     HInvokeStaticOrDirect* invoke = new (GetAllocator())
         HInvokeStaticOrDirect(GetAllocator(),
                               args.size(),
+                              /* number_of_out_vregs= */ args.size() + num_64bit_args,
                               return_type,
                               dex_pc,
                               method_reference,
diff --git a/compiler/optimizing/register_allocator_linear_scan.cc b/compiler/optimizing/register_allocator_linear_scan.cc
index 458d1a740e..35a0ab404e 100644
--- a/compiler/optimizing/register_allocator_linear_scan.cc
+++ b/compiler/optimizing/register_allocator_linear_scan.cc
@@ -79,11 +79,6 @@ RegisterAllocatorLinearScan::RegisterAllocatorLinearScan(ScopedArenaAllocator* a
   codegen->SetupBlockedRegisters();
   physical_core_register_intervals_.resize(codegen->GetNumberOfCoreRegisters(), nullptr);
   physical_fp_register_intervals_.resize(codegen->GetNumberOfFloatingPointRegisters(), nullptr);
-  // Always reserve for the current method and the graph's max out registers.
-  // TODO: compute it instead.
-  // ArtMethod* takes 2 vregs for 64 bits.
-  size_t ptr_size = static_cast<size_t>(InstructionSetPointerSize(codegen->GetInstructionSet()));
-  reserved_out_slots_ = ptr_size / kVRegSize + codegen->GetGraph()->GetMaximumNumberOfOutVRegs();
 }
 
 RegisterAllocatorLinearScan::~RegisterAllocatorLinearScan() {}
@@ -181,6 +176,10 @@ void RegisterAllocatorLinearScan::AllocateRegistersInternal() {
     }
   }
 
+  // Add the current method to the `reserved_out_slots_`. ArtMethod* takes 2 vregs for 64 bits.
+  PointerSize pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet());
+  reserved_out_slots_ += static_cast<size_t>(pointer_size) / kVRegSize;
+
   number_of_registers_ = codegen_->GetNumberOfCoreRegisters();
   registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_,
                                                     kArenaAllocRegisterAllocator);
@@ -247,6 +246,17 @@ void RegisterAllocatorLinearScan::ProcessInstruction(HInstruction* instruction)
     return;
   }
 
+  if (locations->CanCall()) {
+    // Update the `reserved_out_slots_` for invokes that make a call, including intrinsics
+    // that make the call only on the slow-path. Same for the `HStringBuilderAppend`.
+    if (instruction->IsInvoke()) {
+      reserved_out_slots_ = std::max<size_t>(
+          reserved_out_slots_, instruction->AsInvoke()->GetNumberOfOutVRegs());
+    } else if (instruction->IsStringBuilderAppend()) {
+      reserved_out_slots_ = std::max<size_t>(
+          reserved_out_slots_, instruction->AsStringBuilderAppend()->GetNumberOfOutVRegs());
+    }
+  }
   bool will_call = locations->WillCall();
   if (will_call) {
     // If a call will happen, add the range to a fixed interval that represents all the