author     2024-10-04 09:18:58 +0000
committer  2024-10-07 07:00:16 +0000
commit     c50d67991682a9ae5e6215031a9852bbf018504b (patch)
tree       a9b685384873c864fe333fac743d40085ecc8e18
parent     00db5b25da2d2ff8005476c7c735eb4f921d3a56 (diff)
Reland "Calculate the number of out vregs."
This reverts commit 434a327234f74eed3ef4072314d2e2bdb73e4dda.
Reason for revert: Relanding with no change. The regressions
that were the reason for the revert may reappear. However,
these regressions are probably caused by subtle effects that
are not directly related to this change. For example, a code
size improvement can regress performance simply by moving
the start of a loop from an aligned address to an unaligned
address, or by splitting a loop across two cache lines.
Bug: 358519867
Bug: 359722268
Change-Id: I997b8a4219418f79b3a5fc4e7e50817911f0a737
-rw-r--r--  compiler/optimizing/builder.cc                           2
-rw-r--r--  compiler/optimizing/code_generator.cc                    7
-rw-r--r--  compiler/optimizing/inliner.cc                           2
-rw-r--r--  compiler/optimizing/instruction_builder.cc               8
-rw-r--r--  compiler/optimizing/instruction_simplifier.cc           20
-rw-r--r--  compiler/optimizing/intrinsics.cc                        1
-rw-r--r--  compiler/optimizing/nodes.cc                             1
-rw-r--r--  compiler/optimizing/nodes.h                             59
-rw-r--r--  compiler/optimizing/optimizing_unit_test.h               4
-rw-r--r--  compiler/optimizing/register_allocator_linear_scan.cc   20

10 files changed, 83 insertions, 41 deletions
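The change below replaces the graph-wide maximum previously kept on HGraph with a per-call-site count that the linear-scan register allocator folds into its reserved outgoing stack slots. A minimal standalone sketch of that bookkeeping, in plain C++ with illustrative names (kVRegSize, kPointerSize and CallSite are stand-ins, not the ART types):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical stand-ins for ART constants; sizes chosen for a 64-bit target.
constexpr size_t kVRegSize = 4;     // One dex virtual register is 4 bytes.
constexpr size_t kPointerSize = 8;  // ArtMethod* pointer size on 64-bit.

struct CallSite {
  uint32_t number_of_out_vregs;  // Out vregs this particular call needs.
};

// Mirrors the new scheme: instead of a graph-wide maximum maintained by the
// builder and inliner, the register allocator folds per-call-site counts into
// the number of reserved outgoing slots while it walks the instructions.
size_t ComputeReservedOutSlots(const std::vector<CallSite>& calls) {
  size_t reserved = 0;
  for (const CallSite& call : calls) {
    reserved = std::max<size_t>(reserved, call.number_of_out_vregs);
  }
  // Every call also passes an ArtMethod*, which takes 2 vregs on 64-bit.
  return reserved + kPointerSize / kVRegSize;
}

int main() {
  // Example: calls needing 3, 0 and 5 out vregs -> 5 + 2 reserved slots.
  std::vector<CallSite> calls = {{3u}, {0u}, {5u}};
  return ComputeReservedOutSlots(calls) == 7u ? 0 : 1;
}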
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 1dea39626c..a1318c917a 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -93,7 +93,6 @@ GraphAnalysisResult HGraphBuilder::BuildGraph() {
 
   graph_->SetNumberOfVRegs(code_item_accessor_.RegistersSize());
   graph_->SetNumberOfInVRegs(code_item_accessor_.InsSize());
-  graph_->SetMaximumNumberOfOutVRegs(code_item_accessor_.OutsSize());
 
   // Use ScopedArenaAllocator for all local allocations.
   ScopedArenaAllocator local_allocator(graph_->GetArenaStack());
@@ -157,7 +156,6 @@ void HGraphBuilder::BuildIntrinsicGraph(ArtMethod* method) {
   size_t return_vregs = 2u;
   graph_->SetNumberOfVRegs(return_vregs + num_arg_vregs);
   graph_->SetNumberOfInVRegs(num_arg_vregs);
-  graph_->SetMaximumNumberOfOutVRegs(num_arg_vregs);
 
   // Use ScopedArenaAllocator for all local allocations.
   ScopedArenaAllocator local_allocator(graph_->GetArenaStack());
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 757fdc2cee..e805b9ee9e 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -632,11 +632,8 @@ void CodeGenerator::CreateStringBuilderAppendLocations(HStringBuilderAppend* ins
     stack_offset += sizeof(uint32_t);
   }
   DCHECK_EQ(f, 0u);
-
-  size_t param_size = stack_offset - static_cast<size_t>(pointer_size);
-  DCHECK_ALIGNED(param_size, kVRegSize);
-  size_t num_vregs = param_size / kVRegSize;
-  graph_->UpdateMaximumNumberOfOutVRegs(num_vregs);
+  DCHECK_EQ(stack_offset,
+            static_cast<size_t>(pointer_size) + kVRegSize * instruction->GetNumberOfOutVRegs());
 }
 
 void CodeGenerator::CreateUnresolvedFieldLocationSummary(
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 90df4521e8..a0c28ca365 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -1385,6 +1385,7 @@ bool HInliner::TryDevirtualize(HInvoke* invoke_instruction,
   HInvokeStaticOrDirect* new_invoke = new (graph_->GetAllocator()) HInvokeStaticOrDirect(
       graph_->GetAllocator(),
       invoke_instruction->GetNumberOfArguments(),
+      invoke_instruction->GetNumberOfOutVRegs(),
       invoke_instruction->GetType(),
       invoke_instruction->GetDexPc(),
       MethodReference(invoke_instruction->GetMethodReference().dex_file, dex_method_index),
@@ -1604,6 +1605,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
   HInvokeVirtual* new_invoke = new (graph_->GetAllocator()) HInvokeVirtual(
       graph_->GetAllocator(),
       invoke_instruction->GetNumberOfArguments(),
+      invoke_instruction->GetNumberOfOutVRegs(),
       invoke_instruction->GetType(),
       invoke_instruction->GetDexPc(),
       invoke_instruction->GetMethodReference(),  // Use existing invoke's method's reference.
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 5b762d69fa..332fc4ff17 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -502,6 +502,7 @@ void HInstructionBuilder::BuildIntrinsic(ArtMethod* method) {
   HInvokeStaticOrDirect* invoke = new (allocator_) HInvokeStaticOrDirect(
       allocator_,
       number_of_arguments,
+      /* number_of_out_vregs= */ in_vregs,
       return_type_,
       kNoDexPc,
       target_method,
@@ -1067,6 +1068,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                                      MethodCompilationStat::kUnresolvedMethod);
     HInvoke* invoke = new (allocator_) HInvokeUnresolved(allocator_,
                                                          number_of_arguments,
+                                                         operands.GetNumberOfOperands(),
                                                          return_type,
                                                          dex_pc,
                                                          method_reference,
@@ -1087,6 +1089,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
     HInvoke* invoke = new (allocator_) HInvokeStaticOrDirect(
         allocator_,
         number_of_arguments - 1,
+        operands.GetNumberOfOperands() - 1,
        /* return_type= */ DataType::Type::kReference,
        dex_pc,
        method_reference,
@@ -1152,6 +1155,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
     }
     invoke = new (allocator_) HInvokeStaticOrDirect(allocator_,
                                                     number_of_arguments,
+                                                    operands.GetNumberOfOperands(),
                                                     return_type,
                                                     dex_pc,
                                                     method_reference,
@@ -1171,6 +1175,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
   } else if (invoke_type == kVirtual) {
     invoke = new (allocator_) HInvokeVirtual(allocator_,
                                              number_of_arguments,
+                                             operands.GetNumberOfOperands(),
                                              return_type,
                                              dex_pc,
                                              method_reference,
@@ -1192,6 +1197,7 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
                           .method_load_kind;
     invoke = new (allocator_) HInvokeInterface(allocator_,
                                                number_of_arguments,
+                                               operands.GetNumberOfOperands(),
                                                return_type,
                                                dex_pc,
                                                method_reference,
@@ -1403,6 +1409,7 @@ bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
   HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
                                                         number_of_arguments,
+                                                        operands.GetNumberOfOperands(),
                                                         number_of_other_inputs,
                                                         return_type,
                                                         dex_pc,
@@ -1447,6 +1454,7 @@ bool HInstructionBuilder::BuildInvokeCustom(uint32_t dex_pc,
   MethodReference method_reference(&graph_->GetDexFile(), dex::kDexNoIndex);
   HInvoke* invoke = new (allocator_) HInvokeCustom(allocator_,
                                                    number_of_arguments,
+                                                   operands.GetNumberOfOperands(),
                                                    call_site_idx,
                                                    return_type,
                                                    dex_pc,
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index a09a9f0ead..cd2371d90c 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -2996,7 +2996,7 @@ static bool NoEscapeForStringBufferReference(HInstruction* reference, HInstructi
   return false;
 }
 
-static bool TryReplaceStringBuilderAppend(HInvoke* invoke) {
+static bool TryReplaceStringBuilderAppend(CodeGenerator* codegen, HInvoke* invoke) {
   DCHECK_EQ(invoke->GetIntrinsic(), Intrinsics::kStringBuilderToString);
   if (invoke->CanThrowIntoCatchBlock()) {
     return false;
@@ -3153,11 +3153,25 @@ static bool TryReplaceStringBuilderAppend(HInvoke* invoke) {
     }
   }
 
+  // Calculate outgoing vregs, including padding for 64-bit arg alignment.
+  const PointerSize pointer_size = InstructionSetPointerSize(codegen->GetInstructionSet());
+  const size_t method_vregs = static_cast<size_t>(pointer_size) / kVRegSize;
+  uint32_t number_of_out_vregs = method_vregs;  // For correct alignment padding; subtracted below.
+  for (uint32_t f = format; f != 0u; f >>= StringBuilderAppend::kBitsPerArg) {
+    auto a = enum_cast<StringBuilderAppend::Argument>(f & StringBuilderAppend::kArgMask);
+    if (a == StringBuilderAppend::Argument::kLong || a == StringBuilderAppend::Argument::kDouble) {
+      number_of_out_vregs += /* alignment */ ((number_of_out_vregs) & 1u) + /* vregs */ 2u;
+    } else {
+      number_of_out_vregs += /* vregs */ 1u;
+    }
+  }
+  number_of_out_vregs -= method_vregs;
+
   // Create replacement instruction.
   HIntConstant* fmt = block->GetGraph()->GetIntConstant(static_cast<int32_t>(format));
   ArenaAllocator* allocator = block->GetGraph()->GetAllocator();
   HStringBuilderAppend* append = new (allocator) HStringBuilderAppend(
-      fmt, num_args, has_fp_args, allocator, invoke->GetDexPc());
+      fmt, num_args, number_of_out_vregs, has_fp_args, allocator, invoke->GetDexPc());
   append->SetReferenceTypeInfoIfValid(invoke->GetReferenceTypeInfo());
   for (size_t i = 0; i != num_args; ++i) {
     append->SetArgumentAt(i, args[num_args - 1u - i]);
@@ -3202,7 +3216,7 @@ void InstructionSimplifierVisitor::SimplifyAllocationIntrinsic(HInvoke* invoke)
       RecordSimplification();
     }
   } else if (invoke->GetIntrinsic() == Intrinsics::kStringBuilderToString &&
-             TryReplaceStringBuilderAppend(invoke)) {
+             TryReplaceStringBuilderAppend(codegen_, invoke)) {
     RecordSimplification();
   }
 }
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 06ea1c6ffb..b87f6f3975 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -254,6 +254,7 @@ void InsertFpToIntegralIntrinsic(HInvokeStaticOrDirect* invoke, size_t input_ind
   HInvokeStaticOrDirect* new_input = new (allocator) HInvokeStaticOrDirect(
       allocator,
       /*number_of_arguments=*/ 1u,
+      /*number_of_out_vregs=*/ is_double ? 2u : 1u,
       converted_type,
       invoke->GetDexPc(),
       /*method_reference=*/ MethodReference(nullptr, dex::kDexNoIndex),
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index f7ec0871d4..79e61d0285 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2673,7 +2673,6 @@ HInstruction* HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
       }
     }
   }
-  outer_graph->UpdateMaximumNumberOfOutVRegs(GetMaximumNumberOfOutVRegs());
 
   if (HasBoundsChecks()) {
     outer_graph->SetHasBoundsChecks(true);
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 4a7eb1cd22..78c0ad7231 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -398,7 +398,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
         linear_order_(allocator->Adapter(kArenaAllocLinearOrder)),
         entry_block_(nullptr),
         exit_block_(nullptr),
-        maximum_number_of_out_vregs_(0),
         number_of_vregs_(0),
         number_of_in_vregs_(0),
         temporaries_vreg_slots_(0),
@@ -550,18 +549,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
     current_instruction_id_ = id;
   }
 
-  uint16_t GetMaximumNumberOfOutVRegs() const {
-    return maximum_number_of_out_vregs_;
-  }
-
-  void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
-    maximum_number_of_out_vregs_ = new_value;
-  }
-
-  void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
-    maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
-  }
-
   void UpdateTemporariesVRegSlots(size_t slots) {
     temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
   }
@@ -801,9 +788,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
   HBasicBlock* entry_block_;
   HBasicBlock* exit_block_;
 
-  // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
-  uint16_t maximum_number_of_out_vregs_;
-
   // The number of virtual registers in this method. Contains the parameters.
   uint16_t number_of_vregs_;
 
@@ -4206,8 +4190,7 @@ class HEqual final : public HCondition {
 
 class HNotEqual final : public HCondition {
  public:
-  HNotEqual(HInstruction* first, HInstruction* second,
-            uint32_t dex_pc = kNoDexPc)
+  HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kNotEqual, first, second, dex_pc) {
   }
 
@@ -4252,8 +4235,7 @@ class HNotEqual final : public HCondition {
 
 class HLessThan final : public HCondition {
  public:
-  HLessThan(HInstruction* first, HInstruction* second,
-            uint32_t dex_pc = kNoDexPc)
+  HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kLessThan, first, second, dex_pc) {
   }
 
@@ -4292,8 +4274,7 @@ class HLessThan final : public HCondition {
 
 class HLessThanOrEqual final : public HCondition {
  public:
-  HLessThanOrEqual(HInstruction* first, HInstruction* second,
-                   uint32_t dex_pc = kNoDexPc)
+  HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
       : HCondition(kLessThanOrEqual, first, second, dex_pc) {
   }
 
@@ -4807,6 +4788,9 @@ class HInvoke : public HVariableInputSizeInstruction {
   // inputs at the end of their list of inputs.
   uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
 
+  // Return the number of outgoing vregs.
+  uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
+
   InvokeType GetInvokeType() const {
     return GetPackedField<InvokeTypeField>();
   }
@@ -4872,6 +4856,7 @@
   HInvoke(InstructionKind kind,
           ArenaAllocator* allocator,
           uint32_t number_of_arguments,
+          uint32_t number_of_out_vregs,
           uint32_t number_of_other_inputs,
           DataType::Type return_type,
           uint32_t dex_pc,
@@ -4888,9 +4873,10 @@
                                         allocator,
                                         number_of_arguments + number_of_other_inputs,
                                         kArenaAllocInvokeInputs),
-        number_of_arguments_(number_of_arguments),
         method_reference_(method_reference),
         resolved_method_reference_(resolved_method_reference),
+        number_of_arguments_(dchecked_integral_cast<uint16_t>(number_of_arguments)),
+        number_of_out_vregs_(dchecked_integral_cast<uint16_t>(number_of_out_vregs)),
         intrinsic_(Intrinsics::kNone),
         intrinsic_optimizations_(0) {
     SetPackedField<InvokeTypeField>(invoke_type);
@@ -4900,11 +4886,14 @@
 
   DEFAULT_COPY_CONSTRUCTOR(Invoke);
 
-  uint32_t number_of_arguments_;
   ArtMethod* resolved_method_;
   const MethodReference method_reference_;
   // Cached values of the resolved method, to avoid needing the mutator lock.
   const MethodReference resolved_method_reference_;
+
+  uint16_t number_of_arguments_;
+  uint16_t number_of_out_vregs_;
+
   Intrinsics intrinsic_;
 
   // A magic word holding optimizations for intrinsics. See intrinsics.h.
@@ -4915,6 +4904,7 @@ class HInvokeUnresolved final : public HInvoke {
  public:
   HInvokeUnresolved(ArenaAllocator* allocator,
                     uint32_t number_of_arguments,
+                    uint32_t number_of_out_vregs,
                     DataType::Type return_type,
                     uint32_t dex_pc,
                     MethodReference method_reference,
@@ -4922,6 +4912,7 @@
       : HInvoke(kInvokeUnresolved,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                 /* number_of_other_inputs= */ 0u,
                 return_type,
                 dex_pc,
@@ -4944,6 +4935,7 @@ class HInvokePolymorphic final : public HInvoke {
  public:
   HInvokePolymorphic(ArenaAllocator* allocator,
                      uint32_t number_of_arguments,
+                     uint32_t number_of_out_vregs,
                      uint32_t number_of_other_inputs,
                      DataType::Type return_type,
                      uint32_t dex_pc,
@@ -4957,6 +4949,7 @@
      : HInvoke(kInvokePolymorphic,
                allocator,
                number_of_arguments,
+               number_of_out_vregs,
                number_of_other_inputs,
                return_type,
                dex_pc,
@@ -4992,6 +4985,7 @@ class HInvokeCustom final : public HInvoke {
  public:
   HInvokeCustom(ArenaAllocator* allocator,
                 uint32_t number_of_arguments,
+                uint32_t number_of_out_vregs,
                 uint32_t call_site_index,
                 DataType::Type return_type,
                 uint32_t dex_pc,
@@ -5000,6 +4994,7 @@
       : HInvoke(kInvokeCustom,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                 /* number_of_other_inputs= */ 0u,
                 return_type,
                 dex_pc,
@@ -5047,6 +5042,7 @@ class HInvokeStaticOrDirect final : public HInvoke {
 
   HInvokeStaticOrDirect(ArenaAllocator* allocator,
                         uint32_t number_of_arguments,
+                        uint32_t number_of_out_vregs,
                         DataType::Type return_type,
                         uint32_t dex_pc,
                         MethodReference method_reference,
@@ -5059,6 +5055,7 @@
       : HInvoke(kInvokeStaticOrDirect,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                 // There is potentially one extra argument for the HCurrentMethod input,
                 // and one other if the clinit check is explicit. These can be removed later.
                 (NeedsCurrentMethodInput(dispatch_info) ? 1u : 0u) +
@@ -5274,6 +5271,7 @@ class HInvokeVirtual final : public HInvoke {
  public:
   HInvokeVirtual(ArenaAllocator* allocator,
                  uint32_t number_of_arguments,
+                 uint32_t number_of_out_vregs,
                  DataType::Type return_type,
                  uint32_t dex_pc,
                  MethodReference method_reference,
@@ -5284,6 +5282,7 @@
       : HInvoke(kInvokeVirtual,
                 allocator,
                 number_of_arguments,
+                number_of_out_vregs,
                 0u,
                 return_type,
                 dex_pc,
@@ -5337,6 +5336,7 @@ class HInvokeInterface final : public HInvoke {
  public:
   HInvokeInterface(ArenaAllocator* allocator,
                    uint32_t number_of_arguments,
+                   uint32_t number_of_out_vregs,
                    DataType::Type return_type,
                    uint32_t dex_pc,
                    MethodReference method_reference,
@@ -5348,6 +5348,7 @@
       : HInvoke(kInvokeInterface,
                 allocator,
                 number_of_arguments + (NeedsCurrentMethod(load_kind) ? 1 : 0),
+                number_of_out_vregs,
                 0u,
                 return_type,
                 dex_pc,
@@ -7561,6 +7562,7 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
  public:
   HStringBuilderAppend(HIntConstant* format,
                        uint32_t number_of_arguments,
+                       uint32_t number_of_out_vregs,
                        bool has_fp_args,
                        ArenaAllocator* allocator,
                        uint32_t dex_pc)
@@ -7575,7 +7577,8 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
                                        dex_pc,
                                        allocator,
                                        number_of_arguments + /* format */ 1u,
-                                       kArenaAllocInvokeInputs) {
+                                       kArenaAllocInvokeInputs),
+        number_of_out_vregs_(number_of_out_vregs) {
     DCHECK_GE(number_of_arguments, 1u);  // There must be something to append.
     SetRawInputAt(FormatIndex(), format);
   }
@@ -7591,6 +7594,9 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
     return InputCount() - 1u;
   }
 
+  // Return the number of outgoing vregs.
+  uint32_t GetNumberOfOutVRegs() const { return number_of_out_vregs_; }
+
   size_t FormatIndex() const {
     return GetNumberOfArguments();
   }
@@ -7609,6 +7615,9 @@ class HStringBuilderAppend final : public HVariableInputSizeInstruction {
 
  protected:
   DEFAULT_COPY_CONSTRUCTOR(StringBuilderAppend);
+
+ private:
+  uint32_t number_of_out_vregs_;
 };
 
 class HUnresolvedInstanceFieldGet final : public HExpression<1> {
diff --git a/compiler/optimizing/optimizing_unit_test.h b/compiler/optimizing/optimizing_unit_test.h
index 6d451663e5..34eed29116 100644
--- a/compiler/optimizing/optimizing_unit_test.h
+++ b/compiler/optimizing/optimizing_unit_test.h
@@ -647,9 +647,13 @@ class OptimizingUnitTestHelper {
       std::initializer_list<HInstruction*> env = {},
       uint32_t dex_pc = kNoDexPc) {
     MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
+    size_t num_64bit_args = std::count_if(args.begin(), args.end(), [](HInstruction* insn) {
+      return DataType::Is64BitType(insn->GetType());
+    });
     HInvokeStaticOrDirect* invoke = new (GetAllocator()) HInvokeStaticOrDirect(
         GetAllocator(),
         args.size(),
+        /* number_of_out_vregs= */ args.size() + num_64bit_args,
         return_type,
         dex_pc,
         method_reference,
diff --git a/compiler/optimizing/register_allocator_linear_scan.cc b/compiler/optimizing/register_allocator_linear_scan.cc
index 458d1a740e..35a0ab404e 100644
--- a/compiler/optimizing/register_allocator_linear_scan.cc
+++ b/compiler/optimizing/register_allocator_linear_scan.cc
@@ -79,11 +79,6 @@ RegisterAllocatorLinearScan::RegisterAllocatorLinearScan(ScopedArenaAllocator* a
   codegen->SetupBlockedRegisters();
   physical_core_register_intervals_.resize(codegen->GetNumberOfCoreRegisters(), nullptr);
   physical_fp_register_intervals_.resize(codegen->GetNumberOfFloatingPointRegisters(), nullptr);
-  // Always reserve for the current method and the graph's max out registers.
-  // TODO: compute it instead.
-  // ArtMethod* takes 2 vregs for 64 bits.
-  size_t ptr_size = static_cast<size_t>(InstructionSetPointerSize(codegen->GetInstructionSet()));
-  reserved_out_slots_ = ptr_size / kVRegSize + codegen->GetGraph()->GetMaximumNumberOfOutVRegs();
 }
 
 RegisterAllocatorLinearScan::~RegisterAllocatorLinearScan() {}
@@ -181,6 +176,10 @@ void RegisterAllocatorLinearScan::AllocateRegistersInternal() {
     }
   }
 
+  // Add the current method to the `reserved_out_slots_`. ArtMethod* takes 2 vregs for 64 bits.
+  PointerSize pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet());
+  reserved_out_slots_ += static_cast<size_t>(pointer_size) / kVRegSize;
+
   number_of_registers_ = codegen_->GetNumberOfCoreRegisters();
   registers_array_ = allocator_->AllocArray<size_t>(number_of_registers_,
                                                     kArenaAllocRegisterAllocator);
@@ -247,6 +246,17 @@ void RegisterAllocatorLinearScan::ProcessInstruction(HInstruction* instruction)
     return;
   }
 
+  if (locations->CanCall()) {
+    // Update the `reserved_out_slots_` for invokes that make a call, including intrinsics
+    // that make the call only on the slow-path. Same for the `HStringBuilderAppend`.
+    if (instruction->IsInvoke()) {
+      reserved_out_slots_ = std::max<size_t>(
+          reserved_out_slots_, instruction->AsInvoke()->GetNumberOfOutVRegs());
+    } else if (instruction->IsStringBuilderAppend()) {
+      reserved_out_slots_ = std::max<size_t>(
+          reserved_out_slots_, instruction->AsStringBuilderAppend()->GetNumberOfOutVRegs());
+    }
+  }
   bool will_call = locations->WillCall();
   if (will_call) {
     // If a call will happen, add the range to a fixed interval that represents all the
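The instruction_simplifier.cc hunk counts the outgoing vregs for HStringBuilderAppend, padding 64-bit arguments to an even vreg offset past the ArtMethod* slots. A standalone sketch of the same counting, again in plain C++ with illustrative names (ArgKind and CountOutVRegs are stand-ins; the real code walks a packed format word, StringBuilderAppend::kBitsPerArg per argument):

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative argument kinds standing in for StringBuilderAppend::Argument.
enum class ArgKind { kInt, kFloat, kObject, kLong, kDouble };

// Counts outgoing vregs the way the instruction_simplifier change does:
// 64-bit values occupy two vregs and must start at an even offset within
// the outgoing area, which begins with the ArtMethod* (method_vregs slots),
// so the running count starts there and is subtracted again at the end.
uint32_t CountOutVRegs(const std::vector<ArgKind>& args, size_t method_vregs) {
  uint32_t count = static_cast<uint32_t>(method_vregs);
  for (ArgKind a : args) {
    if (a == ArgKind::kLong || a == ArgKind::kDouble) {
      count += (count & 1u) /* alignment padding */ + 2u /* two vregs */;
    } else {
      count += 1u;
    }
  }
  return count - static_cast<uint32_t>(method_vregs);
}

int main() {
  // 64-bit target: ArtMethod* takes 2 vregs. An int followed by a long needs
  // 1 (int) + 1 (padding so the long starts at an even offset) + 2 (long) = 4.
  assert(CountOutVRegs({ArgKind::kInt, ArgKind::kLong}, /*method_vregs=*/2u) == 4u);
  return 0;
}

The count matches the DCHECK added in code_generator.cc, where the final stack offset must equal the pointer size plus kVRegSize times GetNumberOfOutVRegs().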