commit     225ed9282e8676f13ec0b69786052684881596c3
author     2024-10-11 16:49:12 +0200
committer  2025-02-28 00:24:38 -0800
tree       eef492bdc3c3f5fd8c0524e04514c49922332d2e /compiler
parent     a04fda337fa7bf06a2104a1a6c4dce04af110c18
Remove `HInstruction::GetAllocator()`.
And clean up some uses of
instruction->GetBlock()->GetGraph()->GetAllocator()
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Change-Id: Iae218056495a0b9cf94d2a3c1cebd6c8eb909096
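Note on the shape of this refactoring: `HInstruction::GetAllocator()` walked instruction -> block -> graph -> allocator, so it was only valid once the instruction had been added to a block, and it hid which graph actually owned the allocation. Callers that already hold the graph (code generators, `HGraphVisitor` passes) now ask it directly. The following is a minimal, self-contained sketch of the two shapes using simplified stand-in types, not ART's real classes or signatures:

// Sketch only: hypothetical stand-ins for ART's types, to illustrate the
// change in ownership chain; the real classes differ.
#include <cstddef>
#include <memory>
#include <vector>

class ArenaAllocator {  // stand-in for ART's arena allocator
 public:
  void* Alloc(size_t bytes) {
    pool_.emplace_back(new char[bytes]);
    return pool_.back().get();
  }
 private:
  std::vector<std::unique_ptr<char[]>> pool_;
};

class HGraph {
 public:
  ArenaAllocator* GetAllocator() { return &allocator_; }
 private:
  ArenaAllocator allocator_;
};

class HBasicBlock {
 public:
  explicit HBasicBlock(HGraph* graph) : graph_(graph) {}
  HGraph* GetGraph() const { return graph_; }
 private:
  HGraph* graph_;
};

class HInstruction {
 public:
  HBasicBlock* GetBlock() const { return block_; }
  void SetBlock(HBasicBlock* block) { block_ = block; }
  // Removed by this change -- valid only once the instruction is in a
  // block, and it obscured the graph that owns the memory:
  //   ArenaAllocator* GetAllocator() const {
  //     return block_->GetGraph()->GetAllocator();
  //   }
 private:
  HBasicBlock* block_ = nullptr;
};

// After: a pass that already holds the graph fetches the allocator from it
// directly instead of detouring through instruction->block->graph.
void AllocateSomething(HGraph* graph) {
  ArenaAllocator* allocator = graph->GetAllocator();
  void* memory = allocator->Alloc(64);  // e.g. placement-new a LocationSummary
  (void)memory;
}

int main() {
  HGraph graph;
  HBasicBlock block(&graph);
  HInstruction instruction;
  instruction.SetBlock(&block);
  AllocateSomething(&graph);
  return 0;
}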
Diffstat (limited to 'compiler')
 compiler/optimizing/code_generator.cc             |  2
 compiler/optimizing/code_generator_arm64.h        |  8
 compiler/optimizing/code_generator_arm_vixl.h     |  8
 compiler/optimizing/dead_code_elimination.cc      |  2
 compiler/optimizing/instruction_simplifier.cc     |  6
 compiler/optimizing/intrinsics.cc                 |  2
 compiler/optimizing/intrinsics_arm64.cc           |  8
 compiler/optimizing/intrinsics_arm_vixl.cc        | 12
 compiler/optimizing/intrinsics_riscv64.cc         |  2
 compiler/optimizing/intrinsics_x86.cc             | 12
 compiler/optimizing/nodes.h                       |  1
 compiler/optimizing/reference_type_propagation.cc |  2
 12 files changed, 33 insertions(+), 32 deletions(-)
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 5c2e4dbc51..d63b0abcc7 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -645,7 +645,7 @@ void CodeGenerator::CreateUnresolvedFieldLocationSummary(
   bool is_get = field_access->IsUnresolvedInstanceFieldGet() ||
       field_access->IsUnresolvedStaticFieldGet();
 
-  ArenaAllocator* allocator = field_access->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(field_access, LocationSummary::kCallOnMainOnly);
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 4566cdf0ca..de13814eaf 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -186,11 +186,12 @@ class JumpTableARM64 : public DeletableArenaObject<kArenaAllocSwitchTable> {
  public:
   using VIXLInt32Literal = vixl::aarch64::Literal<int32_t>;
 
-  explicit JumpTableARM64(HPackedSwitch* switch_instr)
+  JumpTableARM64(HPackedSwitch* switch_instr, ArenaAllocator* allocator)
       : switch_instr_(switch_instr),
         table_start_(),
-        jump_targets_(switch_instr->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
+        jump_targets_(allocator->Adapter(kArenaAllocCodeGenerator)) {
     uint32_t num_entries = switch_instr_->GetNumEntries();
+    jump_targets_.reserve(num_entries);
     for (uint32_t i = 0; i < num_entries; i++) {
       VIXLInt32Literal* lit = new VIXLInt32Literal(0);
       jump_targets_.emplace_back(lit);
@@ -765,7 +766,8 @@ class CodeGeneratorARM64 : public CodeGenerator {
   uint32_t GetPreferredSlotsAlignment() const override { return vixl::aarch64::kXRegSizeInBytes; }
 
   JumpTableARM64* CreateJumpTable(HPackedSwitch* switch_instr) {
-    jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARM64(switch_instr));
+    ArenaAllocator* allocator = GetGraph()->GetAllocator();
+    jump_tables_.emplace_back(new (allocator) JumpTableARM64(switch_instr, allocator));
     return jump_tables_.back().get();
   }
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 2e20591c98..bbc519fcf9 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -204,11 +204,12 @@ ALWAYS_INLINE inline StoreOperandType GetStoreOperandType(DataType::Type type) {
 class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
  public:
-  explicit JumpTableARMVIXL(HPackedSwitch* switch_instr)
+  JumpTableARMVIXL(HPackedSwitch* switch_instr, ArenaAllocator* allocator)
       : switch_instr_(switch_instr),
         table_start_(),
-        bb_addresses_(switch_instr->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
+        bb_addresses_(allocator->Adapter(kArenaAllocCodeGenerator)) {
     uint32_t num_entries = switch_instr_->GetNumEntries();
+    bb_addresses_.reserve(num_entries);
     for (uint32_t i = 0; i < num_entries; i++) {
       VIXLInt32Literal *lit = new VIXLInt32Literal(0, vixl32::RawLiteral::kManuallyPlaced);
       bb_addresses_.emplace_back(lit);
@@ -883,7 +884,8 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
   void GenerateExplicitNullCheck(HNullCheck* instruction) override;
 
   JumpTableARMVIXL* CreateJumpTable(HPackedSwitch* switch_instr) {
-    jump_tables_.emplace_back(new (GetGraph()->GetAllocator()) JumpTableARMVIXL(switch_instr));
+    ArenaAllocator* allocator = GetGraph()->GetAllocator();
+    jump_tables_.emplace_back(new (allocator) JumpTableARMVIXL(switch_instr, allocator));
     return jump_tables_.back().get();
   }
 
   void EmitJumpTables();
diff --git a/compiler/optimizing/dead_code_elimination.cc b/compiler/optimizing/dead_code_elimination.cc
index 486625a71e..c367a20a06 100644
--- a/compiler/optimizing/dead_code_elimination.cc
+++ b/compiler/optimizing/dead_code_elimination.cc
@@ -201,7 +201,7 @@ static bool RemoveNonNullControlDependences(HBasicBlock* block, HBasicBlock* thr
         user_block != throws &&
         block->Dominates(user_block)) {
       if (bound == nullptr) {
-        bound = new (obj->GetBlock()->GetGraph()->GetAllocator()) HBoundType(obj);
+        bound = new (block->GetGraph()->GetAllocator()) HBoundType(obj);
         bound->SetUpperBound(ti, /*can_be_null*/ false);
         bound->SetReferenceTypeInfo(ti);
         bound->SetCanBeNull(false);
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 26efefa2d8..4ef0fc907a 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -301,7 +301,7 @@ bool InstructionSimplifierVisitor::TryCombineVecMultiplyAccumulate(HVecMul* mul)
     return false;
   }
 
-  ArenaAllocator* allocator = mul->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = GetGraph()->GetAllocator();
 
   if (!mul->HasOnlyOneNonEnvironmentUse()) {
     return false;
   }
@@ -3637,8 +3637,8 @@ bool InstructionSimplifierVisitor::TrySubtractionChainSimplification(
   bool is_x_negated = is_y_negated ^ ((x == right) && y->IsSub());
   int64_t const3_val = ComputeAddition(type, const1_val, const2_val);
   HBasicBlock* block = instruction->GetBlock();
-  HConstant* const3 = block->GetGraph()->GetConstant(type, const3_val);
-  ArenaAllocator* allocator = instruction->GetAllocator();
+  HConstant* const3 = GetGraph()->GetConstant(type, const3_val);
+  ArenaAllocator* allocator = GetGraph()->GetAllocator();
   HInstruction* z;
 
   if (is_x_negated) {
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 5323ae2445..edd454c93e 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -198,7 +198,7 @@ void IntrinsicVisitor::CreateReferenceRefersToLocations(HInvoke* invoke, CodeGen
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 98aa5600b4..3eaaa6cb94 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -2961,9 +2961,8 @@ void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
     }
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
   LocationSummary* locations =
-      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
+      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
   locations->SetInAt(0, Location::RequiresRegister());
   locations->SetInAt(1, LocationForSystemArrayCopyInput(invoke->InputAt(1)));
@@ -4925,7 +4924,7 @@ static LocationSummary* CreateVarHandleCommonLocations(HInvoke* invoke,
   size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
   DataType::Type return_type = invoke->GetType();
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
@@ -5976,8 +5975,7 @@ void VarHandleSlowPathARM64::EmitByteArrayViewCode(CodeGenerator* codegen_in) {
 }
 
 void IntrinsicLocationsBuilderARM64::VisitMethodHandleInvokeExact(HInvoke* invoke) {
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
-  LocationSummary* locations = new (allocator)
+  LocationSummary* locations = new (allocator_)
       LocationSummary(invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
 
   InvokeDexCallingConventionVisitorARM64 calling_convention;
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index a6f6eb0ba0..9e60090a03 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2690,7 +2690,7 @@ static void CreateUnsafeGetLocations(HInvoke* invoke,
                                      DataType::Type type,
                                      bool atomic) {
   bool can_call = codegen->EmitReadBarrier() && IsUnsafeGetReference(invoke);
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke,
                                       can_call
@@ -3101,7 +3101,7 @@ static void CreateUnsafePutLocations(HInvoke* invoke,
                                      CodeGeneratorARMVIXL* codegen,
                                      DataType::Type type,
                                      bool atomic) {
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
   locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
@@ -3115,7 +3115,7 @@ static void CreateUnsafePutAbsoluteLocations(HInvoke* invoke,
                                              CodeGeneratorARMVIXL* codegen,
                                              DataType::Type type,
                                              bool atomic) {
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
   locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
@@ -3752,7 +3752,7 @@ class ReadBarrierCasSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 
 static void CreateUnsafeCASLocations(HInvoke* invoke, CodeGeneratorARMVIXL* codegen) {
   const bool can_call = codegen->EmitReadBarrier() && IsUnsafeCASReference(invoke);
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke,
                                       can_call
@@ -4046,7 +4046,7 @@ static void CreateUnsafeGetAndUpdateLocations(HInvoke* invoke,
                                               DataType::Type type,
                                               GetAndUpdateOp get_and_update_op) {
   const bool can_call = codegen->EmitReadBarrier() && IsUnsafeGetAndSetReference(invoke);
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke,
                                       can_call
@@ -4653,7 +4653,7 @@ static LocationSummary* CreateVarHandleCommonLocations(HInvoke* invoke,
   size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
   DataType::Type return_type = invoke->GetType();
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index cc0f114c56..4c56800920 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -3858,7 +3858,7 @@ static LocationSummary* CreateVarHandleCommonLocations(HInvoke* invoke,
   size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
   DataType::Type return_type = invoke->GetType();
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations =
       new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 5a6b8832c4..5710ce42bb 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -4117,7 +4117,7 @@ static void CreateVarHandleGetLocations(HInvoke* invoke, CodeGeneratorX86* codeg
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
@@ -4253,7 +4253,7 @@ static void CreateVarHandleSetLocations(HInvoke* invoke, CodeGeneratorX86* codeg
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->SetInAt(0, Location::RequiresRegister());
@@ -4430,7 +4430,7 @@ static void CreateVarHandleGetAndSetLocations(HInvoke* invoke, CodeGeneratorX86*
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->AddRegisterTemps(2);
@@ -4630,7 +4630,7 @@ static void CreateVarHandleCompareAndSetOrExchangeLocations(HInvoke* invoke,
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->AddRegisterTemps(2);
@@ -4810,7 +4810,7 @@ static void CreateVarHandleGetAndAddLocations(HInvoke* invoke, CodeGeneratorX86*
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   locations->AddRegisterTemps(2);
@@ -4985,7 +4985,7 @@ static void CreateVarHandleGetAndBitwiseOpLocations(HInvoke* invoke, CodeGenerat
     return;
   }
 
-  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
   LocationSummary* locations = new (allocator) LocationSummary(
       invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
   // We need a byte register temp to store the result of the bitwise operation
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 772828d9ef..bcf27ae9fa 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -2113,7 +2113,6 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> {
   HInstruction* GetPreviousDisregardingMoves() const;
 
   HBasicBlock* GetBlock() const { return block_; }
-  ArenaAllocator* GetAllocator() const { return block_->GetGraph()->GetAllocator(); }
   void SetBlock(HBasicBlock* block) { block_ = block; }
   bool IsInBlock() const { return block_ != nullptr; }
   bool IsInLoop() const { return block_->IsInLoop(); }
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index 9867e11f35..0417f04c12 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -218,7 +218,7 @@ static void BoundTypeIn(HInstruction* receiver,
       : start_block->GetFirstInstruction();
   if (ShouldCreateBoundType(
         insert_point, receiver, class_rti, start_instruction, start_block)) {
-    bound_type = new (receiver->GetBlock()->GetGraph()->GetAllocator()) HBoundType(receiver);
+    bound_type = new (start_block->GetGraph()->GetAllocator()) HBoundType(receiver);
    bound_type->SetUpperBound(class_rti, /* can_be_null= */ false);
     start_block->InsertInstructionBefore(bound_type, insert_point);
     // To comply with the RTP algorithm, don't type the bound type just yet, it will
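A side note on the jump-table constructors above: besides taking the allocator explicitly, they now reserve the target vector's capacity before the fill loop. A tiny illustrative sketch of the effect, with std::vector standing in for ART's ArenaVector (names invented for illustration):

#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  const uint32_t num_entries = 1024;
  std::vector<int32_t> jump_targets;
  jump_targets.reserve(num_entries);  // one up-front allocation...
  for (uint32_t i = 0; i < num_entries; ++i) {
    jump_targets.emplace_back(0);     // ...so no reallocation or copying here
  }
  // capacity() is at least num_entries; no growth happened during the loop.
  std::cout << jump_targets.capacity() << '\n';
  return 0;
}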