Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc   57
-rw-r--r--  compiler/optimizing/code_generator_arm64.h      7
-rw-r--r--  compiler/optimizing/register_allocator.cc      51
-rw-r--r--  compiler/optimizing/ssa_liveness_analysis.h    55
4 files changed, 94 insertions, 76 deletions
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 0fa4fa4256..a5ddd6ba82 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -466,8 +466,10 @@ void CodeGeneratorARM64::GenerateFrameEntry() {
     // sp[0] : current method.
     __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
     GetAssembler()->cfi().AdjustCFAOffset(frame_size);
-    SpillRegisters(GetFramePreservedCoreRegisters(), frame_size - GetCoreSpillSize());
-    SpillRegisters(GetFramePreservedFPRegisters(), frame_size - FrameEntrySpillSize());
+    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
+                                   frame_size - GetCoreSpillSize());
+    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
+                                   frame_size - FrameEntrySpillSize());
   }
 }
 
@@ -475,8 +477,10 @@ void CodeGeneratorARM64::GenerateFrameExit() {
   GetAssembler()->cfi().RememberState();
   if (!HasEmptyFrame()) {
     int frame_size = GetFrameSize();
-    UnspillRegisters(GetFramePreservedFPRegisters(), frame_size - FrameEntrySpillSize());
-    UnspillRegisters(GetFramePreservedCoreRegisters(), frame_size - GetCoreSpillSize());
+    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
+                                     frame_size - FrameEntrySpillSize());
+    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
+                                     frame_size - GetCoreSpillSize());
     __ Drop(frame_size);
     GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
   }
@@ -485,51 +489,6 @@ void CodeGeneratorARM64::GenerateFrameExit() {
   GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
 }
 
-static inline dwarf::Reg DWARFReg(CPURegister reg) {
-  if (reg.IsFPRegister()) {
-    return dwarf::Reg::Arm64Fp(reg.code());
-  } else {
-    DCHECK_LT(reg.code(), 31u);  // X0 - X30.
-    return dwarf::Reg::Arm64Core(reg.code());
-  }
-}
-
-void CodeGeneratorARM64::SpillRegisters(vixl::CPURegList registers, int offset) {
-  int size = registers.RegisterSizeInBytes();
-  while (registers.Count() >= 2) {
-    const CPURegister& dst0 = registers.PopLowestIndex();
-    const CPURegister& dst1 = registers.PopLowestIndex();
-    __ Stp(dst0, dst1, MemOperand(__ StackPointer(), offset));
-    GetAssembler()->cfi().RelOffset(DWARFReg(dst0), offset);
-    GetAssembler()->cfi().RelOffset(DWARFReg(dst1), offset + size);
-    offset += 2 * size;
-  }
-  if (!registers.IsEmpty()) {
-    const CPURegister& dst0 = registers.PopLowestIndex();
-    __ Str(dst0, MemOperand(__ StackPointer(), offset));
-    GetAssembler()->cfi().RelOffset(DWARFReg(dst0), offset);
-  }
-  DCHECK(registers.IsEmpty());
-}
-
-void CodeGeneratorARM64::UnspillRegisters(vixl::CPURegList registers, int offset) {
-  int size = registers.RegisterSizeInBytes();
-  while (registers.Count() >= 2) {
-    const CPURegister& dst0 = registers.PopLowestIndex();
-    const CPURegister& dst1 = registers.PopLowestIndex();
-    __ Ldp(dst0, dst1, MemOperand(__ StackPointer(), offset));
-    GetAssembler()->cfi().Restore(DWARFReg(dst0));
-    GetAssembler()->cfi().Restore(DWARFReg(dst1));
-    offset += 2 * size;
-  }
-  if (!registers.IsEmpty()) {
-    const CPURegister& dst0 = registers.PopLowestIndex();
-    __ Ldr(dst0, MemOperand(__ StackPointer(), offset));
-    GetAssembler()->cfi().Restore(DWARFReg(dst0));
-  }
-  DCHECK(registers.IsEmpty());
-}
-
 void CodeGeneratorARM64::Bind(HBasicBlock* block) {
   __ Bind(GetLabelOf(block));
 }
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 9430e31037..07c6dd059a 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -46,14 +46,11 @@ static constexpr size_t kParameterFPRegistersLength = arraysize(kParameterFPRegisters);
 
 const vixl::Register tr = vixl::x18;                        // Thread Register
 static const vixl::Register kArtMethodRegister = vixl::w0;  // Method register on invoke.
-const vixl::Register kQuickSuspendRegister = vixl::x19;
 
 const vixl::CPURegList vixl_reserved_core_registers(vixl::ip0, vixl::ip1);
 const vixl::CPURegList vixl_reserved_fp_registers(vixl::d31);
 
-// TODO: When the runtime does not use kQuickSuspendRegister as a suspend
-// counter remove it from the reserved registers list.
-const vixl::CPURegList runtime_reserved_core_registers(tr, kQuickSuspendRegister, vixl::lr);
+const vixl::CPURegList runtime_reserved_core_registers(tr, vixl::lr);
 
 // Callee-saved registers defined by AAPCS64.
 const vixl::CPURegList callee_saved_core_registers(vixl::CPURegister::kRegister,
@@ -227,8 +224,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
 
   void GenerateFrameEntry() OVERRIDE;
   void GenerateFrameExit() OVERRIDE;
-  void SpillRegisters(vixl::CPURegList registers, int offset);
-  void UnspillRegisters(vixl::CPURegList registers, int offset);
 
   vixl::CPURegList GetFramePreservedCoreRegisters() const {
     return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index 4bca43499f..8f2632880d 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -224,7 +224,7 @@ void RegisterAllocator::ProcessInstruction(HInstruction* instruction) {
       temp_intervals_.Add(interval);
       interval->AddTempUse(instruction, i);
       if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
-        interval->AddHighInterval(true);
+        interval->AddHighInterval(/* is_temp */ true);
         LiveInterval* high = interval->GetHighInterval();
         temp_intervals_.Add(high);
         unhandled_fp_intervals_.Add(high);
@@ -310,6 +310,29 @@ void RegisterAllocator::ProcessInstruction(HInstruction* instruction) {
     current->AddHighInterval();
   }
 
+  for (size_t safepoint_index = safepoints_.Size(); safepoint_index > 0; --safepoint_index) {
+    HInstruction* safepoint = safepoints_.Get(safepoint_index - 1);
+    size_t safepoint_position = safepoint->GetLifetimePosition();
+
+    // Test that safepoints are ordered in the optimal way.
+    DCHECK(safepoint_index == safepoints_.Size()
+           || safepoints_.Get(safepoint_index)->GetLifetimePosition() < safepoint_position);
+
+    if (safepoint_position == current->GetStart()) {
+      // The safepoint is for this instruction, so the location of the instruction
+      // does not need to be saved.
+      DCHECK_EQ(safepoint_index, safepoints_.Size());
+      DCHECK_EQ(safepoint, instruction);
+      continue;
+    } else if (current->IsDeadAt(safepoint_position)) {
+      break;
+    } else if (!current->Covers(safepoint_position)) {
+      // Hole in the interval.
+      continue;
+    }
+    current->AddSafepoint(safepoint);
+  }
+
   // Some instructions define their output in fixed register/stack slot. We need
   // to ensure we know these locations before doing register allocation. For a
   // given register, we create an interval that covers these locations. The register
@@ -1399,7 +1422,7 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
         : Location::StackSlot(interval->GetParent()->GetSpillSlot()));
   }
   UsePosition* use = current->GetFirstUse();
-  size_t safepoint_index = safepoints_.Size();
+  SafepointPosition* safepoint_position = interval->GetFirstSafepoint();
 
   // Walk over all siblings, updating locations of use positions, and
   // connecting them when they are adjacent.
@@ -1450,28 +1473,13 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
       InsertParallelMoveAt(current->GetEnd(), interval->GetDefinedBy(), source, destination);
     }
 
-    // At each safepoint, we record stack and register information.
-    // We iterate backwards to test safepoints in ascending order of positions,
-    // which is what LiveInterval::Covers is optimized for.
-    for (; safepoint_index > 0; --safepoint_index) {
-      HInstruction* safepoint = safepoints_.Get(safepoint_index - 1);
-      size_t position = safepoint->GetLifetimePosition();
-
-      // Test that safepoints are ordered in the optimal way.
-      DCHECK(safepoint_index == safepoints_.Size()
-             || safepoints_.Get(safepoint_index)->GetLifetimePosition() <= position);
-
-      if (current->IsDeadAt(position)) {
+    for (; safepoint_position != nullptr; safepoint_position = safepoint_position->GetNext()) {
+      if (!current->Covers(safepoint_position->GetPosition())) {
+        DCHECK(next_sibling != nullptr);
         break;
-      } else if (!current->Covers(position)) {
-        continue;
-      } else if (interval->GetStart() == position) {
-        // The safepoint is for this instruction, so the location of the instruction
-        // does not need to be saved.
-        continue;
       }
-      LocationSummary* locations = safepoint->GetLocations();
+      LocationSummary* locations = safepoint_position->GetLocations();
       if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
         locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);
       }
@@ -1515,6 +1523,7 @@ void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
   } while (current != nullptr);
 
   if (kIsDebugBuild) {
+    DCHECK(safepoint_position == nullptr);
     // Following uses can only be environment uses. The location for
     // these environments will be none.
     while (use != nullptr) {
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index d2da84c0c0..b6e4028c3f 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -149,6 +149,39 @@ class UsePosition : public ArenaObject<kArenaAllocMisc> {
   DISALLOW_COPY_AND_ASSIGN(UsePosition);
 };
 
+class SafepointPosition : public ArenaObject<kArenaAllocMisc> {
+ public:
+  explicit SafepointPosition(HInstruction* instruction)
+      : instruction_(instruction),
+        next_(nullptr) {}
+
+  void SetNext(SafepointPosition* next) {
+    next_ = next;
+  }
+
+  size_t GetPosition() const {
+    return instruction_->GetLifetimePosition();
+  }
+
+  SafepointPosition* GetNext() const {
+    return next_;
+  }
+
+  LocationSummary* GetLocations() const {
+    return instruction_->GetLocations();
+  }
+
+  HInstruction* GetInstruction() const {
+    return instruction_;
+  }
+
+ private:
+  HInstruction* const instruction_;
+  SafepointPosition* next_;
+
+  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
+};
+
 /**
  * An interval is a list of disjoint live ranges where an instruction is live.
  * Each instruction that has uses gets an interval.
@@ -703,6 +736,22 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
     UNREACHABLE();
   }
 
+  void AddSafepoint(HInstruction* instruction) {
+    SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
+    if (first_safepoint_ == nullptr) {
+      first_safepoint_ = last_safepoint_ = safepoint;
+    } else {
+      DCHECK_LT(last_safepoint_->GetPosition(), safepoint->GetPosition());
+      last_safepoint_->SetNext(safepoint);
+      last_safepoint_ = safepoint;
+    }
+  }
+
+  SafepointPosition* GetFirstSafepoint() const {
+    DCHECK_EQ(GetParent(), this) << "Only the first sibling lists safepoints";
+    return first_safepoint_;
+  }
+
  private:
   LiveInterval(ArenaAllocator* allocator,
                Primitive::Type type,
@@ -715,6 +764,8 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
       : allocator_(allocator),
         first_range_(nullptr),
         last_range_(nullptr),
+        first_safepoint_(nullptr),
+        last_safepoint_(nullptr),
         last_visited_range_(nullptr),
         first_use_(nullptr),
         type_(type),
@@ -771,6 +822,10 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
   LiveRange* first_range_;
   LiveRange* last_range_;
 
+  // Safepoints where this interval is live. Only set in the parent interval.
+  SafepointPosition* first_safepoint_;
+  SafepointPosition* last_safepoint_;
+
   // Last visited range. This is a range search optimization leveraging the fact
   // that the register allocator does a linear scan through the intervals.
   LiveRange* last_visited_range_;
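
Note: the core of this change is that each parent LiveInterval now records, once during ProcessInstruction, the safepoints it is live at as a position-sorted singly linked list of SafepointPosition nodes, and ConnectSiblings then walks that list forward in step with the interval's siblings instead of re-scanning the global safepoint array per sibling. The following standalone C++ sketch illustrates only that linked-list pattern; the Instruction/Interval types below are simplified stand-ins for illustration, not the ART classes, and plain new is used where ART would use its arena allocator.

#include <cassert>
#include <cstddef>

// Simplified stand-in for HInstruction: only the lifetime position matters here.
struct Instruction {
  size_t lifetime_position;
};

// Simplified stand-in for SafepointPosition: a forward-linked node.
struct SafepointNode {
  Instruction* instruction;
  SafepointNode* next = nullptr;
  size_t Position() const { return instruction->lifetime_position; }
};

// Simplified stand-in for the safepoint list kept on the parent interval.
class Interval {
 public:
  // Append a safepoint; callers must add them in increasing position order,
  // mirroring the DCHECK_LT in LiveInterval::AddSafepoint above.
  void AddSafepoint(Instruction* instruction) {
    SafepointNode* safepoint = new SafepointNode{instruction};  // leaked for brevity
    if (first_ == nullptr) {
      first_ = last_ = safepoint;
    } else {
      assert(last_->Position() < safepoint->Position());
      last_->next = safepoint;
      last_ = safepoint;
    }
  }

  SafepointNode* GetFirstSafepoint() const { return first_; }

 private:
  SafepointNode* first_ = nullptr;
  SafepointNode* last_ = nullptr;
};

int main() {
  Instruction safepoints[] = {{10}, {26}, {42}};
  Interval interval;
  // Recorded once, up front, in ascending order (as ProcessInstruction now does).
  for (Instruction& s : safepoints) {
    interval.AddSafepoint(&s);
  }
  // A later pass (ConnectSiblings in the patch) just walks the list forward,
  // recording stack/register information at each covered safepoint position.
  for (SafepointNode* p = interval.GetFirstSafepoint(); p != nullptr; p = p->next) {
    // ... record info for p->Position() ...
  }
  return 0;
}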