-rw-r--r--   compiler/optimizing/nodes.h                   |  2
-rw-r--r--   compiler/optimizing/register_allocator.cc     | 20
-rw-r--r--   compiler/optimizing/ssa_liveness_analysis.h   | 20
3 files changed, 31 insertions, 11 deletions
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 649038b532..44931c62db 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -2162,7 +2162,7 @@ class HInvoke : public HInstruction {
 
   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
 
-  Intrinsics GetIntrinsic() {
+  Intrinsics GetIntrinsic() const {
     return intrinsic_;
   }
 
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index 830f2c26e1..e11ca65ca2 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -901,6 +901,10 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
     return false;
   }
 
+  // We use the first use to compare with other intervals. If this interval
+  // is used after any active intervals, we will spill this interval.
+  size_t first_use = current->FirstUseAfter(current->GetStart());
+
   // First set all registers as not being used.
   size_t* next_use = registers_array_;
   for (size_t i = 0; i < number_of_registers_; ++i) {
@@ -915,7 +919,7 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
     if (active->IsFixed()) {
       next_use[active->GetRegister()] = current->GetStart();
     } else {
-      size_t use = active->FirstRegisterUseAfter(current->GetStart());
+      size_t use = active->FirstUseAfter(current->GetStart());
       if (use != kNoLifetime) {
         next_use[active->GetRegister()] = use;
       }
@@ -943,7 +947,7 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
         next_use[inactive->GetRegister()] =
             std::min(next_intersection, next_use[inactive->GetRegister()]);
       } else {
-        size_t use = inactive->FirstRegisterUseAfter(current->GetStart());
+        size_t use = inactive->FirstUseAfter(current->GetStart());
         if (use != kNoLifetime) {
           next_use[inactive->GetRegister()] = std::min(use, next_use[inactive->GetRegister()]);
         }
@@ -957,16 +961,16 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
     DCHECK(current->IsHighInterval());
     reg = current->GetRegister();
     // When allocating the low part, we made sure the high register was available.
-    DCHECK_LT(first_register_use, next_use[reg]);
+    DCHECK_LT(first_use, next_use[reg]);
   } else if (current->IsLowInterval()) {
     reg = FindAvailableRegisterPair(next_use, first_register_use);
     // We should spill if both registers are not available.
-    should_spill = (first_register_use >= next_use[reg])
-        || (first_register_use >= next_use[GetHighForLowRegister(reg)]);
+    should_spill = (first_use >= next_use[reg])
+        || (first_use >= next_use[GetHighForLowRegister(reg)]);
   } else {
     DCHECK(!current->IsHighInterval());
     reg = FindAvailableRegister(next_use);
-    should_spill = (first_register_use >= next_use[reg]);
+    should_spill = (first_use >= next_use[reg]);
   }
 
   DCHECK_NE(reg, kNoRegister);
@@ -996,10 +1000,12 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
         DumpInterval(std::cerr, current);
         DumpAllIntervals(std::cerr);
         // This situation has the potential to infinite loop, so we make it a non-debug CHECK.
+        HInstruction* at = liveness_.GetInstructionFromPosition(first_register_use / 2);
         CHECK(false) << "There is not enough registers available for "
           << split->GetParent()->GetDefinedBy()->DebugName() << " "
           << split->GetParent()->GetDefinedBy()->GetId()
-          << " at " << first_register_use - 1;
+          << " at " << first_register_use - 1 << " "
+          << (at == nullptr ? "" : at->DebugName());
       }
       AddSorted(unhandled_, split);
     }
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index beb4907bc1..e3a4d8138e 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -131,6 +131,9 @@ class UsePosition : public ArenaObject<kArenaAllocMisc> {
 
   void Dump(std::ostream& stream) const {
     stream << position_;
+    if (is_environment_) {
+      stream << " (env)";
+    }
   }
 
   UsePosition* Dup(ArenaAllocator* allocator) const {
@@ -363,6 +366,10 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
 
   LiveInterval* GetParent() const { return parent_; }
 
+  // Returns whether this interval is the parent interval, that is, the interval
+  // that starts where the HInstruction is defined.
+  bool IsParent() const { return parent_ == this; }
+
   LiveRange* GetFirstRange() const { return first_range_; }
   LiveRange* GetLastRange() const { return last_range_; }
 
@@ -421,7 +428,7 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
     if (is_temp_) {
       return position == GetStart() ? position : kNoLifetime;
     }
-    if (position == GetStart() && defined_by_ != nullptr) {
+    if (position == GetStart() && IsParent()) {
       LocationSummary* locations = defined_by_->GetLocations();
       Location location = locations->Out();
       // This interval is the first interval of the instruction. If the output
@@ -470,12 +477,19 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
       return position == GetStart() ? position : kNoLifetime;
     }
 
+    if (position == GetStart() && IsParent()) {
+      if (defined_by_->GetLocations()->Out().IsValid()) {
+        return position;
+      }
+    }
+
     UsePosition* use = first_use_;
     size_t end = GetEnd();
     while (use != nullptr && use->GetPosition() <= end) {
       if (!use->GetIsEnvironment()) {
+        Location location = use->GetUser()->GetLocations()->InAt(use->GetInputIndex());
         size_t use_position = use->GetPosition();
-        if (use_position > position) {
+        if (use_position > position && location.IsValid()) {
           return use_position;
         }
       }
@@ -693,7 +707,7 @@ class LiveInterval : public ArenaObject<kArenaAllocMisc> {
   }
 
   void AddHighInterval(bool is_temp = false) {
-    DCHECK_EQ(GetParent(), this);
+    DCHECK(IsParent());
     DCHECK(!HasHighInterval());
     DCHECK(!HasLowInterval());
     high_or_low_interval_ = new (allocator_) LiveInterval(