path: root/compiler/optimizing
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc    | 97
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc      | 92
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc   | 92
-rw-r--r--  compiler/optimizing/register_allocator.cc  | 51
4 files changed, 310 insertions(+), 22 deletions(-)
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 56313730dc..a5332ea794 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -1055,6 +1055,102 @@ void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
__ Bind(slow_path->GetExitLabel());
}
+void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ // Temporary registers to store lengths of strings and for calculations.
+ locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister());
+
+ locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
+ vixl::MacroAssembler* masm = GetVIXLAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ Register str = WRegisterFrom(locations->InAt(0));
+ Register arg = WRegisterFrom(locations->InAt(1));
+ Register out = XRegisterFrom(locations->Out());
+
+ UseScratchRegisterScope scratch_scope(masm);
+ Register temp = scratch_scope.AcquireW();
+ Register temp1 = WRegisterFrom(locations->GetTemp(0));
+ Register temp2 = WRegisterFrom(locations->GetTemp(1));
+
+ vixl::Label loop;
+ vixl::Label end;
+ vixl::Label return_true;
+ vixl::Label return_false;
+
+ // Get offsets of count, value, and class fields within a string object.
+ const int32_t count_offset = mirror::String::CountOffset().Int32Value();
+ const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
+ const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ // Check if input is null, return false if it is.
+ __ Cbz(arg, &return_false);
+
+ // Reference equality check, return true if same reference.
+ __ Cmp(str, arg);
+ __ B(&return_true, eq);
+
+ // Instanceof check for the argument by comparing class fields.
+ // All string objects must have the same type since String cannot be subclassed.
+ // Receiver must be a string object, so its class field is equal to all strings' class fields.
+ // If the argument is a string object, its class field must be equal to receiver's class field.
+ __ Ldr(temp, MemOperand(str.X(), class_offset));
+ __ Ldr(temp1, MemOperand(arg.X(), class_offset));
+ __ Cmp(temp, temp1);
+ __ B(&return_false, ne);
+
+ // Load lengths of this and argument strings.
+ __ Ldr(temp, MemOperand(str.X(), count_offset));
+ __ Ldr(temp1, MemOperand(arg.X(), count_offset));
+ // Check if lengths are equal, return false if they're not.
+ __ Cmp(temp, temp1);
+ __ B(&return_false, ne);
+ // Store offset of string value in preparation for comparison loop
+ __ Mov(temp1, value_offset);
+ // Return true if both strings are empty.
+ __ Cbz(temp, &return_true);
+
+ // Assertions that must hold in order to compare strings 4 characters at a time.
+ DCHECK_ALIGNED(value_offset, 8);
+ static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");
+
+ temp1 = temp1.X();
+ temp2 = temp2.X();
+
+ // Loop to compare strings 4 characters at a time starting at the beginning of the string.
+ // Ok to do this because strings are zero-padded to be 8-byte aligned.
+ __ Bind(&loop);
+ __ Ldr(out, MemOperand(str.X(), temp1));
+ __ Ldr(temp2, MemOperand(arg.X(), temp1));
+ __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
+ __ Cmp(out, temp2);
+ __ B(&return_false, ne);
+ __ Sub(temp, temp, Operand(4), SetFlags);
+ __ B(&loop, gt);
+
+ // Return true and exit the function.
+ // If loop does not result in returning false, we return true.
+ __ Bind(&return_true);
+ __ Mov(out, 1);
+ __ B(&end);
+
+ // Return false and exit the function.
+ __ Bind(&return_false);
+ __ Mov(out, 0);
+ __ Bind(&end);
+}
+
static void GenerateVisitStringIndexOf(HInvoke* invoke,
vixl::MacroAssembler* masm,
CodeGeneratorARM64* codegen,
@@ -1229,7 +1325,6 @@ void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
-UNIMPLEMENTED_INTRINSIC(StringEquals)
#undef UNIMPLEMENTED_INTRINSIC
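
For readers following the ARM64 hunk above, the emitted fast path is equivalent to the C++ sketch below. It is an illustration under stated assumptions, not ART code: FakeString and its fields are hypothetical stand-ins for the class pointer (class_offset), character count (count_offset), and inline UTF-16 data (value_offset) that the intrinsic reads, and the zero padding up to the 8-byte object alignment is what makes the 8-byte loads at the tail safe.

#include <cstdint>
#include <cstring>

// Hypothetical stand-in for the String object layout assumed by the intrinsic.
struct FakeString {
  const void* klass;    // compared via class_offset
  int32_t count;        // compared via count_offset
  uint16_t value[1];    // read 8 bytes at a time via value_offset, zero padded
};

bool StringEqualsSketch(const FakeString* str, const FakeString* arg) {
  if (arg == nullptr) return false;             // Cbz(arg, &return_false)
  if (str == arg) return true;                  // same reference
  if (str->klass != arg->klass) return false;   // argument is not a String
  if (str->count != arg->count) return false;   // different lengths
  if (str->count == 0) return true;             // both empty
  // Compare four UTF-16 characters (one 64-bit word) per iteration; reading
  // past the last character mirrors the emitted code and relies on both
  // objects being zero padded out to their 8-byte alignment.
  const char* a = reinterpret_cast<const char*>(str->value);
  const char* b = reinterpret_cast<const char*>(arg->value);
  for (int32_t remaining = str->count; remaining > 0; remaining -= 4) {
    uint64_t wa;
    uint64_t wb;
    std::memcpy(&wa, a, sizeof(wa));
    std::memcpy(&wb, b, sizeof(wb));
    if (wa != wb) return false;
    a += sizeof(uint64_t);
    b += sizeof(uint64_t);
  }
  return true;
}
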
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 3c8be27fb9..4471d713e2 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -945,6 +945,97 @@ void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
__ Bind(slow_path->GetExitLabel());
}
+void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+
+ // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
+ locations->AddTemp(Location::RegisterLocation(ECX));
+ locations->AddTemp(Location::RegisterLocation(EDI));
+
+ // Set output, ESI needed for repe_cmpsl instruction anyway.
+ locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
+ X86Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ Register str = locations->InAt(0).AsRegister<Register>();
+ Register arg = locations->InAt(1).AsRegister<Register>();
+ Register ecx = locations->GetTemp(0).AsRegister<Register>();
+ Register edi = locations->GetTemp(1).AsRegister<Register>();
+ Register esi = locations->Out().AsRegister<Register>();
+
+ Label end;
+ Label return_true;
+ Label return_false;
+
+ // Get offsets of count, value, and class fields within a string object.
+ const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
+ const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
+ const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ // Check if input is null, return false if it is.
+ __ testl(arg, arg);
+ __ j(kEqual, &return_false);
+
+ // Instanceof check for the argument by comparing class fields.
+ // All string objects must have the same type since String cannot be subclassed.
+ // Receiver must be a string object, so its class field is equal to all strings' class fields.
+ // If the argument is a string object, its class field must be equal to receiver's class field.
+ __ movl(ecx, Address(str, class_offset));
+ __ cmpl(ecx, Address(arg, class_offset));
+ __ j(kNotEqual, &return_false);
+
+ // Reference equality check, return true if same reference.
+ __ cmpl(str, arg);
+ __ j(kEqual, &return_true);
+
+ // Load length of receiver string.
+ __ movl(ecx, Address(str, count_offset));
+ // Check if lengths are equal, return false if they're not.
+ __ cmpl(ecx, Address(arg, count_offset));
+ __ j(kNotEqual, &return_false);
+ // Return true if both strings are empty.
+ __ testl(ecx, ecx);
+ __ j(kEqual, &return_true);
+
+ // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
+ __ leal(esi, Address(str, value_offset));
+ __ leal(edi, Address(arg, value_offset));
+
+ // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
+ __ addl(ecx, Immediate(1));
+ __ shrl(ecx, Immediate(1));
+
+ // Assertions that must hold in order to compare strings 2 characters at a time.
+ DCHECK_ALIGNED(value_offset, 4);
+ static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
+
+ // Loop to compare strings two characters at a time starting at the beginning of the string.
+ __ repe_cmpsl();
+ // If strings are not equal, zero flag will be cleared.
+ __ j(kNotEqual, &return_false);
+
+ // Return true and exit the function.
+ // If loop does not result in returning false, we return true.
+ __ Bind(&return_true);
+ __ movl(esi, Immediate(1));
+ __ jmp(&end);
+
+ // Return false and exit the function.
+ __ Bind(&return_false);
+ __ xorl(esi, esi);
+ __ Bind(&end);
+}
+
static void CreateStringIndexOfLocations(HInvoke* invoke,
ArenaAllocator* allocator,
bool start_at_zero) {
@@ -1758,7 +1849,6 @@ UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros)
UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros)
-UNIMPLEMENTED_INTRINSIC(StringEquals)
#undef UNIMPLEMENTED_INTRINSIC
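
The x86 path delegates the inner loop to repe_cmpsl, i.e. repe cmpsd: with ECX holding a dword count and ESI/EDI pointing at the two character arrays, the CPU compares one 32-bit unit per step, advancing both pointers and stopping early on a mismatch, so the zero flag drives the following j(kNotEqual, &return_false). Below is a minimal C++ equivalent of that loop and of the (len + 1) >> 1 rounding, again an illustrative sketch rather than ART code (the helper names are hypothetical):

#include <cstdint>

// Rough equivalent of repe cmpsd: compare `count` 32-bit units, stopping at
// the first mismatch (the hardware loop sets ZF accordingly).
static bool RepeCmpsdEquivalent(const uint32_t* lhs, const uint32_t* rhs, uint32_t count) {
  while (count-- != 0) {
    if (*lhs++ != *rhs++) return false;
  }
  return true;
}

// Dword count the intrinsic loads into ECX: two UTF-16 characters per dword,
// rounded up so an odd trailing character is still covered. The extra
// half-dword compares zero padding, which matches on both sides because the
// lengths are already known to be equal.
static uint32_t DwordCount(uint32_t char_count) {
  return (char_count + 1) >> 1;  // addl ecx, 1; shrl ecx, 1
}
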
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index b4926c2afa..9ea68ec07d 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -854,6 +854,97 @@ void IntrinsicCodeGeneratorX86_64::VisitStringCompareTo(HInvoke* invoke) {
__ Bind(slow_path->GetExitLabel());
}
+void IntrinsicLocationsBuilderX86_64::VisitStringEquals(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+
+ // Request temporary registers, RCX and RDI needed for repe_cmpsq instruction.
+ locations->AddTemp(Location::RegisterLocation(RCX));
+ locations->AddTemp(Location::RegisterLocation(RDI));
+
+ // Set output, RSI needed for repe_cmpsq instruction anyway.
+ locations->SetOut(Location::RegisterLocation(RSI), Location::kOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitStringEquals(HInvoke* invoke) {
+ X86_64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ CpuRegister str = locations->InAt(0).AsRegister<CpuRegister>();
+ CpuRegister arg = locations->InAt(1).AsRegister<CpuRegister>();
+ CpuRegister rcx = locations->GetTemp(0).AsRegister<CpuRegister>();
+ CpuRegister rdi = locations->GetTemp(1).AsRegister<CpuRegister>();
+ CpuRegister rsi = locations->Out().AsRegister<CpuRegister>();
+
+ Label end;
+ Label return_true;
+ Label return_false;
+
+ // Get offsets of count, value, and class fields within a string object.
+ const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
+ const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
+ const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
+
+ // Note that the null check must have been done earlier.
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ // Check if input is null, return false if it is.
+ __ testl(arg, arg);
+ __ j(kEqual, &return_false);
+
+ // Instanceof check for the argument by comparing class fields.
+ // All string objects must have the same type since String cannot be subclassed.
+ // Receiver must be a string object, so its class field is equal to all strings' class fields.
+ // If the argument is a string object, its class field must be equal to receiver's class field.
+ __ movl(rcx, Address(str, class_offset));
+ __ cmpl(rcx, Address(arg, class_offset));
+ __ j(kNotEqual, &return_false);
+
+ // Reference equality check, return true if same reference.
+ __ cmpl(str, arg);
+ __ j(kEqual, &return_true);
+
+ // Load length of receiver string.
+ __ movl(rcx, Address(str, count_offset));
+ // Check if lengths are equal, return false if they're not.
+ __ cmpl(rcx, Address(arg, count_offset));
+ __ j(kNotEqual, &return_false);
+ // Return true if both strings are empty.
+ __ testl(rcx, rcx);
+ __ j(kEqual, &return_true);
+
+ // Load starting addresses of string values into RSI/RDI as required for repe_cmpsq instruction.
+ __ leal(rsi, Address(str, value_offset));
+ __ leal(rdi, Address(arg, value_offset));
+
+ // Divide string length by 4 and adjust for lengths not divisible by 4.
+ __ addl(rcx, Immediate(3));
+ __ shrl(rcx, Immediate(2));
+
+ // Assertions that must hold in order to compare strings 4 characters at a time.
+ DCHECK_ALIGNED(value_offset, 8);
+ static_assert(IsAligned<8>(kObjectAlignment), "String is not zero padded");
+
+ // Loop to compare strings four characters at a time starting at the beginning of the string.
+ __ repe_cmpsq();
+ // If strings are not equal, zero flag will be cleared.
+ __ j(kNotEqual, &return_false);
+
+ // Return true and exit the function.
+ // If loop does not result in returning false, we return true.
+ __ Bind(&return_true);
+ __ movl(rsi, Immediate(1));
+ __ jmp(&end);
+
+ // Return false and exit the function.
+ __ Bind(&return_false);
+ __ xorl(rsi, rsi);
+ __ Bind(&end);
+}
+
static void CreateStringIndexOfLocations(HInvoke* invoke,
ArenaAllocator* allocator,
bool start_at_zero) {
@@ -1607,7 +1698,6 @@ UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros)
UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros)
-UNIMPLEMENTED_INTRINSIC(StringEquals)
#undef UNIMPLEMENTED_INTRINSIC
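
The x86-64 version is the same algorithm widened to repe_cmpsq: each compared unit is a 64-bit qword holding four UTF-16 characters, so the count loaded into RCX is rounded up with (len + 3) >> 2, and the up-to-three-character over-read again lands in the zero padding guaranteed by the 8-byte object alignment. A sketch of just that count computation (hypothetical helper, not ART code):

#include <cstdint>

// Qword count fed into RCX for repe cmpsq: four UTF-16 characters per 64-bit
// unit, rounded up so the tail of a length not divisible by four is covered
// by the zero padding.
static uint32_t QwordCount(uint32_t char_count) {
  return (char_count + 3) >> 2;  // addl rcx, 3; shrl rcx, 2
}
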
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index 60f5ab24da..9f32a9eaf8 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -952,7 +952,16 @@ bool RegisterAllocator::PotentiallyRemoveOtherHalf(LiveInterval* interval,
// we spill `current` instead.
bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
size_t first_register_use = current->FirstRegisterUse();
- if (first_register_use == kNoLifetime) {
+ if (current->HasRegister()) {
+ DCHECK(current->IsHighInterval());
+ // The low interval has allocated the register for the high interval. In
+ // case the low interval had to split both intervals, we may end up in a
+ // situation where the high interval does not have a register use anymore.
+ // We must still proceed in order to split currently active and inactive
+ // uses of the high interval's register, and put the high interval in the
+ // active set.
+ DCHECK(first_register_use != kNoLifetime || (current->GetNextSibling() != nullptr));
+ } else if (first_register_use == kNoLifetime) {
AllocateSpillSlotFor(current);
return false;
}
@@ -1019,7 +1028,7 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
// When allocating the low part, we made sure the high register was available.
DCHECK_LT(first_use, next_use[reg]);
} else if (current->IsLowInterval()) {
- reg = FindAvailableRegisterPair(next_use, first_register_use);
+ reg = FindAvailableRegisterPair(next_use, first_use);
// We should spill if both registers are not available.
should_spill = (first_use >= next_use[reg])
|| (first_use >= next_use[GetHighForLowRegister(reg)]);
@@ -1033,16 +1042,28 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
if (should_spill) {
DCHECK(!current->IsHighInterval());
bool is_allocation_at_use_site = (current->GetStart() >= (first_register_use - 1));
- if (current->IsLowInterval()
- && is_allocation_at_use_site
- && TrySplitNonPairOrUnalignedPairIntervalAt(current->GetStart(),
- first_register_use,
- next_use)) {
+ if (is_allocation_at_use_site) {
+ if (!current->IsLowInterval()) {
+ DumpInterval(std::cerr, current);
+ DumpAllIntervals(std::cerr);
+ // This situation has the potential to infinite loop, so we make it a non-debug CHECK.
+ HInstruction* at = liveness_.GetInstructionFromPosition(first_register_use / 2);
+ CHECK(false) << "There is not enough registers available for "
+ << current->GetParent()->GetDefinedBy()->DebugName() << " "
+ << current->GetParent()->GetDefinedBy()->GetId()
+ << " at " << first_register_use - 1 << " "
+ << (at == nullptr ? "" : at->DebugName());
+ }
+
// If we're allocating a register for `current` because the instruction at
// that position requires it, but we think we should spill, then there are
// non-pair intervals or unaligned pair intervals blocking the allocation.
// We split the first interval found, and put ourselves first in the
// `unhandled_` list.
+ bool success = TrySplitNonPairOrUnalignedPairIntervalAt(current->GetStart(),
+ first_register_use,
+ next_use);
+ DCHECK(success);
LiveInterval* existing = unhandled_->Peek();
DCHECK(existing->IsHighInterval());
DCHECK_EQ(existing->GetLowInterval(), current);
@@ -1052,17 +1073,7 @@ bool RegisterAllocator::AllocateBlockedReg(LiveInterval* current) {
// register, we split this interval just before its first register use.
AllocateSpillSlotFor(current);
LiveInterval* split = SplitBetween(current, current->GetStart(), first_register_use - 1);
- if (current == split) {
- DumpInterval(std::cerr, current);
- DumpAllIntervals(std::cerr);
- // This situation has the potential to infinite loop, so we make it a non-debug CHECK.
- HInstruction* at = liveness_.GetInstructionFromPosition(first_register_use / 2);
- CHECK(false) << "There is not enough registers available for "
- << split->GetParent()->GetDefinedBy()->DebugName() << " "
- << split->GetParent()->GetDefinedBy()->GetId()
- << " at " << first_register_use - 1 << " "
- << (at == nullptr ? "" : at->DebugName());
- }
+ DCHECK(current != split);
AddSorted(unhandled_, split);
}
return false;
@@ -1243,7 +1254,9 @@ LiveInterval* RegisterAllocator::Split(LiveInterval* interval, size_t position)
void RegisterAllocator::AllocateSpillSlotFor(LiveInterval* interval) {
if (interval->IsHighInterval()) {
- // The low interval will contain the spill slot.
+ // The low interval already took care of allocating the spill slot.
+ DCHECK(!interval->GetLowInterval()->HasRegister());
+ DCHECK(interval->GetLowInterval()->GetParent()->HasSpillSlot());
return;
}