author    denis.tomashev <denis.tomashev@syntacore.com> 2024-02-12 17:43:23 +0300
committer Vladimír Marko <vmarko@google.com> 2024-03-19 07:06:21 +0000
commit    91bb8a6fac641439ff9e929ab9b9eccc0ecbf4d2
tree      7aa14947b26feded7bae463044ab94944e3decbf /compiler/optimizing
parent    e1e79fc839c1e0e5b1a77f64ee6d7fbd66c1c285
riscv64: Implemented StringEquals intrinsic
Bug: 283082089
Test: art/test/testrunner/testrunner.py --target --64 --ndebug --no-image --jit-on-first-use
Change-Id: Ifb4076d7f12cbad174049995980f0f08db41357a
Signed-off-by: denis.tomashev <denis.tomashev@syntacore.com>
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_riscv64.h |   1 -
-rw-r--r--  compiler/optimizing/intrinsics_riscv64.cc    | 116 +
2 files changed, 116 insertions(+), 1 deletion(-)
diff --git a/compiler/optimizing/code_generator_riscv64.h b/compiler/optimizing/code_generator_riscv64.h
index 653e83dd57..4a80b17c28 100644
--- a/compiler/optimizing/code_generator_riscv64.h
+++ b/compiler/optimizing/code_generator_riscv64.h
@@ -72,7 +72,6 @@ static constexpr int32_t kFClassNaNMinValue = 0x100;
V(FP16Min) \
V(FP16Max) \
V(StringCompareTo) \
- V(StringEquals) \
V(StringGetCharsNoCheck) \
V(StringStringIndexOf) \
V(StringStringIndexOfAfter) \
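
Context for the one-line removal above: the V(...) entries form an X-macro list of intrinsics the riscv64 backend still reports as unimplemented, and taking StringEquals off that list is what hands String.equals() over to the Visit methods added in intrinsics_riscv64.cc below. The following is only a minimal sketch of that X-macro pattern, with macro, entry, and function names invented for illustration rather than the ones ART actually uses in code_generator_riscv64.h:

#include <cstdio>

// Hypothetical stand-in for the backend's unimplemented-intrinsic list; the
// real macro name and entry set in code_generator_riscv64.h are different.
#define UNIMPLEMENTED_INTRINSIC_LIST(V) \
  V(StringCompareTo)                    \
  V(StringGetCharsNoCheck)

// Each remaining entry expands into a placeholder; deleting V(StringEquals)
// from such a list is what stops a placeholder from being generated for it.
#define DEFINE_PLACEHOLDER(Name) \
  void Visit##Name() { std::puts("intrinsic " #Name ": not intrinsified here"); }

UNIMPLEMENTED_INTRINSIC_LIST(DEFINE_PLACEHOLDER)
#undef DEFINE_PLACEHOLDER

int main() {
  VisitStringCompareTo();        // expanded from the list above
  VisitStringGetCharsNoCheck();
}

Each name left on the list still expands into a placeholder; the removed entry instead gets the real implementation shown in the second file of this change.
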
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index d5bcf1a7ae..12eec101b7 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -1073,6 +1073,122 @@ static void EmitLoadReserved(Riscv64Assembler* assembler,
}
}
+void IntrinsicLocationsBuilderRISCV64::VisitStringEquals(HInvoke* invoke) {
+ LocationSummary* locations =
+ new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister());
+ // TODO: If the String.equals() is used only for an immediately following HIf, we can
+ // mark it as emitted-at-use-site and emit branches directly to the appropriate blocks.
+ // Then we shall need an extra temporary register instead of the output register.
+ locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorRISCV64::VisitStringEquals(HInvoke* invoke) {
+ Riscv64Assembler* assembler = GetAssembler();
+ LocationSummary* locations = invoke->GetLocations();
+
+ // Get offsets of count, value, and class fields within a string object.
+ const int32_t count_offset = mirror::String::CountOffset().Int32Value();
+ const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
+ const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+
+ XRegister str = locations->InAt(0).AsRegister<XRegister>();
+ XRegister arg = locations->InAt(1).AsRegister<XRegister>();
+ XRegister out = locations->Out().AsRegister<XRegister>();
+
+ ScratchRegisterScope srs(assembler);
+ XRegister temp = srs.AllocateXRegister();
+ XRegister temp1 = locations->GetTemp(0).AsRegister<XRegister>();
+
+ Riscv64Label loop;
+ Riscv64Label end;
+ Riscv64Label return_true;
+ Riscv64Label return_false;
+
+ DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
+
+ StringEqualsOptimizations optimizations(invoke);
+ if (!optimizations.GetArgumentNotNull()) {
+ // Check if input is null, return false if it is.
+ __ Beqz(arg, &return_false);
+ }
+
+ // Reference equality check, return true if same reference.
+ __ Beq(str, arg, &return_true);
+
+ if (!optimizations.GetArgumentIsString()) {
+ // Instanceof check for the argument by comparing class fields.
+ // All string objects must have the same type since String cannot be subclassed.
+ // Receiver must be a string object, so its class field is equal to all strings' class fields.
+ // If the argument is a string object, its class field must be equal to receiver's class field.
+ //
+ // As the String class is expected to be non-movable, we can read the class
+ // field from String.equals' arguments without read barriers.
+ AssertNonMovableStringClass();
+ // /* HeapReference<Class> */ temp = str->klass_
+ __ Loadwu(temp, str, class_offset);
+ // /* HeapReference<Class> */ temp1 = arg->klass_
+ __ Loadwu(temp1, arg, class_offset);
+ // Also, because we use the previously loaded class references only in the
+ // following comparison, we don't need to unpoison them.
+ __ Bne(temp, temp1, &return_false);
+ }
+
+ // Load `count` fields of this and argument strings.
+ __ Loadwu(temp, str, count_offset);
+ __ Loadwu(temp1, arg, count_offset);
+ // Check if `count` fields are equal, return false if they're not.
+  // This also compares the compression style; if it differs, return false.
+ __ Bne(temp, temp1, &return_false);
+
+ // Assertions that must hold in order to compare strings 8 bytes at a time.
+ // Ok to do this because strings are zero-padded to kObjectAlignment.
+ DCHECK_ALIGNED(value_offset, 8);
+ static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");
+
+ // Return true if both strings are empty. Even with string compression `count == 0` means empty.
+ static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
+ "Expecting 0=compressed, 1=uncompressed");
+ __ Beqz(temp, &return_true);
+
+ if (mirror::kUseStringCompression) {
+ // For string compression, calculate the number of bytes to compare (not chars).
+ // This could in theory exceed INT32_MAX, so treat temp as unsigned.
+ __ Andi(temp1, temp, 1); // Extract compression flag.
+ __ Srliw(temp, temp, 1u); // Extract length.
+ __ Sllw(temp, temp, temp1); // Calculate number of bytes to compare.
+ }
+
+  // Store the offset of the string value in preparation for the comparison loop.
+ __ Li(temp1, value_offset);
+
+ XRegister temp2 = srs.AllocateXRegister();
+ // Loop to compare strings 8 bytes at a time starting at the front of the string.
+ __ Bind(&loop);
+ __ Add(out, str, temp1);
+ __ Ld(out, out, 0);
+ __ Add(temp2, arg, temp1);
+ __ Ld(temp2, temp2, 0);
+ __ Addi(temp1, temp1, sizeof(uint64_t));
+ __ Bne(out, temp2, &return_false);
+ // With string compression, we have compared 8 bytes, otherwise 4 chars.
+ __ Addi(temp, temp, mirror::kUseStringCompression ? -8 : -4);
+ __ Bgt(temp, Zero, &loop);
+
+  // Return true and exit the function.
+  // If the loop did not branch to return_false, the strings compared equal.
+ __ Bind(&return_true);
+ __ Li(out, 1);
+ __ J(&end);
+
+ // Return false and exit the function.
+ __ Bind(&return_false);
+ __ Li(out, 0);
+ __ Bind(&end);
+}
+
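
For readers following the emitted RISC-V code, here is a hedged C++ sketch of the same algorithm expressed over plain memory words. The field roles, the count encoding (bit 0 = compression flag, 0 meaning compressed, upper bits = length), and the zero padding that makes 8-byte over-reads safe are taken from the code above; the FakeString type, its layout, and all names are invented for the sketch and are not ART's String representation. The sketch also assumes kUseStringCompression is enabled, so the loop counts bytes.

#include <cstdint>
#include <cstring>

// Illustrative stand-in for the three fields the intrinsic reads; not ART's layout.
struct FakeString {
  uint32_t klass;        // plays the role of the class reference at class_offset
  uint32_t count;        // plays the role of the count field: (length << 1) | flag
  const uint8_t* value;  // plays the role of the char data at value_offset,
                         // assumed zero-padded up to a multiple of 8 bytes
};

bool StringEqualsSketch(const FakeString* str, const FakeString* arg) {
  if (arg == nullptr) return false;            // Beqz(arg, &return_false)
  if (str == arg) return true;                 // Beq(str, arg, &return_true)
  if (str->klass != arg->klass) return false;  // class fields differ: not a String
  if (str->count != arg->count) return false;  // length or compression flag differs
  if (str->count == 0u) return true;           // both strings are empty

  // Decode the count field: bit 0 is the compression flag (0 = compressed),
  // the remaining bits are the length; bytes to compare = length << flag.
  // Example: count 0x0B -> flag 1, length 5 -> 10 bytes (5 uncompressed chars).
  const uint32_t flag = str->count & 1u;
  const uint64_t bytes = static_cast<uint64_t>(str->count >> 1) << flag;

  // Compare 8 bytes per step; the trailing zero padding on both strings means
  // the final over-read compares equal padding against equal padding.
  for (uint64_t i = 0; i < bytes; i += sizeof(uint64_t)) {
    uint64_t a;
    uint64_t b;
    std::memcpy(&a, str->value + i, sizeof(a));
    std::memcpy(&b, arg->value + i, sizeof(b));
    if (a != b) return false;
  }
  return true;
}

The generated code differs mechanically from this sketch: it keeps a running byte offset in temp1, counts the remaining work down in temp (bytes with compression, chars without, hence the -8 / -4 decrement), and branches to the return_false / return_true labels instead of returning.
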
static void EmitStoreConditional(Riscv64Assembler* assembler,
DataType::Type type,
XRegister ptr,