riscv64: Implement boxing `valueOf()` intrinsics.
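
Implement the Byte/Short/Character/Integer `valueOf()` intrinsics with a
shared `HandleValueOf()` helper, and factor out `LoadBootImageAddress()`
and `LoadIntrinsicDeclaringClass()` helpers in the code generator.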

Test: testrunner.py --target --64 --ndebug --optimizing --jit
Bug: 283082089
Change-Id: Icd2175edcb835638b7d0667908ea9076580111e1
diff --git a/compiler/optimizing/code_generator_riscv64.cc b/compiler/optimizing/code_generator_riscv64.cc
index fb44abc..7331ad6 100644
--- a/compiler/optimizing/code_generator_riscv64.cc
+++ b/compiler/optimizing/code_generator_riscv64.cc
@@ -6581,6 +6581,44 @@
   EmitPcRelativeLwuPlaceholder(info_low, dest, dest);
 }
 
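+// Load the address of a boot image object. When compiling the boot image itself,
+// emit a pair of PC-relative patches filled in at link time; for PIC AOT code,
+// load the address indirectly via `LoadBootImageRelRoEntry()`; for JIT code,
+// embed the absolute address directly.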
+void CodeGeneratorRISCV64::LoadBootImageAddress(XRegister dest, uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
+    EmitPcRelativeAuipcPlaceholder(info_high, dest);
+    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
+    EmitPcRelativeAddiPlaceholder(info_low, dest, dest);
+  } else if (GetCompilerOptions().GetCompilePic()) {
+    LoadBootImageRelRoEntry(dest, boot_image_reference);
+  } else {
+    DCHECK(GetCompilerOptions().IsJitCompiler());
+    gc::Heap* heap = Runtime::Current()->GetHeap();
+    DCHECK(!heap->GetBootImageSpaces().empty());
+    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
+    // Note: Boot image is in the low 4GiB (usually the low 2GiB, requiring just LUI+ADDI).
+    // We may not have an available scratch register for `LoadConst64()` but it never
+    // emits better code than `Li()` for 32-bit unsigned constants anyway.
+    __ Li(dest, reinterpret_cast32<uint32_t>(address));
+  }
+}
+
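+// Load the `java.lang.Class` of the intrinsic's declaring class, e.g.
+// `java.lang.Integer` for `Integer.valueOf()`.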
+void CodeGeneratorRISCV64::LoadIntrinsicDeclaringClass(XRegister dest, HInvoke* invoke) {
+  DCHECK_NE(invoke->GetIntrinsic(), Intrinsics::kNone);
+  if (GetCompilerOptions().IsBootImage()) {
+    MethodReference target_method = invoke->GetResolvedMethodReference();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    LoadTypeForBootImageIntrinsic(dest, TypeReference(target_method.dex_file, type_idx));
+  } else {
+    uint32_t boot_image_offset = GetBootImageOffsetOfIntrinsicDeclaringClass(invoke);
+    LoadBootImageAddress(dest, boot_image_offset);
+  }
+}
+
 void CodeGeneratorRISCV64::LoadClassRootForIntrinsic(XRegister dest, ClassRoot class_root) {
   if (GetCompilerOptions().IsBootImage()) {
     ScopedObjectAccess soa(Thread::Current());
@@ -6589,15 +6627,7 @@
     LoadTypeForBootImageIntrinsic(dest, target_type);
   } else {
     uint32_t boot_image_offset = GetBootImageOffset(class_root);
-    if (GetCompilerOptions().GetCompilePic()) {
-      LoadBootImageRelRoEntry(dest, boot_image_offset);
-    } else {
-      DCHECK(GetCompilerOptions().IsJitCompiler());
-      gc::Heap* heap = Runtime::Current()->GetHeap();
-      DCHECK(!heap->GetBootImageSpaces().empty());
-      const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset;
-      __ Loadwu(dest, DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
-    }
+    LoadBootImageAddress(dest, boot_image_offset);
   }
 }
 
diff --git a/compiler/optimizing/code_generator_riscv64.h b/compiler/optimizing/code_generator_riscv64.h
index 20c48db..522adfd 100644
--- a/compiler/optimizing/code_generator_riscv64.h
+++ b/compiler/optimizing/code_generator_riscv64.h
@@ -160,10 +160,6 @@
   V(CRC32UpdateByteBuffer)                      \
   V(MethodHandleInvokeExact)                    \
   V(MethodHandleInvoke)                         \
-  V(ByteValueOf)                                \
-  V(ShortValueOf)                               \
-  V(CharacterValueOf)                           \
-  V(IntegerValueOf)                             \
 
 // Method register on invoke.
 static const XRegister kArtMethodRegister = A0;
@@ -690,6 +686,8 @@
 
   void LoadTypeForBootImageIntrinsic(XRegister dest, TypeReference target_type);
   void LoadBootImageRelRoEntry(XRegister dest, uint32_t boot_image_offset);
+  void LoadBootImageAddress(XRegister dest, uint32_t boot_image_reference);
+  void LoadIntrinsicDeclaringClass(XRegister dest, HInvoke* invoke);
   void LoadClassRootForIntrinsic(XRegister dest, ClassRoot class_root);
 
   void LoadMethod(MethodLoadKind load_kind, Location temp, HInvoke* invoke);
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index fb3f113..de15382 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -17,7 +17,9 @@
 #include "intrinsics_riscv64.h"
 
 #include "code_generator_riscv64.h"
+#include "intrinsic_objects.h"
 #include "intrinsics_utils.h"
+#include "well_known_classes.h"
 
 namespace art HIDDEN {
 namespace riscv64 {
@@ -473,6 +475,98 @@
   EmitIntegralUnOp(invoke, [&](XRegister rd, XRegister rs1) { __ Ctz(rd, rs1); });
 }
 
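+// Define the `Visit{Byte,Short,Character,Integer}ValueOf()` entry points for the
+// locations builder and the code generator; the shared code generation logic
+// lives in `HandleValueOf()` below.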
+#define VISIT_INTRINSIC(name, low, high, type, start_index) \
+  void IntrinsicLocationsBuilderRISCV64::Visit##name##ValueOf(HInvoke* invoke) { \
+    InvokeRuntimeCallingConvention calling_convention; \
+    IntrinsicVisitor::ComputeValueOfLocations( \
+        invoke, \
+        codegen_, \
+        low, \
+        (high) - (low) + 1, \
+        calling_convention.GetReturnLocation(DataType::Type::kReference), \
+        Location::RegisterLocation(calling_convention.GetRegisterAt(0))); \
+  } \
+  void IntrinsicCodeGeneratorRISCV64::Visit##name##ValueOf(HInvoke* invoke) { \
+    IntrinsicVisitor::ValueOfInfo info = \
+        IntrinsicVisitor::ComputeValueOfInfo( \
+            invoke, \
+            codegen_->GetCompilerOptions(), \
+            WellKnownClasses::java_lang_##name##_value, \
+            low, \
+            (high) - (low) + 1, \
+            start_index); \
+    HandleValueOf(invoke, info, type); \
+  }
+  BOXED_TYPES(VISIT_INTRINSIC)
+#undef VISIT_INTRINSIC
+
+void IntrinsicCodeGeneratorRISCV64::HandleValueOf(HInvoke* invoke,
+                                                  const IntrinsicVisitor::ValueOfInfo& info,
+                                                  DataType::Type type) {
+  Riscv64Assembler* assembler = codegen_->GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+  XRegister out = locations->Out().AsRegister<XRegister>();
+  ScratchRegisterScope srs(assembler);
+  XRegister temp = srs.AllocateXRegister();
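+  // Allocate the boxed object via the runtime: load the declaring class into
+  // `out` (the first runtime call argument) and call the allocation entrypoint.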
+  auto allocate_instance = [&]() {
+    DCHECK_EQ(out, InvokeRuntimeCallingConvention().GetRegisterAt(0));
+    codegen_->LoadIntrinsicDeclaringClass(out, invoke);
+    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+    CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
+  };
+  if (invoke->InputAt(0)->IsIntConstant()) {
+    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
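+    // The unsigned comparison below checks `info.low <= value < info.low + info.length`.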
+    if (static_cast<uint32_t>(value - info.low) < info.length) {
+      // Just embed the object in the code.
+      DCHECK_NE(info.value_boot_image_reference, ValueOfInfo::kInvalidReference);
+      codegen_->LoadBootImageAddress(out, info.value_boot_image_reference);
+    } else {
+      DCHECK(locations->CanCall());
+      // Allocate and initialize a new object.
+      // TODO: If we JIT, we could allocate the object now, and store it in the
+      // JIT object table.
+      allocate_instance();
+      __ Li(temp, value);
+      codegen_->GetInstructionVisitor()->Store(
+          Location::RegisterLocation(temp), out, info.value_offset, type);
+      // Class pointer and `value` final field stores require a barrier before publication.
+      codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
+    }
+  } else {
+    DCHECK(locations->CanCall());
+    XRegister in = locations->InAt(0).AsRegister<XRegister>();
+    Riscv64Label allocate, done;
+    // Check bounds of our cache.
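+    // After biasing by `-info.low`, values below `info.low` wrap around to large
+    // unsigned numbers, so a single `Bgeu` rejects both out-of-range sides.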
+    __ AddConst32(out, in, -info.low);
+    __ Li(temp, info.length);
+    __ Bgeu(out, temp, &allocate);
+    // If the value is within the bounds, load the object directly from the array.
+    codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_reference);
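+    // temp = (out << 2) + temp: the address of the 4-byte (compressed) reference
+    // at index `out` in the boxed-values array.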
+    __ Sh2Add(temp, out, temp);
+    __ Loadwu(out, temp, 0);
+    codegen_->MaybeUnpoisonHeapReference(out);
+    __ J(&done);
+    __ Bind(&allocate);
+    // Otherwise allocate and initialize a new object.
+    allocate_instance();
+    codegen_->GetInstructionVisitor()->Store(
+        Location::RegisterLocation(in), out, info.value_offset, type);
+    // Class pointer and `value` final field stores require a barrier before publication.
+    codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
+    __ Bind(&done);
+  }
+}
+
 static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                        Riscv64Assembler* assembler,
                                        CodeGeneratorRISCV64* codegen,
diff --git a/compiler/optimizing/intrinsics_riscv64.h b/compiler/optimizing/intrinsics_riscv64.h
index 49c057d..8160c05 100644
--- a/compiler/optimizing/intrinsics_riscv64.h
+++ b/compiler/optimizing/intrinsics_riscv64.h
@@ -70,9 +70,12 @@
 
  private:
   Riscv64Assembler* GetAssembler();
-
   ArenaAllocator* GetAllocator();
 
+  void HandleValueOf(HInvoke* invoke,
+                     const IntrinsicVisitor::ValueOfInfo& info,
+                     DataType::Type type);
+
   CodeGeneratorRISCV64* const codegen_;
 
   DISALLOW_COPY_AND_ASSIGN(IntrinsicCodeGeneratorRISCV64);