arm/arm64: Clean up intrinsic slow paths.

Generalize and use the slow path template IntrinsicSlowPath
from intrinsics_utils.h.
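
For reference, the shape of the generalization is sketched below as a
standalone C++ toy: a single slow path template parameterized on the
back end's slow-path base class and assembler type, so the emit logic
is written once and instantiated per architecture. All types here are
illustrative stand-ins, not ART's actual classes; see the real template
in intrinsics_utils.h (which is also parameterized on the dex calling
convention visitor).

  // Toy sketch: one slow-path template reused by several back ends,
  // each supplying its own base class and assembler as template args.
  #include <iostream>

  struct ArmAssembler {
    void Bind(const char* label) { std::cout << "arm: " << label << "\n"; }
  };
  struct Arm64Assembler {
    void Bind(const char* label) { std::cout << "arm64: " << label << "\n"; }
  };

  // Per-architecture slow-path base classes, as each code generator has.
  struct ArmSlowPathCode {
    virtual ~ArmSlowPathCode() {}
    virtual void EmitNativeCode(ArmAssembler* assembler) = 0;
  };
  struct Arm64SlowPathCode {
    virtual ~Arm64SlowPathCode() {}
    virtual void EmitNativeCode(Arm64Assembler* assembler) = 0;
  };

  // The shared template: the emit logic lives in one place; a back end
  // only provides its base class and assembler type.
  template <typename TSlowPathCode, typename TAssembler>
  class IntrinsicSlowPath : public TSlowPathCode {
   public:
    void EmitNativeCode(TAssembler* assembler) override {
      assembler->Bind("intrinsic slow path entry");
      // The real template additionally saves live registers, moves the
      // arguments into the dex calling convention, calls the runtime,
      // and copies the result back via MoveFromReturnRegister().
    }
  };

  int main() {
    ArmAssembler arm;
    Arm64Assembler arm64;
    IntrinsicSlowPath<ArmSlowPathCode, ArmAssembler> arm_path;
    IntrinsicSlowPath<Arm64SlowPathCode, Arm64Assembler> arm64_path;
    arm_path.EmitNativeCode(&arm);      // emits via the arm32 assembler
    arm64_path.EmitNativeCode(&arm64);  // emits via the arm64 assembler
    return 0;
  }
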
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_taimen-userdebug boot image is unchanged.
Change-Id: Ia8fa4e1b31c1f190fc5f02671336caec15e4cf4d
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 8349732..487d091 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -668,10 +668,7 @@
   void GenerateVirtualCall(
       HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) override;
 
-  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
-                              DataType::Type type ATTRIBUTE_UNUSED) override {
-    UNIMPLEMENTED(FATAL);
-  }
+  void MoveFromReturnRegister(Location trg, DataType::Type type) override;
 
   // Add a new boot image intrinsic patch for an instruction and return the label
   // to be bound before the instruction. The instruction will be either the
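
The removed arm64 stub was UNIMPLEMENTED(FATAL); the new declaration
implies an out-of-line definition in code_generator_arm64.cc so that
the shared IntrinsicSlowPath template can call it. The body is not part
of this excerpt; a plausible sketch follows, reusing the existing arm64
codegen helpers RegisterFrom(), FPRegisterFrom() and
ARM64ReturnLocation() (assumption: the definition mirrors the helper
the arm64 intrinsics used before this change).

  void CodeGeneratorARM64::MoveFromReturnRegister(Location trg, DataType::Type type) {
    if (!trg.IsValid()) {
      DCHECK(type == DataType::Type::kVoid);
      return;  // Nothing to move for void calls.
    }
    DCHECK_NE(type, DataType::Type::kVoid);
    if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
      // Core register result: copy from the return register (w0/x0).
      Register trg_reg = RegisterFrom(trg, type);
      Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
      GetVIXLAssembler()->Mov(trg_reg, res_reg);
    } else {
      // Floating-point result: copy from the FP return register (s0/d0).
      VRegister trg_reg = FPRegisterFrom(trg, type);
      VRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
      GetVIXLAssembler()->Fmov(trg_reg, res_reg);
    }
  }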