AArch64: Enable Inlining.

This patch fixes the remaining issues that prevented inlining from being
enabled for ARM64: indexed loads and stores now sign-extend a 32-bit
index register before forming the address, and reference operands are
temporarily narrowed to 32-bit registers (see the TODOs in the diff).

Change-Id: I2d85b7c4f3fb2b667bf6029fbc271ab954378889
Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
Signed-off-by: Matteo Franchin <matteo.franchin@arm.com>
diff --git a/compiler/dex/quick/arm64/utility_arm64.cc b/compiler/dex/quick/arm64/utility_arm64.cc
index aaee91b..eb08404 100644
--- a/compiler/dex/quick/arm64/utility_arm64.cc
+++ b/compiler/dex/quick/arm64/utility_arm64.cc
@@ -893,7 +893,16 @@
   int expected_scale = 0;
   ArmOpcode opcode = kA64Brk1d;
   r_base = Check64BitReg(r_base);
-  r_index = Check64BitReg(r_index);
+
+  // TODO(Arm64): The sign extension of r_index should be carried out by using an extended
+  //   register offset load (rather than doing the sign extension in a separate instruction).
+  if (r_index.Is32Bit()) {
+    // Assemble: ``sxtw xN, wN''.
+    r_index = As64BitReg(r_index);
+    NewLIR4(WIDE(kA64Sbfm4rrdd), r_index.GetReg(), r_index.GetReg(), 0, 31);
+  }
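+  // For reference, the extended register offset form mentioned in the TODO above would look
+  // like "ldr w0, [x1, w2, sxtw #2]", folding the sign extension into the load itself.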
 
   if (r_dest.IsFloat()) {
     if (r_dest.IsDouble()) {
@@ -920,9 +929,12 @@
       opcode = WIDE(kA64Ldr4rXxG);
       expected_scale = 3;
       break;
+    case kReference:
+      // TODO(Arm64): r_dest must be a 64-bit register below. Remove this hack once it is.
+      r_dest = (r_dest.Is64Bit()) ? As32BitReg(r_dest) : r_dest;
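+      // Intentional fall-through.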
     case kSingle:
     case k32:
-    case kReference:
       r_dest = Check32BitReg(r_dest);
       opcode = kA64Ldr4rXxG;
       expected_scale = 2;
@@ -973,7 +985,16 @@
   int expected_scale = 0;
   ArmOpcode opcode = kA64Brk1d;
   r_base = Check64BitReg(r_base);
-  r_index = Check64BitReg(r_index);
+
+  // TODO(Arm64): The sign extension of r_index should be carried out by using an extended
+  //   register offset store (rather than doing the sign extension in a separate instruction).
+  if (r_index.Is32Bit()) {
+    // Assemble: ``sxtw xN, wN''.
+    r_index = As64BitReg(r_index);
+    NewLIR4(WIDE(kA64Sbfm4rrdd), r_index.GetReg(), r_index.GetReg(), 0, 31);
+  }
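+  // For reference, the extended register offset form mentioned in the TODO above would look
+  // like "str w0, [x1, w2, sxtw #2]", folding the sign extension into the store itself.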
 
   if (r_src.IsFloat()) {
     if (r_src.IsDouble()) {
@@ -1000,9 +1021,12 @@
       opcode = WIDE(kA64Str4rXxG);
       expected_scale = 3;
       break;
+    case kReference:
+      // TODO(Arm64): r_src must be a 64-bit register below. Remove this hack once it is.
+      r_src = (r_src.Is64Bit()) ? As32BitReg(r_src) : r_src;
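+      // Intentional fall-through.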
     case kSingle:     // Intentional fall-through.
     case k32:         // Intentional fall-through.
-    case kReference:
       r_src = Check32BitReg(r_src);
       opcode = kA64Str4rXxG;
       expected_scale = 2;
@@ -1066,9 +1090,12 @@
         alt_opcode = WIDE(kA64Ldur3rXd);
       }
       break;
+    case kReference:
+      // TODO(Arm64): r_dest must be a 64-bit register below. Remove this hack once it is.
+      r_dest = (r_dest.Is64Bit()) ? As32BitReg(r_dest) : r_dest;
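+      // Intentional fall-through.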
     case kSingle:     // Intentional fall-through.
     case k32:         // Intentional fall-through.
-    case kReference:
       r_dest = Check32BitReg(r_dest);
       scale = 2;
       if (r_dest.IsFloat()) {
@@ -1165,9 +1192,12 @@
         alt_opcode = FWIDE(kA64Stur3rXd);
       }
       break;
+    case kReference:
+      // TODO(Arm64): r_src must be a 64-bit register below. Remove this hack once it is.
+      r_src = (r_src.Is64Bit()) ? As32BitReg(r_src) : r_src;
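+      // Intentional fall-through.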
     case kSingle:     // Intentional fall-through.
     case k32:         // Intentional fall-through.
-    case kReference:
       r_src = Check32BitReg(r_src);
       scale = 2;
       if (r_src.IsFloat()) {