Add tests for Thumb2RelativePatcher.

Also make the thumb2/arm64 thunk allocation precise instead
of eagerly reserving thunk space. This allows calls to use
the maximum positive displacement.
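
For reference, a rough sketch of the Thumb2 arithmetic the new tests
lean on (illustrative names only, not patcher code; the authoritative
constants live in the patcher and in the tests): a Thumb2 BL encodes
an even displacement of roughly +/-16MiB measured from the PC, and the
Thumb2 PC is the address of the BL plus 4, so the farthest forward
target reachable from the BL instruction itself is:

  #include <cstdint>
  constexpr uint32_t MB = 1024u * 1024u;
  constexpr uint32_t kMaxBlReachFromPc = 16u * MB - 2u;  // largest even forward BL offset
  constexpr uint32_t kPcAdjustment = 4u;                 // Thumb2 PC = BL address + 4
  constexpr uint32_t kMaxPositiveDisp = kMaxBlReachFromPc + kPcAdjustment;
  static_assert(kMaxPositiveDisp == 16u * MB + 2u,
                "matches the test's 16 * MB - 2u + 4u /* PC adjustment */");

With eager thunk reservation a call at that distance would already
have been given a thunk; with precise reservation it links directly,
which is what CallOtherAlmostTooFarAfter exercises.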

Change-Id: Ifa95b0bb00bd73eeab0c2905d21e2f3078f4b0a8
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 58a3290..f834a38 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -193,6 +193,7 @@
   compiler/elf_writer_test.cc \
   compiler/image_test.cc \
   compiler/jni/jni_compiler_test.cc \
+  compiler/linker/arm/relative_patcher_thumb2_test.cc \
   compiler/linker/x86/relative_patcher_x86_test.cc \
   compiler/linker/x86_64/relative_patcher_x86_64_test.cc \
   compiler/oat_test.cc \
diff --git a/compiler/linker/arm/relative_patcher_arm_base.cc b/compiler/linker/arm/relative_patcher_arm_base.cc
index ecbbd09..2eae2a8 100644
--- a/compiler/linker/arm/relative_patcher_arm_base.cc
+++ b/compiler/linker/arm/relative_patcher_arm_base.cc
@@ -24,8 +24,9 @@
 namespace linker {
 
 uint32_t ArmBaseRelativePatcher::ReserveSpace(uint32_t offset,
-                                              const CompiledMethod* compiled_method) {
-  return ReserveSpaceInternal(offset, compiled_method, 0u);
+                                              const CompiledMethod* compiled_method,
+                                              MethodReference method_ref) {
+  return ReserveSpaceInternal(offset, compiled_method, method_ref, 0u);
 }
 
 uint32_t ArmBaseRelativePatcher::WriteThunks(OutputStream* out, uint32_t offset) {
@@ -66,6 +67,7 @@
 
 uint32_t ArmBaseRelativePatcher::ReserveSpaceInternal(uint32_t offset,
                                                       const CompiledMethod* compiled_method,
+                                                      MethodReference method_ref,
                                                       uint32_t max_extra_space) {
   // NOTE: The final thunk can be reserved from InitCodeMethodVisitor::EndClass() while it
   // may be written early by WriteCodeMethodVisitor::VisitMethod() for a deduplicated chunk
@@ -73,7 +75,8 @@
 // offset after reserving or writing any chunk.
   if (UNLIKELY(compiled_method == nullptr)) {
     uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
-    bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset);
+    DCHECK(method_ref.dex_file == nullptr && method_ref.dex_method_index == 0u);
+    bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset, method_ref, aligned_offset);
     if (needs_thunk) {
       thunk_locations_.push_back(aligned_offset);
       offset = CompiledMethod::AlignCode(aligned_offset + thunk_code_.size(), instruction_set_);
@@ -86,9 +89,12 @@
   uint32_t next_aligned_offset = compiled_method->AlignCode(quick_code_offset + quick_code_size);
   // Adjust for extra space required by the subclass.
   next_aligned_offset = compiled_method->AlignCode(next_aligned_offset + max_extra_space);
+  // TODO: ignore unprocessed patches targeting this method if they can reach quick_code_offset.
+  // We need the MethodReference for that.
   if (!unprocessed_patches_.empty() &&
       next_aligned_offset - unprocessed_patches_.front().second > max_positive_displacement_) {
-    bool needs_thunk = ReserveSpaceProcessPatches(next_aligned_offset);
+    bool needs_thunk = ReserveSpaceProcessPatches(quick_code_offset, method_ref,
+                                                  next_aligned_offset);
     if (needs_thunk) {
       // A single thunk will cover all pending patches.
       unprocessed_patches_.clear();
@@ -129,30 +135,42 @@
   return displacement;
 }
 
-bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t next_aligned_offset) {
+bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t quick_code_offset,
+                                                        MethodReference method_ref,
+                                                        uint32_t next_aligned_offset) {
   // Process as many patches as possible, stop only on unresolved targets or calls too far back.
   while (!unprocessed_patches_.empty()) {
+    MethodReference patch_ref = unprocessed_patches_.front().first;
     uint32_t patch_offset = unprocessed_patches_.front().second;
-    auto result = provider_->FindMethodOffset(unprocessed_patches_.front().first);
-    if (!result.first) {
-      // If still unresolved, check if we have a thunk within range.
-      DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
-      if (thunk_locations_.empty() ||
-          patch_offset - thunk_locations_.back() > max_negative_displacement_) {
-        return next_aligned_offset - patch_offset > max_positive_displacement_;
-      }
-    } else if (result.second >= patch_offset) {
-      DCHECK_LE(result.second - patch_offset, max_positive_displacement_);
-    } else {
-      // When calling back, check if we have a thunk that's closer than the actual target.
-      uint32_t target_offset =
-          (thunk_locations_.empty() || result.second > thunk_locations_.back())
-          ? result.second
-          : thunk_locations_.back();
-      DCHECK_GT(patch_offset, target_offset);
-      if (patch_offset - target_offset > max_negative_displacement_) {
+    DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
+    if (patch_ref.dex_file == method_ref.dex_file &&
+        patch_ref.dex_method_index == method_ref.dex_method_index) {
+      DCHECK_GT(quick_code_offset, patch_offset);
+      if (quick_code_offset - patch_offset > max_positive_displacement_) {
         return true;
       }
+    } else {
+      auto result = provider_->FindMethodOffset(patch_ref);
+      if (!result.first) {
+        // If still unresolved, check if we have a thunk within range.
+        if (thunk_locations_.empty() ||
+            patch_offset - thunk_locations_.back() > max_negative_displacement_) {
+          return next_aligned_offset - patch_offset > max_positive_displacement_;
+        }
+      } else {
+        uint32_t target_offset = result.second - CompiledCode::CodeDelta(instruction_set_);
+        if (target_offset >= patch_offset) {
+          DCHECK_LE(target_offset - patch_offset, max_positive_displacement_);
+        } else {
+          // When calling back, check if we have a thunk that's closer than the actual target.
+          if (!thunk_locations_.empty()) {
+            target_offset = std::max(target_offset, thunk_locations_.back());
+          }
+          if (patch_offset - target_offset > max_negative_displacement_) {
+            return true;
+          }
+        }
+      }
     }
     unprocessed_patches_.pop_front();
   }
diff --git a/compiler/linker/arm/relative_patcher_arm_base.h b/compiler/linker/arm/relative_patcher_arm_base.h
index a88d25b..35a8b8e 100644
--- a/compiler/linker/arm/relative_patcher_arm_base.h
+++ b/compiler/linker/arm/relative_patcher_arm_base.h
@@ -27,7 +27,8 @@
 
 class ArmBaseRelativePatcher : public RelativePatcher {
  public:
-  uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method) OVERRIDE;
+  uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method,
+                        MethodReference method_ref) OVERRIDE;
   uint32_t WriteThunks(OutputStream* out, uint32_t offset) OVERRIDE;
 
  protected:
@@ -36,11 +37,12 @@
                          uint32_t max_positive_displacement, uint32_t max_negative_displacement);
 
   uint32_t ReserveSpaceInternal(uint32_t offset, const CompiledMethod* compiled_method,
-                                uint32_t max_extra_space);
+                                MethodReference method_ref, uint32_t max_extra_space);
   uint32_t CalculateDisplacement(uint32_t patch_offset, uint32_t target_offset);
 
  private:
-  bool ReserveSpaceProcessPatches(uint32_t next_aligned_offset);
+  bool ReserveSpaceProcessPatches(uint32_t quick_code_offset, MethodReference method_ref,
+                                  uint32_t next_aligned_offset);
 
   RelativePatcherTargetProvider* const provider_;
   const InstructionSet instruction_set_;
@@ -54,6 +56,8 @@
   typedef std::pair<MethodReference, uint32_t> UnprocessedPatch;
   std::deque<UnprocessedPatch> unprocessed_patches_;
 
+  friend class Thumb2RelativePatcherTest;
+
   DISALLOW_COPY_AND_ASSIGN(ArmBaseRelativePatcher);
 };
 
diff --git a/compiler/linker/arm/relative_patcher_thumb2_test.cc b/compiler/linker/arm/relative_patcher_thumb2_test.cc
new file mode 100644
index 0000000..abdfd6d
--- /dev/null
+++ b/compiler/linker/arm/relative_patcher_thumb2_test.cc
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "linker/relative_patcher_test.h"
+#include "linker/arm/relative_patcher_thumb2.h"
+
+namespace art {
+namespace linker {
+
+class Thumb2RelativePatcherTest : public RelativePatcherTest {
+ public:
+  Thumb2RelativePatcherTest() : RelativePatcherTest(kThumb2, "default") { }
+
+ protected:
+  static const uint8_t kCallRawCode[];
+  static const ArrayRef<const uint8_t> kCallCode;
+  static const uint8_t kNopRawCode[];
+  static const ArrayRef<const uint8_t> kNopCode;
+
+  // Branches within range [-256, 256) can be created from these by adding the low 8 bits.
+  static constexpr uint32_t kBlPlus0 = 0xf000f800;
+  static constexpr uint32_t kBlMinus256 = 0xf7ffff00;
+
+  // Special BL values.
+  static constexpr uint32_t kBlPlusMax = 0xf3ffd7ff;
+  static constexpr uint32_t kBlMinusMax = 0xf400d000;
+
+  bool Create2MethodsWithGap(const ArrayRef<const uint8_t>& method1_code,
+                             const ArrayRef<LinkerPatch>& method1_patches,
+                             const ArrayRef<const uint8_t>& method3_code,
+                             const ArrayRef<LinkerPatch>& method3_patches,
+                             uint32_t distance_without_thunks) {
+    CHECK_EQ(distance_without_thunks % kArmAlignment, 0u);
+    const uint32_t method1_offset =
+        CompiledCode::AlignCode(kTrampolineSize, kThumb2) + sizeof(OatQuickMethodHeader);
+    AddCompiledMethod(MethodRef(1u), method1_code, ArrayRef<LinkerPatch>(method1_patches));
+
+    // We want to put the method3 at a very precise offset.
+    const uint32_t method3_offset = method1_offset + distance_without_thunks;
+    CHECK(IsAligned<kArmAlignment>(method3_offset - sizeof(OatQuickMethodHeader)));
+
+    // Calculate size of method2 so that we put method3 at the correct place.
+    const uint32_t method2_offset =
+        CompiledCode::AlignCode(method1_offset + method1_code.size(), kThumb2) +
+        sizeof(OatQuickMethodHeader);
+    const uint32_t method2_size = (method3_offset - sizeof(OatQuickMethodHeader) - method2_offset);
+    std::vector<uint8_t> method2_raw_code(method2_size);
+    ArrayRef<const uint8_t> method2_code(method2_raw_code);
+    AddCompiledMethod(MethodRef(2u), method2_code, ArrayRef<LinkerPatch>());
+
+    AddCompiledMethod(MethodRef(3u), method3_code, method3_patches);
+
+    Link();
+
+    // Check assumptions.
+    CHECK_EQ(GetMethodOffset(1), method1_offset);
+    CHECK_EQ(GetMethodOffset(2), method2_offset);
+    auto result3 = method_offset_map_.FindMethodOffset(MethodRef(3));
+    CHECK(result3.first);
+    // There may be a thunk before method3.
+    if (result3.second == method3_offset + 1 /* thumb mode */) {
+      return false;  // No thunk.
+    } else {
+      uint32_t aligned_thunk_size = CompiledCode::AlignCode(ThunkSize(), kThumb2);
+      CHECK_EQ(result3.second, method3_offset + aligned_thunk_size + 1 /* thumb mode */);
+      return true;   // Thunk present.
+    }
+  }
+
+  uint32_t GetMethodOffset(uint32_t method_idx) {
+    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
+    CHECK(result.first);
+    CHECK_NE(result.second & 1u, 0u);
+    return result.second - 1 /* thumb mode */;
+  }
+
+  uint32_t ThunkSize() {
+    return static_cast<Thumb2RelativePatcher*>(patcher_.get())->thunk_code_.size();
+  }
+
+  bool CheckThunk(uint32_t thunk_offset) {
+    Thumb2RelativePatcher* patcher = static_cast<Thumb2RelativePatcher*>(patcher_.get());
+    ArrayRef<const uint8_t> expected_code(patcher->thunk_code_);
+    if (output_.size() < thunk_offset + expected_code.size()) {
+      LOG(ERROR) << "output_.size() == " << output_.size() << " < "
+          << "thunk_offset + expected_code.size() == " << (thunk_offset + expected_code.size());
+      return false;
+    }
+    ArrayRef<const uint8_t> linked_code(&output_[thunk_offset], expected_code.size());
+    if (linked_code == expected_code) {
+      return true;
+    }
+    // Log failure info.
+    DumpDiff(expected_code, linked_code);
+    return false;
+  }
+
+  std::vector<uint8_t> GenNopsAndBl(size_t num_nops, uint32_t bl) {
+    std::vector<uint8_t> result;
+    result.reserve(num_nops * 2u + 4u);
+    for (size_t i = 0; i != num_nops; ++i) {
+      result.push_back(0x00);
+      result.push_back(0xbf);
+    }
+    result.push_back(static_cast<uint8_t>(bl >> 16));
+    result.push_back(static_cast<uint8_t>(bl >> 24));
+    result.push_back(static_cast<uint8_t>(bl));
+    result.push_back(static_cast<uint8_t>(bl >> 8));
+    return result;
+  }
+};
+
+const uint8_t Thumb2RelativePatcherTest::kCallRawCode[] = {
+    0x00, 0xf0, 0x00, 0xf8
+};
+
+const ArrayRef<const uint8_t> Thumb2RelativePatcherTest::kCallCode(kCallRawCode);
+
+const uint8_t Thumb2RelativePatcherTest::kNopRawCode[] = {
+    0x00, 0xbf
+};
+
+const ArrayRef<const uint8_t> Thumb2RelativePatcherTest::kNopCode(kNopRawCode);
+
+TEST_F(Thumb2RelativePatcherTest, CallSelf) {
+  LinkerPatch patches[] = {
+      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
+  };
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  Link();
+
+  static const uint8_t expected_code[] = {
+      0xff, 0xf7, 0xfe, 0xff
+  };
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallOther) {
+  LinkerPatch method1_patches[] = {
+      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
+  };
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(method1_patches));
+  LinkerPatch method2_patches[] = {
+      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
+  };
+  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<LinkerPatch>(method2_patches));
+  Link();
+
+  uint32_t method1_offset = GetMethodOffset(1u);
+  uint32_t method2_offset = GetMethodOffset(2u);
+  uint32_t diff_after = method2_offset - (method1_offset + 4u /* PC adjustment */);
+  ASSERT_EQ(diff_after & 1u, 0u);
+  ASSERT_LT(diff_after >> 1, 1u << 8);  // Simple encoding, (diff_after >> 1) fits into 8 bits.
+  static const uint8_t method1_expected_code[] = {
+      0x00, 0xf0, static_cast<uint8_t>(diff_after >> 1), 0xf8
+  };
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
+  uint32_t diff_before = method1_offset - (method2_offset + 4u /* PC adjustment */);
+  ASSERT_EQ(diff_before & 1u, 0u);
+  ASSERT_GE(diff_before, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0.
+  auto method2_expected_code = GenNopsAndBl(0u, kBlMinus256 | ((diff_before >> 1) & 0xffu));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallTrampoline) {
+  LinkerPatch patches[] = {
+      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
+  };
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  Link();
+
+  uint32_t method1_offset = GetMethodOffset(1u);
+  uint32_t diff = kTrampolineOffset - (method1_offset + 4u);
+  ASSERT_EQ(diff & 1u, 0u);
+  ASSERT_GE(diff, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0 (checked as unsigned).
+  auto expected_code = GenNopsAndBl(0u, kBlMinus256 | ((diff >> 1) & 0xffu));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallOtherAlmostTooFarAfter) {
+  auto method1_raw_code = GenNopsAndBl(3u, kBlPlus0);
+  constexpr uint32_t bl_offset_in_method1 = 3u * 2u;  // After NOPs.
+  ArrayRef<const uint8_t> method1_code(method1_raw_code);
+  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
+  LinkerPatch method1_patches[] = {
+      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, 3u),
+  };
+
+  constexpr uint32_t max_positive_disp = 16 * MB - 2u + 4u /* PC adjustment */;
+  bool thunk_in_gap = Create2MethodsWithGap(method1_code, method1_patches,
+                                            kNopCode, ArrayRef<LinkerPatch>(),
+                                            bl_offset_in_method1 + max_positive_disp);
+  ASSERT_FALSE(thunk_in_gap);  // There should be no thunk.
+
+  // Check linked code.
+  auto expected_code = GenNopsAndBl(3u, kBlPlusMax);
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallOtherAlmostTooFarBefore) {
+  auto method3_raw_code = GenNopsAndBl(2u, kBlPlus0);
+  constexpr uint32_t bl_offset_in_method3 = 2u * 2u;  // After NOPs.
+  ArrayRef<const uint8_t> method3_code(method3_raw_code);
+  ASSERT_EQ(bl_offset_in_method3 + 4u, method3_code.size());
+  LinkerPatch method3_patches[] = {
+      LinkerPatch::RelativeCodePatch(bl_offset_in_method3, nullptr, 1u),
+  };
+
+  constexpr uint32_t max_negative_disp = 16 * MB - 4u /* PC adjustment */;
+  bool thunk_in_gap = Create2MethodsWithGap(kNopCode, ArrayRef<LinkerPatch>(),
+                                            method3_code, method3_patches,
+                                            max_negative_disp - bl_offset_in_method3);
+  ASSERT_FALSE(thunk_in_gap);  // There should be no thunk.
+
+  // Check linked code.
+  auto expected_code = GenNopsAndBl(2u, kBlMinusMax);
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(3u), ArrayRef<const uint8_t>(expected_code)));
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallOtherJustTooFarAfter) {
+  auto method1_raw_code = GenNopsAndBl(2u, kBlPlus0);
+  constexpr uint32_t bl_offset_in_method1 = 2u * 2u;  // After NOPs.
+  ArrayRef<const uint8_t> method1_code(method1_raw_code);
+  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
+  LinkerPatch method1_patches[] = {
+      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, 3u),
+  };
+
+  constexpr uint32_t just_over_max_positive_disp = 16 * MB + 4u /* PC adjustment */;
+  bool thunk_in_gap = Create2MethodsWithGap(method1_code, method1_patches,
+                                            kNopCode, ArrayRef<LinkerPatch>(),
+                                            bl_offset_in_method1 + just_over_max_positive_disp);
+  ASSERT_TRUE(thunk_in_gap);
+
+  uint32_t method1_offset = GetMethodOffset(1u);
+  uint32_t method3_offset = GetMethodOffset(3u);
+  uint32_t method3_header_offset = method3_offset - sizeof(OatQuickMethodHeader);
+  ASSERT_TRUE(IsAligned<kArmAlignment>(method3_header_offset));
+  uint32_t thunk_offset = method3_header_offset - CompiledCode::AlignCode(ThunkSize(), kThumb2);
+  ASSERT_TRUE(IsAligned<kArmAlignment>(thunk_offset));
+  uint32_t diff = thunk_offset - (method1_offset + bl_offset_in_method1 + 4u /* PC adjustment */);
+  ASSERT_EQ(diff & 1u, 0u);
+  ASSERT_GE(diff, 16 * MB - (1u << 9));  // Simple encoding, unknown bits fit into the low 8 bits.
+  auto expected_code = GenNopsAndBl(2u, 0xf3ffd700 | ((diff >> 1) & 0xffu));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
+  CheckThunk(thunk_offset);
+}
+
+TEST_F(Thumb2RelativePatcherTest, CallOtherJustTooFarBefore) {
+  auto method3_raw_code = GenNopsAndBl(3u, kBlPlus0);
+  constexpr uint32_t bl_offset_in_method3 = 3u * 2u;  // After NOPs.
+  ArrayRef<const uint8_t> method3_code(method3_raw_code);
+  ASSERT_EQ(bl_offset_in_method3 + 4u, method3_code.size());
+  LinkerPatch method3_patches[] = {
+      LinkerPatch::RelativeCodePatch(bl_offset_in_method3, nullptr, 1u),
+  };
+
+  constexpr uint32_t just_over_max_negative_disp = 16 * MB + 2 - 4u /* PC adjustment */;
+  bool thunk_in_gap = Create2MethodsWithGap(kNopCode, ArrayRef<LinkerPatch>(),
+                                            method3_code, method3_patches,
+                                            just_over_max_negative_disp - bl_offset_in_method3);
+  ASSERT_FALSE(thunk_in_gap);  // There should be a thunk, but it should be after method3.
+
+  // Check linked code.
+  uint32_t method3_offset = GetMethodOffset(3u);
+  uint32_t thunk_offset = CompiledCode::AlignCode(method3_offset + method3_code.size(), kThumb2);
+  uint32_t diff = thunk_offset - (method3_offset + bl_offset_in_method3 + 4u /* PC adjustment */);
+  ASSERT_EQ(diff & 1u, 0u);
+  ASSERT_LT(diff >> 1, 1u << 8);  // Simple encoding, (diff >> 1) fits into 8 bits.
+  auto expected_code = GenNopsAndBl(3u, kBlPlus0 | ((diff >> 1) & 0xffu));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(3u), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckThunk(thunk_offset));
+}
+
+}  // namespace linker
+}  // namespace art
diff --git a/compiler/linker/arm64/relative_patcher_arm64.cc b/compiler/linker/arm64/relative_patcher_arm64.cc
index bac3001..b61b3d8 100644
--- a/compiler/linker/arm64/relative_patcher_arm64.cc
+++ b/compiler/linker/arm64/relative_patcher_arm64.cc
@@ -41,10 +41,11 @@
 }
 
 uint32_t Arm64RelativePatcher::ReserveSpace(uint32_t offset,
-                                            const CompiledMethod* compiled_method) {
+                                            const CompiledMethod* compiled_method,
+                                            MethodReference method_ref) {
   if (!fix_cortex_a53_843419_) {
     DCHECK(adrp_thunk_locations_.empty());
-    return ReserveSpaceInternal(offset, compiled_method, 0u);
+    return ReserveSpaceInternal(offset, compiled_method, method_ref, 0u);
   }
 
   // Add thunks for previous method if any.
@@ -65,7 +66,7 @@
       }
     }
   }
-  offset = ReserveSpaceInternal(offset, compiled_method, kAdrpThunkSize * num_adrp);
+  offset = ReserveSpaceInternal(offset, compiled_method, method_ref, kAdrpThunkSize * num_adrp);
   if (num_adrp == 0u) {
     return offset;
   }
diff --git a/compiler/linker/arm64/relative_patcher_arm64.h b/compiler/linker/arm64/relative_patcher_arm64.h
index f8c6802..b2a1da5 100644
--- a/compiler/linker/arm64/relative_patcher_arm64.h
+++ b/compiler/linker/arm64/relative_patcher_arm64.h
@@ -28,7 +28,8 @@
   Arm64RelativePatcher(RelativePatcherTargetProvider* provider,
                        const Arm64InstructionSetFeatures* features);
 
-  uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method) OVERRIDE;
+  uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method,
+                        MethodReference method_ref) OVERRIDE;
   uint32_t WriteThunks(OutputStream* out, uint32_t offset) OVERRIDE;
   void PatchCall(std::vector<uint8_t>* code, uint32_t literal_offset,
                  uint32_t patch_offset, uint32_t target_offset) OVERRIDE;
diff --git a/compiler/linker/relative_patcher.cc b/compiler/linker/relative_patcher.cc
index ed82ac2..71f38b4 100644
--- a/compiler/linker/relative_patcher.cc
+++ b/compiler/linker/relative_patcher.cc
@@ -33,7 +33,8 @@
     RelativePatcherNone() { }
 
     uint32_t ReserveSpace(uint32_t offset,
-                          const CompiledMethod* compiled_method ATTRIBUTE_UNUSED) OVERRIDE {
+                          const CompiledMethod* compiled_method ATTRIBUTE_UNUSED,
+                          MethodReference method_ref ATTRIBUTE_UNUSED) OVERRIDE {
       return offset;  // No space reserved; no patches expected.
     }
 
diff --git a/compiler/linker/relative_patcher.h b/compiler/linker/relative_patcher.h
index dbb18cc..7a78254 100644
--- a/compiler/linker/relative_patcher.h
+++ b/compiler/linker/relative_patcher.h
@@ -84,7 +84,8 @@
 
   // Reserve space for relative call thunks if needed, return adjusted offset. After all methods
   // of a class have been processed it's called one last time with compiled_method == nullptr.
-  virtual uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method) = 0;
+  virtual uint32_t ReserveSpace(uint32_t offset, const CompiledMethod* compiled_method,
+                                MethodReference method_ref) = 0;
 
   // Write relative call thunks if needed, return adjusted offset.
   virtual uint32_t WriteThunks(OutputStream* out, uint32_t offset) = 0;
diff --git a/compiler/linker/relative_patcher_test.h b/compiler/linker/relative_patcher_test.h
index 515355b..9efcf60 100644
--- a/compiler/linker/relative_patcher_test.h
+++ b/compiler/linker/relative_patcher_test.h
@@ -63,6 +63,7 @@
   }
 
   MethodReference MethodRef(uint32_t method_idx) {
+    CHECK_NE(method_idx, 0u);
     return MethodReference(nullptr, method_idx);
   }
 
@@ -83,7 +84,7 @@
     uint32_t offset = kTrampolineSize;
     size_t idx = 0u;
     for (auto& compiled_method : compiled_methods_) {
-      offset = patcher_->ReserveSpace(offset, compiled_method.get());
+      offset = patcher_->ReserveSpace(offset, compiled_method.get(), compiled_method_refs_[idx]);
 
       uint32_t aligned_offset = compiled_method->AlignCode(offset);
       uint32_t aligned_code_delta = aligned_offset - offset;
@@ -97,7 +98,7 @@
       method_offset_map_.map.Put(compiled_method_refs_[idx], quick_code_offset);
       ++idx;
     }
-    offset = patcher_->ReserveSpace(offset, nullptr);
+    offset = patcher_->ReserveSpace(offset, nullptr, MethodReference(nullptr, 0u));
     uint32_t output_size = offset;
     output_.reserve(output_size);
 
@@ -166,7 +167,7 @@
 
     auto result = method_offset_map_.FindMethodOffset(method_ref);
     CHECK(result.first);  // Must have been linked.
-    size_t offset = result.second;
+    size_t offset = result.second - compiled_methods_[idx]->CodeDelta();
     CHECK_LT(offset, output_.size());
     CHECK_LE(offset + expected_code.size(), output_.size());
     ArrayRef<const uint8_t> linked_code(&output_[offset], expected_code.size());
@@ -174,6 +175,12 @@
       return true;
     }
     // Log failure info.
+    DumpDiff(expected_code, linked_code);
+    return false;
+  }
+
+  void DumpDiff(const ArrayRef<const uint8_t>& expected_code,
+                const ArrayRef<const uint8_t>& linked_code) {
     std::ostringstream expected_hex;
     std::ostringstream linked_hex;
     std::ostringstream diff_indicator;
@@ -193,7 +200,6 @@
     LOG(ERROR) << "<" << expected_hex.str();
     LOG(ERROR) << ">" << linked_hex.str();
     LOG(ERROR) << " " << diff_indicator.str();
-    return false;
   }
 
   // Map method reference to assigned offset.
diff --git a/compiler/linker/x86/relative_patcher_x86_base.cc b/compiler/linker/x86/relative_patcher_x86_base.cc
index 3108ca7..ea3472d 100644
--- a/compiler/linker/x86/relative_patcher_x86_base.cc
+++ b/compiler/linker/x86/relative_patcher_x86_base.cc
@@ -20,7 +20,9 @@
 namespace linker {
 
 uint32_t X86BaseRelativePatcher::ReserveSpace(
-    uint32_t offset, const CompiledMethod* compiled_method ATTRIBUTE_UNUSED) {
+    uint32_t offset,
+    const CompiledMethod* compiled_method ATTRIBUTE_UNUSED,
+    MethodReference method_ref ATTRIBUTE_UNUSED) {
   return offset;  // No space reserved; no limit on relative call distance.
 }
 
diff --git a/compiler/linker/x86/relative_patcher_x86_base.h b/compiler/linker/x86/relative_patcher_x86_base.h
index 9e1fc0c..1f38cf2 100644
--- a/compiler/linker/x86/relative_patcher_x86_base.h
+++ b/compiler/linker/x86/relative_patcher_x86_base.h
@@ -25,8 +25,9 @@
 class X86BaseRelativePatcher : public RelativePatcher {
  public:
   uint32_t ReserveSpace(uint32_t offset,
-                        const CompiledMethod* compiled_method ATTRIBUTE_UNUSED) OVERRIDE;
-  uint32_t WriteThunks(OutputStream* out ATTRIBUTE_UNUSED, uint32_t offset) OVERRIDE;
+                        const CompiledMethod* compiled_method,
+                        MethodReference method_ref) OVERRIDE;
+  uint32_t WriteThunks(OutputStream* out, uint32_t offset) OVERRIDE;
   void PatchCall(std::vector<uint8_t>* code, uint32_t literal_offset,
                  uint32_t patch_offset, uint32_t target_offset) OVERRIDE;
 
diff --git a/compiler/linker/x86/relative_patcher_x86_test.cc b/compiler/linker/x86/relative_patcher_x86_test.cc
index f471c83..c18a743 100644
--- a/compiler/linker/x86/relative_patcher_x86_test.cc
+++ b/compiler/linker/x86/relative_patcher_x86_test.cc
@@ -27,6 +27,12 @@
  protected:
   static const uint8_t kCallRawCode[];
   static const ArrayRef<const uint8_t> kCallCode;
+
+  uint32_t GetMethodOffset(uint32_t method_idx) {
+    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
+    CHECK(result.first);
+    return result.second;
+  }
 };
 
 const uint8_t X86RelativePatcherTest::kCallRawCode[] = {
@@ -37,45 +43,54 @@
 
 TEST_F(X86RelativePatcherTest, CallSelf) {
   LinkerPatch patches[] = {
-      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 0u),
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
   Link();
 
   static const uint8_t expected_code[] = {
       0xe8, 0xfb, 0xff, 0xff, 0xff
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
 }
 
 TEST_F(X86RelativePatcherTest, CallOther) {
-  static constexpr uint32_t kMethod1Offset = 0x12345678;
-  method_offset_map_.map.Put(MethodRef(1), kMethod1Offset);
-  LinkerPatch patches[] = {
+  LinkerPatch method1_patches[] = {
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 2u),
+  };
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(method1_patches));
+  LinkerPatch method2_patches[] = {
       LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<LinkerPatch>(method2_patches));
   Link();
 
-  auto result = method_offset_map_.FindMethodOffset(MethodRef(0));
-  ASSERT_TRUE(result.first);
-  uint32_t diff = kMethod1Offset - (result.second + kCallCode.size());
-  static const uint8_t expected_code[] = {
+  uint32_t method1_offset = GetMethodOffset(1u);
+  uint32_t method2_offset = GetMethodOffset(2u);
+  uint32_t diff_after = method2_offset - (method1_offset + kCallCode.size() /* PC adjustment */);
+  static const uint8_t method1_expected_code[] = {
       0xe8,
-      static_cast<uint8_t>(diff), static_cast<uint8_t>(diff >> 8),
-      static_cast<uint8_t>(diff >> 16), static_cast<uint8_t>(diff >> 24)
+      static_cast<uint8_t>(diff_after), static_cast<uint8_t>(diff_after >> 8),
+      static_cast<uint8_t>(diff_after >> 16), static_cast<uint8_t>(diff_after >> 24)
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
+  uint32_t diff_before = method1_offset - (method2_offset + kCallCode.size() /* PC adjustment */);
+  static const uint8_t method2_expected_code[] = {
+      0xe8,
+      static_cast<uint8_t>(diff_before), static_cast<uint8_t>(diff_before >> 8),
+      static_cast<uint8_t>(diff_before >> 16), static_cast<uint8_t>(diff_before >> 24)
+  };
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
 }
 
 TEST_F(X86RelativePatcherTest, CallTrampoline) {
   LinkerPatch patches[] = {
-      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 2u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
   Link();
 
-  auto result = method_offset_map_.FindMethodOffset(MethodRef(0));
+  auto result = method_offset_map_.FindMethodOffset(MethodRef(1));
   ASSERT_TRUE(result.first);
   uint32_t diff = kTrampolineOffset - (result.second + kCallCode.size());
   static const uint8_t expected_code[] = {
@@ -83,7 +98,7 @@
       static_cast<uint8_t>(diff), static_cast<uint8_t>(diff >> 8),
       static_cast<uint8_t>(diff >> 16), static_cast<uint8_t>(diff >> 24)
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
 }
 
 }  // namespace linker
diff --git a/compiler/linker/x86_64/relative_patcher_x86_64_test.cc b/compiler/linker/x86_64/relative_patcher_x86_64_test.cc
index 7ca2fa1..9d9529c 100644
--- a/compiler/linker/x86_64/relative_patcher_x86_64_test.cc
+++ b/compiler/linker/x86_64/relative_patcher_x86_64_test.cc
@@ -29,6 +29,12 @@
   static const ArrayRef<const uint8_t> kCallCode;
   static const uint8_t kDexCacheLoadRawCode[];
   static const ArrayRef<const uint8_t> kDexCacheLoadCode;
+
+  uint32_t GetMethodOffset(uint32_t method_idx) {
+    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
+    CHECK(result.first);
+    return result.second;
+  }
 };
 
 const uint8_t X86_64RelativePatcherTest::kCallRawCode[] = {
@@ -47,45 +53,54 @@
 
 TEST_F(X86_64RelativePatcherTest, CallSelf) {
   LinkerPatch patches[] = {
-      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 0u),
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
   Link();
 
   static const uint8_t expected_code[] = {
       0xe8, 0xfb, 0xff, 0xff, 0xff
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
 }
 
 TEST_F(X86_64RelativePatcherTest, CallOther) {
-  static constexpr uint32_t kMethod1Offset = 0x12345678;
-  method_offset_map_.map.Put(MethodRef(1), kMethod1Offset);
-  LinkerPatch patches[] = {
+  LinkerPatch method1_patches[] = {
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 2u),
+  };
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(method1_patches));
+  LinkerPatch method2_patches[] = {
       LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<LinkerPatch>(method2_patches));
   Link();
 
-  auto result = method_offset_map_.FindMethodOffset(MethodRef(0));
-  ASSERT_TRUE(result.first);
-  uint32_t diff = kMethod1Offset - (result.second + kCallCode.size());
-  static const uint8_t expected_code[] = {
+  uint32_t method1_offset = GetMethodOffset(1u);
+  uint32_t method2_offset = GetMethodOffset(2u);
+  uint32_t diff_after = method2_offset - (method1_offset + kCallCode.size() /* PC adjustment */);
+  static const uint8_t method1_expected_code[] = {
       0xe8,
-      static_cast<uint8_t>(diff), static_cast<uint8_t>(diff >> 8),
-      static_cast<uint8_t>(diff >> 16), static_cast<uint8_t>(diff >> 24)
+      static_cast<uint8_t>(diff_after), static_cast<uint8_t>(diff_after >> 8),
+      static_cast<uint8_t>(diff_after >> 16), static_cast<uint8_t>(diff_after >> 24)
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
+  uint32_t diff_before = method1_offset - (method2_offset + kCallCode.size() /* PC adjustment */);
+  static const uint8_t method2_expected_code[] = {
+      0xe8,
+      static_cast<uint8_t>(diff_before), static_cast<uint8_t>(diff_before >> 8),
+      static_cast<uint8_t>(diff_before >> 16), static_cast<uint8_t>(diff_before >> 24)
+  };
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
 }
 
 TEST_F(X86_64RelativePatcherTest, CallTrampoline) {
   LinkerPatch patches[] = {
-      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 1u),
+      LinkerPatch::RelativeCodePatch(kCallCode.size() - 4u, nullptr, 2u),
   };
-  AddCompiledMethod(MethodRef(0u), kCallCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<LinkerPatch>(patches));
   Link();
 
-  auto result = method_offset_map_.FindMethodOffset(MethodRef(0));
+  auto result = method_offset_map_.FindMethodOffset(MethodRef(1u));
   ASSERT_TRUE(result.first);
   uint32_t diff = kTrampolineOffset - (result.second + kCallCode.size());
   static const uint8_t expected_code[] = {
@@ -93,7 +108,7 @@
       static_cast<uint8_t>(diff), static_cast<uint8_t>(diff >> 8),
       static_cast<uint8_t>(diff >> 16), static_cast<uint8_t>(diff >> 24)
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
 }
 
 TEST_F(X86_64RelativePatcherTest, DexCacheReference) {
@@ -102,10 +117,10 @@
   LinkerPatch patches[] = {
       LinkerPatch::DexCacheArrayPatch(kDexCacheLoadCode.size() - 4u, nullptr, 0u, kElementOffset),
   };
-  AddCompiledMethod(MethodRef(0u), kDexCacheLoadCode, ArrayRef<LinkerPatch>(patches));
+  AddCompiledMethod(MethodRef(1u), kDexCacheLoadCode, ArrayRef<LinkerPatch>(patches));
   Link();
 
-  auto result = method_offset_map_.FindMethodOffset(MethodRef(0));
+  auto result = method_offset_map_.FindMethodOffset(MethodRef(1u));
   ASSERT_TRUE(result.first);
   uint32_t diff =
       dex_cache_arrays_begin_ + kElementOffset - (result.second + kDexCacheLoadCode.size());
@@ -114,7 +129,7 @@
       static_cast<uint8_t>(diff), static_cast<uint8_t>(diff >> 8),
       static_cast<uint8_t>(diff >> 16), static_cast<uint8_t>(diff >> 24)
   };
-  EXPECT_TRUE(CheckLinkedMethod(MethodRef(0), ArrayRef<const uint8_t>(expected_code)));
+  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
 }
 
 }  // namespace linker
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index d2a0f4f..e15890f 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -357,7 +357,8 @@
   bool EndClass() {
     OatDexMethodVisitor::EndClass();
     if (oat_class_index_ == writer_->oat_classes_.size()) {
-      offset_ = writer_->relative_patcher_->ReserveSpace(offset_, nullptr);
+      offset_ = writer_->relative_patcher_->ReserveSpace(offset_, nullptr,
+                                                         MethodReference(nullptr, 0u));
     }
     return true;
   }
@@ -384,7 +385,8 @@
         quick_code_offset = lb->second;
         deduped = true;
       } else {
-        offset_ = writer_->relative_patcher_->ReserveSpace(offset_, compiled_method);
+        offset_ = writer_->relative_patcher_->ReserveSpace(
+            offset_, compiled_method, MethodReference(dex_file_, it.GetMemberIndex()));
         offset_ = compiled_method->AlignCode(offset_);
         DCHECK_ALIGNED_PARAM(offset_,
                              GetInstructionSetAlignment(compiled_method->GetInstructionSet()));