/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "linker/arm64/relative_patcher_arm64.h"

#include "base/casts.h"
#include "linker/relative_patcher_test.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object.h"
#include "oat_quick_method_header.h"

namespace art {
namespace linker {

class Arm64RelativePatcherTest : public RelativePatcherTest {
 public:
  explicit Arm64RelativePatcherTest(const std::string& variant)
      : RelativePatcherTest(InstructionSet::kArm64, variant) { }

 protected:
  static const uint8_t kCallRawCode[];
  static const ArrayRef<const uint8_t> kCallCode;
  static const uint8_t kNopRawCode[];
  static const ArrayRef<const uint8_t> kNopCode;

  // NOP instruction.
  static constexpr uint32_t kNopInsn = 0xd503201f;

  // All branches can be created from kBlPlus0 or kBPlus0 by adding the low 26 bits.
  static constexpr uint32_t kBlPlus0 = 0x94000000u;
  static constexpr uint32_t kBPlus0 = 0x14000000u;

  // Special BL values.
  static constexpr uint32_t kBlPlusMax = 0x95ffffffu;
  static constexpr uint32_t kBlMinusMax = 0x96000000u;
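  // (BL has a signed 26-bit displacement in 4-byte units: kBlPlusMax branches
  // forward by 128 MiB - 4, kBlMinusMax backward by the full 128 MiB.)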

  // LDR immediate, 32-bit, unsigned offset.
  static constexpr uint32_t kLdrWInsn = 0xb9400000u;

  // LDR register, 32-bit, LSL #2.
  static constexpr uint32_t kLdrWLsl2Insn = 0xb8607800u;

  // LDUR, 32-bit.
  static constexpr uint32_t kLdurWInsn = 0xb8400000u;

  // ADD/ADDS/SUB/SUBS immediate, 64-bit.
  static constexpr uint32_t kAddXInsn = 0x91000000u;
  static constexpr uint32_t kAddsXInsn = 0xb1000000u;
  static constexpr uint32_t kSubXInsn = 0xd1000000u;
  static constexpr uint32_t kSubsXInsn = 0xf1000000u;

  // LDUR x2, [sp, #4], i.e. unaligned load crossing 64-bit boundary (assuming aligned sp).
  static constexpr uint32_t kLdurInsn = 0xf840405fu;

  // LDR w12, <label> and LDR x12, <label>. Bits 5-23 contain the label displacement
  // in 4-byte units.
  static constexpr uint32_t kLdrWPcRelInsn = 0x1800000cu;
  static constexpr uint32_t kLdrXPcRelInsn = 0x5800000cu;

  // LDR w13, [SP, #<pimm>] and LDR x13, [SP, #<pimm>]. Bits 10-21 contain the displacement
  // from SP in units of 4 bytes (for the 32-bit load) or 8 bytes (for the 64-bit load).
  static constexpr uint32_t kLdrWSpRelInsn = 0xb94003edu;
  static constexpr uint32_t kLdrXSpRelInsn = 0xf94003edu;

  // CBNZ x17, +0. Bits 5-23 are a placeholder for the target offset from PC in units of 4 bytes.
  static constexpr uint32_t kCbnzIP1Plus0Insn = 0xb5000011u;

  void InsertInsn(std::vector<uint8_t>* code, size_t pos, uint32_t insn) {
    CHECK_LE(pos, code->size());
    const uint8_t insn_code[] = {
        static_cast<uint8_t>(insn),
        static_cast<uint8_t>(insn >> 8),
        static_cast<uint8_t>(insn >> 16),
        static_cast<uint8_t>(insn >> 24),
    };
    static_assert(sizeof(insn_code) == 4u, "Invalid sizeof(insn_code).");
    code->insert(code->begin() + pos, insn_code, insn_code + sizeof(insn_code));
  }

  void PushBackInsn(std::vector<uint8_t>* code, uint32_t insn) {
    InsertInsn(code, code->size(), insn);
  }

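  // Assembles raw little-endian code from a list of 32-bit instructions,
  // e.g. RawCode({kNopInsn, kBlPlus0}) yields the bytes of a NOP followed by BL #0.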
  std::vector<uint8_t> RawCode(std::initializer_list<uint32_t> insns) {
    std::vector<uint8_t> raw_code;
    raw_code.reserve(insns.size() * 4u);
    for (uint32_t insn : insns) {
      PushBackInsn(&raw_code, insn);
    }
    return raw_code;
  }

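  // Adds method1, gap-filler methods, and a last method so that the last method is
  // placed exactly distance_without_thunks bytes after method1 (before any thunk
  // insertion); returns the index of the last method.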
  uint32_t Create2MethodsWithGap(const ArrayRef<const uint8_t>& method1_code,
                                 const ArrayRef<const LinkerPatch>& method1_patches,
                                 const ArrayRef<const uint8_t>& last_method_code,
                                 const ArrayRef<const LinkerPatch>& last_method_patches,
                                 uint32_t distance_without_thunks) {
    CHECK_EQ(distance_without_thunks % kArm64Alignment, 0u);
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    AddCompiledMethod(MethodRef(1u), method1_code, method1_patches);
    const uint32_t gap_start = method1_offset + method1_code.size();

    // We want to put the last method at a very precise offset.
    const uint32_t last_method_offset = method1_offset + distance_without_thunks;
    CHECK_ALIGNED(last_method_offset, kArm64Alignment);
    const uint32_t gap_end = last_method_offset - sizeof(OatQuickMethodHeader);

    // Fill the gap with intermediate methods in chunks of 2MiB and the first in [2MiB, 4MiB).
    // (This allows deduplicating the small chunks to avoid using 256MiB of memory for +-128MiB
    // offsets by this test. Making the first chunk bigger makes it easy to give all intermediate
    // methods the same alignment at the end, so the thunk insertion adds a predictable size as
    // long as it's after the first chunk.)
    uint32_t method_idx = 2u;
    constexpr uint32_t kSmallChunkSize = 2 * MB;
    std::vector<uint8_t> gap_code;
    uint32_t gap_size = gap_end - gap_start;
    uint32_t num_small_chunks = std::max(gap_size / kSmallChunkSize, 1u) - 1u;
    uint32_t chunk_start = gap_start;
    uint32_t chunk_size = gap_size - num_small_chunks * kSmallChunkSize;
    for (uint32_t i = 0; i <= num_small_chunks; ++i) {  // num_small_chunks + 1 iterations.
      uint32_t chunk_code_size =
          chunk_size - CodeAlignmentSize(chunk_start) - sizeof(OatQuickMethodHeader);
      gap_code.resize(chunk_code_size, 0u);
      AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(gap_code));
      method_idx += 1u;
      chunk_start += chunk_size;
      chunk_size = kSmallChunkSize;  // For all but the first chunk.
      DCHECK_EQ(CodeAlignmentSize(gap_end), CodeAlignmentSize(chunk_start));
    }

    // Add the last method and link.
    AddCompiledMethod(MethodRef(method_idx), last_method_code, last_method_patches);
    Link();

    // Check assumptions.
    CHECK_EQ(GetMethodOffset(1), method1_offset);
    auto last_result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
    CHECK(last_result.first);
    // There may be a thunk before the last method.
    if (last_result.second != last_method_offset) {
      // Thunk present. Check that there's only one.
      uint32_t thunk_end =
          CompiledCode::AlignCode(gap_end, InstructionSet::kArm64) + MethodCallThunkSize();
      uint32_t header_offset = thunk_end + CodeAlignmentSize(thunk_end);
      CHECK_EQ(last_result.second, header_offset + sizeof(OatQuickMethodHeader));
    }
    return method_idx;
  }

  uint32_t GetMethodOffset(uint32_t method_idx) {
    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
    CHECK(result.first);
    CHECK_ALIGNED(result.second, 4u);
    return result.second;
  }

  std::vector<uint8_t> CompileMethodCallThunk() {
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetMethodCallKey();
    return down_cast<Arm64RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  uint32_t MethodCallThunkSize() {
    return CompileMethodCallThunk().size();
  }

  bool CheckThunk(uint32_t thunk_offset) {
    const std::vector<uint8_t> expected_code = CompileMethodCallThunk();
    if (output_.size() < thunk_offset + expected_code.size()) {
      LOG(ERROR) << "output_.size() == " << output_.size() << " < "
          << "thunk_offset + expected_code.size() == " << (thunk_offset + expected_code.size());
      return false;
    }
    ArrayRef<const uint8_t> linked_code(&output_[thunk_offset], expected_code.size());
    if (linked_code == ArrayRef<const uint8_t>(expected_code)) {
      return true;
    }
    // Log failure info.
    DumpDiff(ArrayRef<const uint8_t>(expected_code), linked_code);
    return false;
  }

  std::vector<uint8_t> GenNops(size_t num_nops) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    return result;
  }

  std::vector<uint8_t> GenNopsAndBl(size_t num_nops, uint32_t bl) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u + 4u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    PushBackInsn(&result, bl);
    return result;
  }

  std::vector<uint8_t> GenNopsAndAdrpAndUse(size_t num_nops,
                                            uint32_t method_offset,
                                            uint32_t target_offset,
                                            uint32_t use_insn) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u + 8u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    CHECK_ALIGNED(method_offset, 4u);
    CHECK_ALIGNED(target_offset, 4u);
    uint32_t adrp_offset = method_offset + num_nops * 4u;
    uint32_t disp = target_offset - (adrp_offset & ~0xfffu);
    if (use_insn == kLdrWInsn) {
      DCHECK_ALIGNED(disp, 1u << 2);
      use_insn |= 1 |                             // LDR w1, [x0, #(imm12 << 2)]
          ((disp & 0xfffu) << (10 - 2));          // imm12 = ((disp & 0xfffu) >> 2) is at bit 10.
    } else if (use_insn == kAddXInsn) {
      use_insn |= 1 |                             // ADD x1, x0, #imm
          (disp & 0xfffu) << 10;                  // imm12 = (disp & 0xfffu) is at bit 10.
    } else {
      LOG(FATAL) << "Unexpected instruction: 0x" << std::hex << use_insn;
    }
    uint32_t adrp = 0x90000000u |                 // ADRP x0, +SignExtend(immhi:immlo:Zeros(12), 64)
        ((disp & 0x3000u) << (29 - 12)) |         // immlo = ((disp & 0x3000u) >> 12) is at bit 29,
        ((disp & 0xffffc000) >> (14 - 5)) |       // immhi = (disp >> 14) is at bit 5,
        // We take the sign bit from the disp, limiting disp to +- 2GiB.
        ((disp & 0x80000000) >> (31 - 23));       // sign bit in immhi is at bit 23.
    PushBackInsn(&result, adrp);
    PushBackInsn(&result, use_insn);
    return result;
  }

  std::vector<uint8_t> GenNopsAndAdrpLdr(size_t num_nops,
                                         uint32_t method_offset,
                                         uint32_t target_offset) {
    return GenNopsAndAdrpAndUse(num_nops, method_offset, target_offset, kLdrWInsn);
  }

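  // Tests patching an ADRP + LDR .bss entry load, with both instructions carrying
  // a StringBssEntryPatch anchored at the ADRP's offset (num_nops * 4u).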
  void TestNopsAdrpLdr(size_t num_nops, uint32_t bss_begin, uint32_t string_entry_offset) {
    constexpr uint32_t kStringIndex = 1u;
    string_index_to_offset_map_.Put(kStringIndex, string_entry_offset);
    bss_begin_ = bss_begin;
    auto code = GenNopsAndAdrpLdr(num_nops, 0u, 0u);  // Unpatched.
    const LinkerPatch patches[] = {
        LinkerPatch::StringBssEntryPatch(num_nops * 4u     , nullptr, num_nops * 4u, kStringIndex),
        LinkerPatch::StringBssEntryPatch(num_nops * 4u + 4u, nullptr, num_nops * 4u, kStringIndex),
    };
    AddCompiledMethod(MethodRef(1u),
                      ArrayRef<const uint8_t>(code),
                      ArrayRef<const LinkerPatch>(patches));
    Link();

    uint32_t method1_offset = GetMethodOffset(1u);
    uint32_t target_offset = bss_begin_ + string_entry_offset;
    auto expected_code = GenNopsAndAdrpLdr(num_nops, method1_offset, target_offset);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  }

  std::vector<uint8_t> GenNopsAndAdrpAdd(size_t num_nops,
                                         uint32_t method_offset,
                                         uint32_t target_offset) {
    return GenNopsAndAdrpAndUse(num_nops, method_offset, target_offset, kAddXInsn);
  }

  void TestNopsAdrpAdd(size_t num_nops, uint32_t string_offset) {
    constexpr uint32_t kStringIndex = 1u;
    string_index_to_offset_map_.Put(kStringIndex, string_offset);
    auto code = GenNopsAndAdrpAdd(num_nops, 0u, 0u);  // Unpatched.
    const LinkerPatch patches[] = {
        LinkerPatch::RelativeStringPatch(num_nops * 4u     , nullptr, num_nops * 4u, kStringIndex),
        LinkerPatch::RelativeStringPatch(num_nops * 4u + 4u, nullptr, num_nops * 4u, kStringIndex),
    };
    AddCompiledMethod(MethodRef(1u),
                      ArrayRef<const uint8_t>(code),
                      ArrayRef<const LinkerPatch>(patches));
    Link();

    uint32_t method1_offset = GetMethodOffset(1u);
    auto expected_code = GenNopsAndAdrpAdd(num_nops, method1_offset, string_offset);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  }

  void PrepareNopsAdrpInsn2Ldr(size_t num_nops,
                               uint32_t insn2,
                               uint32_t bss_begin,
                               uint32_t string_entry_offset) {
    constexpr uint32_t kStringIndex = 1u;
    string_index_to_offset_map_.Put(kStringIndex, string_entry_offset);
    bss_begin_ = bss_begin;
    auto code = GenNopsAndAdrpLdr(num_nops, 0u, 0u);  // Unpatched.
    InsertInsn(&code, num_nops * 4u + 4u, insn2);
    const LinkerPatch patches[] = {
        LinkerPatch::StringBssEntryPatch(num_nops * 4u     , nullptr, num_nops * 4u, kStringIndex),
        LinkerPatch::StringBssEntryPatch(num_nops * 4u + 8u, nullptr, num_nops * 4u, kStringIndex),
    };
    AddCompiledMethod(MethodRef(1u),
                      ArrayRef<const uint8_t>(code),
                      ArrayRef<const LinkerPatch>(patches));
    Link();
  }

  void PrepareNopsAdrpInsn2Add(size_t num_nops, uint32_t insn2, uint32_t string_offset) {
    constexpr uint32_t kStringIndex = 1u;
    string_index_to_offset_map_.Put(kStringIndex, string_offset);
    auto code = GenNopsAndAdrpAdd(num_nops, 0u, 0u);  // Unpatched.
    InsertInsn(&code, num_nops * 4u + 4u, insn2);
    const LinkerPatch patches[] = {
        LinkerPatch::RelativeStringPatch(num_nops * 4u     , nullptr, num_nops * 4u, kStringIndex),
        LinkerPatch::RelativeStringPatch(num_nops * 4u + 8u, nullptr, num_nops * 4u, kStringIndex),
    };
    AddCompiledMethod(MethodRef(1u),
                      ArrayRef<const uint8_t>(code),
                      ArrayRef<const LinkerPatch>(patches));
    Link();
  }

  void TestNopsAdrpInsn2AndUse(size_t num_nops,
                               uint32_t insn2,
                               uint32_t target_offset,
                               uint32_t use_insn) {
    uint32_t method1_offset = GetMethodOffset(1u);
    auto expected_code = GenNopsAndAdrpAndUse(num_nops, method1_offset, target_offset, use_insn);
    InsertInsn(&expected_code, num_nops * 4u + 4u, insn2);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  }

  void TestNopsAdrpInsn2AndUseHasThunk(size_t num_nops,
                                       uint32_t insn2,
                                       uint32_t target_offset,
                                       uint32_t use_insn) {
    uint32_t method1_offset = GetMethodOffset(1u);
    CHECK(!compiled_method_refs_.empty());
    CHECK_EQ(compiled_method_refs_[0].index, 1u);
    CHECK_EQ(compiled_method_refs_.size(), compiled_methods_.size());
    uint32_t method1_size = compiled_methods_[0]->GetQuickCode().size();
    uint32_t thunk_offset =
        CompiledCode::AlignCode(method1_offset + method1_size, InstructionSet::kArm64);
    uint32_t b_diff = thunk_offset - (method1_offset + num_nops * 4u);
    CHECK_ALIGNED(b_diff, 4u);
    ASSERT_LT(b_diff, 128 * MB);
    uint32_t b_out = kBPlus0 + ((b_diff >> 2) & 0x03ffffffu);
    uint32_t b_in = kBPlus0 + ((-b_diff >> 2) & 0x03ffffffu);

    auto expected_code = GenNopsAndAdrpAndUse(num_nops, method1_offset, target_offset, use_insn);
    InsertInsn(&expected_code, num_nops * 4u + 4u, insn2);
    // Replace the adrp with a b to the thunk.
    expected_code.erase(expected_code.begin() + num_nops * 4u,
                        expected_code.begin() + num_nops * 4u + 4u);
    InsertInsn(&expected_code, num_nops * 4u, b_out);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));

    auto expected_thunk_code = GenNopsAndAdrpLdr(0u, thunk_offset, target_offset);
    ASSERT_EQ(expected_thunk_code.size(), 8u);
    expected_thunk_code.erase(expected_thunk_code.begin() + 4u, expected_thunk_code.begin() + 8u);
    InsertInsn(&expected_thunk_code, 4u, b_in);
    ASSERT_EQ(expected_thunk_code.size(), 8u);

    uint32_t thunk_size = MethodCallThunkSize();
    ASSERT_EQ(thunk_offset + thunk_size, output_.size());
    ASSERT_EQ(thunk_size, expected_thunk_code.size());
    ArrayRef<const uint8_t> thunk_code(&output_[thunk_offset], thunk_size);
    if (ArrayRef<const uint8_t>(expected_thunk_code) != thunk_code) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk_code), thunk_code);
      FAIL();
    }
  }

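  // In the has_thunk cases below, the ADRP sits at offset 0xff8 or 0xffc within its
  // 4KiB page, the pattern the patcher rewrites as a branch to an out-of-line thunk
  // (the Cortex-A53 erratum 843419 workaround) that performs the ADRP and branches back.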
  void TestAdrpInsn2Ldr(uint32_t insn2,
                        uint32_t adrp_offset,
                        bool has_thunk,
                        uint32_t bss_begin,
                        uint32_t string_entry_offset) {
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    ASSERT_LT(method1_offset, adrp_offset);
    CHECK_ALIGNED(adrp_offset, 4u);
    uint32_t num_nops = (adrp_offset - method1_offset) / 4u;
    PrepareNopsAdrpInsn2Ldr(num_nops, insn2, bss_begin, string_entry_offset);
    uint32_t target_offset = bss_begin_ + string_entry_offset;
    if (has_thunk) {
      TestNopsAdrpInsn2AndUseHasThunk(num_nops, insn2, target_offset, kLdrWInsn);
    } else {
      TestNopsAdrpInsn2AndUse(num_nops, insn2, target_offset, kLdrWInsn);
    }
    ASSERT_EQ(method1_offset, GetMethodOffset(1u));  // If this fails, num_nops is wrong.
  }

  void TestAdrpLdurLdr(uint32_t adrp_offset,
                       bool has_thunk,
                       uint32_t bss_begin,
                       uint32_t string_entry_offset) {
    TestAdrpInsn2Ldr(kLdurInsn, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpLdrPcRelLdr(uint32_t pcrel_ldr_insn,
                           int32_t pcrel_disp,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t bss_begin,
                           uint32_t string_entry_offset) {
    ASSERT_LT(pcrel_disp, 0x100000);
    ASSERT_GE(pcrel_disp, -0x100000);
    ASSERT_EQ(pcrel_disp & 0x3, 0);
    uint32_t insn2 = pcrel_ldr_insn | (((static_cast<uint32_t>(pcrel_disp) >> 2) & 0x7ffffu) << 5);
    TestAdrpInsn2Ldr(insn2, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpLdrSpRelLdr(uint32_t sprel_ldr_insn,
                           uint32_t sprel_disp_in_load_units,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t bss_begin,
                           uint32_t string_entry_offset) {
    ASSERT_LT(sprel_disp_in_load_units, 0x1000u);
    uint32_t insn2 = sprel_ldr_insn | ((sprel_disp_in_load_units & 0xfffu) << 10);
    TestAdrpInsn2Ldr(insn2, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpInsn2Add(uint32_t insn2,
                        uint32_t adrp_offset,
                        bool has_thunk,
                        uint32_t string_offset) {
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    ASSERT_LT(method1_offset, adrp_offset);
    CHECK_ALIGNED(adrp_offset, 4u);
    uint32_t num_nops = (adrp_offset - method1_offset) / 4u;
    PrepareNopsAdrpInsn2Add(num_nops, insn2, string_offset);
    if (has_thunk) {
      TestNopsAdrpInsn2AndUseHasThunk(num_nops, insn2, string_offset, kAddXInsn);
    } else {
      TestNopsAdrpInsn2AndUse(num_nops, insn2, string_offset, kAddXInsn);
    }
    ASSERT_EQ(method1_offset, GetMethodOffset(1u));  // If this fails, num_nops is wrong.
  }

  void TestAdrpLdurAdd(uint32_t adrp_offset, bool has_thunk, uint32_t string_offset) {
    TestAdrpInsn2Add(kLdurInsn, adrp_offset, has_thunk, string_offset);
  }

  void TestAdrpLdrPcRelAdd(uint32_t pcrel_ldr_insn,
                           int32_t pcrel_disp,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t string_offset) {
    ASSERT_LT(pcrel_disp, 0x100000);
    ASSERT_GE(pcrel_disp, -0x100000);
    ASSERT_EQ(pcrel_disp & 0x3, 0);
    uint32_t insn2 = pcrel_ldr_insn | (((static_cast<uint32_t>(pcrel_disp) >> 2) & 0x7ffffu) << 5);
    TestAdrpInsn2Add(insn2, adrp_offset, has_thunk, string_offset);
  }

  void TestAdrpLdrSpRelAdd(uint32_t sprel_ldr_insn,
                           uint32_t sprel_disp_in_load_units,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t string_offset) {
    ASSERT_LT(sprel_disp_in_load_units, 0x1000u);
    uint32_t insn2 = sprel_ldr_insn | ((sprel_disp_in_load_units & 0xfffu) << 10);
    TestAdrpInsn2Add(insn2, adrp_offset, has_thunk, string_offset);
  }

  std::vector<uint8_t> CompileBakerOffsetThunk(uint32_t base_reg, uint32_t holder_reg) {
    const LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Arm64RelativePatcher::EncodeBakerReadBarrierFieldData(base_reg, holder_reg));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Arm64RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  std::vector<uint8_t> CompileBakerArrayThunk(uint32_t base_reg) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Arm64RelativePatcher::EncodeBakerReadBarrierArrayData(base_reg));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Arm64RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  std::vector<uint8_t> CompileBakerGcRootThunk(uint32_t root_reg) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Arm64RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Arm64RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

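  // Reads back one 32-bit instruction from the linked (little-endian) output.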
  uint32_t GetOutputInsn(uint32_t offset) {
    CHECK_LE(offset, output_.size());
    CHECK_GE(output_.size() - offset, 4u);
    return (static_cast<uint32_t>(output_[offset]) << 0) |
           (static_cast<uint32_t>(output_[offset + 1]) << 8) |
           (static_cast<uint32_t>(output_[offset + 2]) << 16) |
           (static_cast<uint32_t>(output_[offset + 3]) << 24);
  }

  void TestBakerField(uint32_t offset, uint32_t ref_reg);
};

const uint8_t Arm64RelativePatcherTest::kCallRawCode[] = {
    0x00, 0x00, 0x00, 0x94
};

const ArrayRef<const uint8_t> Arm64RelativePatcherTest::kCallCode(kCallRawCode);

const uint8_t Arm64RelativePatcherTest::kNopRawCode[] = {
    0x1f, 0x20, 0x03, 0xd5
};

const ArrayRef<const uint8_t> Arm64RelativePatcherTest::kNopCode(kNopRawCode);

class Arm64RelativePatcherTestDefault : public Arm64RelativePatcherTest {
 public:
  Arm64RelativePatcherTestDefault() : Arm64RelativePatcherTest("default") { }
};

class Arm64RelativePatcherTestDenver64 : public Arm64RelativePatcherTest {
 public:
  Arm64RelativePatcherTestDenver64() : Arm64RelativePatcherTest("denver64") { }
};

TEST_F(Arm64RelativePatcherTestDefault, CallSelf) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  const std::vector<uint8_t> expected_code = RawCode({kBlPlus0});
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOther) {
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(method1_patches));
  const LinkerPatch method2_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<const LinkerPatch>(method2_patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t method2_offset = GetMethodOffset(2u);
  uint32_t diff_after = method2_offset - method1_offset;
  CHECK_ALIGNED(diff_after, 4u);
  ASSERT_LT(diff_after >> 2, 1u << 8);  // Simple encoding, (diff_after >> 2) fits into 8 bits.
  const std::vector<uint8_t> method1_expected_code = RawCode({kBlPlus0 + (diff_after >> 2)});
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
  uint32_t diff_before = method1_offset - method2_offset;
  CHECK_ALIGNED(diff_before, 4u);
  ASSERT_GE(diff_before, -1u << 27);
  auto method2_expected_code = GenNopsAndBl(0u, kBlPlus0 | ((diff_before >> 2) & 0x03ffffffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallTrampoline) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t diff = kTrampolineOffset - method1_offset;
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_GE(diff, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0 (checked as unsigned).
  auto expected_code = GenNopsAndBl(0u, kBlPlus0 | ((diff >> 2) & 0x03ffffffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallTrampolineTooFar) {
  constexpr uint32_t missing_method_index = 1024u;
  auto last_method_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, missing_method_index),
  };

  constexpr uint32_t just_over_max_negative_disp = 128 * MB + 4;
  uint32_t last_method_idx = Create2MethodsWithGap(
      kNopCode, ArrayRef<const LinkerPatch>(), last_method_code,
      ArrayRef<const LinkerPatch>(last_method_patches),
      just_over_max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset,
            last_method_offset + bl_offset_in_last_method - just_over_max_negative_disp);
  ASSERT_FALSE(method_offset_map_.FindMethodOffset(MethodRef(missing_method_index)).first);

  // Check linked code.
  uint32_t thunk_offset =
      CompiledCode::AlignCode(last_method_offset + last_method_code.size(),
                              InstructionSet::kArm64);
  uint32_t diff = thunk_offset - (last_method_offset + bl_offset_in_last_method);
  CHECK_ALIGNED(diff, 4u);
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(1u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherAlmostTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  uint32_t expected_last_method_idx = 65;  // Based on 2MiB chunks in Create2MethodsWithGap().
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, expected_last_method_idx),
  };

  constexpr uint32_t max_positive_disp = 128 * MB - 4u;
  uint32_t last_method_idx = Create2MethodsWithGap(method1_code,
                                                   ArrayRef<const LinkerPatch>(method1_patches),
                                                   kNopCode,
                                                   ArrayRef<const LinkerPatch>(),
                                                   bl_offset_in_method1 + max_positive_disp);
  ASSERT_EQ(expected_last_method_idx, last_method_idx);

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset + bl_offset_in_method1 + max_positive_disp, last_method_offset);

  // Check linked code.
  auto expected_code = GenNopsAndBl(1u, kBlPlusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherAlmostTooFarBefore) {
  auto last_method_raw_code = GenNopsAndBl(0u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 0u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, 1u),
  };

  constexpr uint32_t max_negative_disp = 128 * MB;
  uint32_t last_method_idx = Create2MethodsWithGap(kNopCode,
                                                   ArrayRef<const LinkerPatch>(),
                                                   last_method_code,
                                                   ArrayRef<const LinkerPatch>(last_method_patches),
                                                   max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset, last_method_offset + bl_offset_in_last_method - max_negative_disp);

  // Check linked code.
  auto expected_code = GenNopsAndBl(0u, kBlMinusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherJustTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(0u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 0u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  uint32_t expected_last_method_idx = 65;  // Based on 2MiB chunks in Create2MethodsWithGap().
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, expected_last_method_idx),
  };

  constexpr uint32_t just_over_max_positive_disp = 128 * MB;
  uint32_t last_method_idx = Create2MethodsWithGap(
      method1_code,
      ArrayRef<const LinkerPatch>(method1_patches),
      kNopCode,
      ArrayRef<const LinkerPatch>(),
      bl_offset_in_method1 + just_over_max_positive_disp);
  ASSERT_EQ(expected_last_method_idx, last_method_idx);

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_TRUE(IsAligned<kArm64Alignment>(last_method_offset));
  uint32_t last_method_header_offset = last_method_offset - sizeof(OatQuickMethodHeader);
  uint32_t thunk_size = MethodCallThunkSize();
  uint32_t thunk_offset = RoundDown(last_method_header_offset - thunk_size, kArm64Alignment);
  DCHECK_EQ(thunk_offset + thunk_size + CodeAlignmentSize(thunk_offset + thunk_size),
            last_method_header_offset);
  uint32_t diff = thunk_offset - (method1_offset + bl_offset_in_method1);
  CHECK_ALIGNED(diff, 4u);
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(0u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  CheckThunk(thunk_offset);
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherJustTooFarBefore) {
  auto last_method_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, 1u),
  };

  constexpr uint32_t just_over_max_negative_disp = 128 * MB + 4;
  uint32_t last_method_idx = Create2MethodsWithGap(
      kNopCode, ArrayRef<const LinkerPatch>(), last_method_code,
      ArrayRef<const LinkerPatch>(last_method_patches),
      just_over_max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset,
            last_method_offset + bl_offset_in_last_method - just_over_max_negative_disp);

  // Check linked code.
  uint32_t thunk_offset =
      CompiledCode::AlignCode(last_method_offset + last_method_code.size(),
                              InstructionSet::kArm64);
  uint32_t diff = thunk_offset - (last_method_offset + bl_offset_in_last_method);
  CHECK_ALIGNED(diff, 4u);
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(1u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry1) {
  TestNopsAdrpLdr(0u, 0x12345678u, 0x1234u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry2) {
  TestNopsAdrpLdr(0u, -0x12345678u, 0x4444u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry3) {
  TestNopsAdrpLdr(0u, 0x12345000u, 0x3ffcu);
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry4) {
  TestNopsAdrpLdr(0u, 0x12345000u, 0x4000u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringReference1) {
  TestNopsAdrpAdd(0u, 0x12345678u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringReference2) {
  TestNopsAdrpAdd(0u, -0x12345678u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringReference3) {
  TestNopsAdrpAdd(0u, 0x12345000u);
}

TEST_F(Arm64RelativePatcherTestDefault, StringReference4) {
  TestNopsAdrpAdd(0u, 0x12345ffcu);
}

#define TEST_FOR_OFFSETS(test, disp1, disp2) \
  test(0xff4u, disp1) test(0xff8u, disp1) test(0xffcu, disp1) test(0x1000u, disp1) \
  test(0xff4u, disp2) test(0xff8u, disp2) test(0xffcu, disp2) test(0x1000u, disp2)
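// The adrp_offset values straddle a 4KiB page boundary; 0xff8u and 0xffcu put the
// ADRP in the last two words of a page, the positions that may require a fixup thunk.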

#define DEFAULT_LDUR_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry ## adrp_offset ## Ldur ## disp) { \
    bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu); \
    TestAdrpLdurLdr(adrp_offset, has_thunk, 0x12345678u, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_LDUR_LDR_TEST, 0x1234, 0x1238)

#define DENVER64_LDUR_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDenver64, StringBssEntry ## adrp_offset ## Ldur ## disp) { \
    TestAdrpLdurLdr(adrp_offset, false, 0x12345678u, disp); \
  }

TEST_FOR_OFFSETS(DENVER64_LDUR_LDR_TEST, 0x1234, 0x1238)

// LDR <Wt>, <label> is always aligned. We should never have to use a fixup.
#define LDRW_PCREL_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry ## adrp_offset ## WPcRel ## disp) { \
    TestAdrpLdrPcRelLdr(kLdrWPcRelInsn, disp, adrp_offset, false, 0x12345678u, 0x1234u); \
  }

TEST_FOR_OFFSETS(LDRW_PCREL_LDR_TEST, 0x1234, 0x1238)

// LDR <Xt>, <label> is aligned when offset + displacement is a multiple of 8.
#define LDRX_PCREL_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry ## adrp_offset ## XPcRel ## disp) { \
    bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(disp)); \
    bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu) && unaligned; \
    TestAdrpLdrPcRelLdr(kLdrXPcRelInsn, disp, adrp_offset, has_thunk, 0x12345678u, 0x1234u); \
  }

TEST_FOR_OFFSETS(LDRX_PCREL_LDR_TEST, 0x1234, 0x1238)

// LDR <Wt>, [SP, #<pimm>] and LDR <Xt>, [SP, #<pimm>] are always aligned. No fixup needed.
#define LDRW_SPREL_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry ## adrp_offset ## WSpRel ## disp) { \
    TestAdrpLdrSpRelLdr(kLdrWSpRelInsn, (disp) >> 2, adrp_offset, false, 0x12345678u, 0x1234u); \
  }

TEST_FOR_OFFSETS(LDRW_SPREL_LDR_TEST, 0, 4)

#define LDRX_SPREL_LDR_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry ## adrp_offset ## XSpRel ## disp) { \
    TestAdrpLdrSpRelLdr(kLdrXSpRelInsn, (disp) >> 3, adrp_offset, false, 0x12345678u, 0x1234u); \
  }

TEST_FOR_OFFSETS(LDRX_SPREL_LDR_TEST, 0, 8)

#define DEFAULT_LDUR_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## Ldur ## disp) { \
    bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu); \
    TestAdrpLdurAdd(adrp_offset, has_thunk, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_LDUR_ADD_TEST, 0x12345678, 0xffffc840)

#define DENVER64_LDUR_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDenver64, StringReference ## adrp_offset ## Ldur ## disp) { \
    TestAdrpLdurAdd(adrp_offset, false, disp); \
  }

TEST_FOR_OFFSETS(DENVER64_LDUR_ADD_TEST, 0x12345678, 0xffffc840)

#define DEFAULT_SUBX3X2_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## SubX3X2 ## disp) { \
    /* SUB unrelated to "ADRP x0, addr". */ \
    uint32_t sub = kSubXInsn | (100 << 10) | (2u << 5) | 3u;  /* SUB x3, x2, #100 */ \
    TestAdrpInsn2Add(sub, adrp_offset, false, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_SUBX3X2_ADD_TEST, 0x12345678, 0xffffc840)

#define DEFAULT_SUBSX3X0_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## SubsX3X0 ## disp) { \
    /* SUBS that uses the result of "ADRP x0, addr". */ \
    uint32_t subs = kSubsXInsn | (100 << 10) | (0u << 5) | 3u;  /* SUBS x3, x0, #100 */ \
    TestAdrpInsn2Add(subs, adrp_offset, false, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_SUBSX3X0_ADD_TEST, 0x12345678, 0xffffc840)

#define DEFAULT_ADDX0X0_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## AddX0X0 ## disp) { \
    /* ADD that uses the result register of "ADRP x0, addr" as both source and destination. */ \
    uint32_t add = kAddXInsn | (100 << 10) | (0u << 5) | 0u;  /* ADD x0, x0, #100 */ \
    TestAdrpInsn2Add(add, adrp_offset, false, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_ADDX0X0_ADD_TEST, 0x12345678, 0xffffc840)

#define DEFAULT_ADDSX0X2_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## AddsX0X2 ## disp) { \
    /* ADDS that does not use the result of "ADRP x0, addr" but overwrites that register. */ \
    uint32_t adds = kAddsXInsn | (100 << 10) | (2u << 5) | 0u;  /* ADDS x0, x2, #100 */ \
    bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu); \
    TestAdrpInsn2Add(adds, adrp_offset, has_thunk, disp); \
  }

TEST_FOR_OFFSETS(DEFAULT_ADDSX0X2_ADD_TEST, 0x12345678, 0xffffc840)

// LDR <Wt>, <label> is always aligned. We should never have to use a fixup.
#define LDRW_PCREL_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## WPcRel ## disp) { \
    TestAdrpLdrPcRelAdd(kLdrWPcRelInsn, disp, adrp_offset, false, 0x12345678u); \
  }

TEST_FOR_OFFSETS(LDRW_PCREL_ADD_TEST, 0x1234, 0x1238)

// LDR <Xt>, <label> is aligned when offset + displacement is a multiple of 8.
#define LDRX_PCREL_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## XPcRel ## disp) { \
    bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(disp)); \
    bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu) && unaligned; \
    TestAdrpLdrPcRelAdd(kLdrXPcRelInsn, disp, adrp_offset, has_thunk, 0x12345678u); \
  }

TEST_FOR_OFFSETS(LDRX_PCREL_ADD_TEST, 0x1234, 0x1238)

// LDR <Wt>, [SP, #<pimm>] and LDR <Xt>, [SP, #<pimm>] are always aligned. No fixup needed.
#define LDRW_SPREL_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## WSpRel ## disp) { \
    TestAdrpLdrSpRelAdd(kLdrWSpRelInsn, (disp) >> 2, adrp_offset, false, 0x12345678u); \
  }

TEST_FOR_OFFSETS(LDRW_SPREL_ADD_TEST, 0, 4)

#define LDRX_SPREL_ADD_TEST(adrp_offset, disp) \
  TEST_F(Arm64RelativePatcherTestDefault, StringReference ## adrp_offset ## XSpRel ## disp) { \
    TestAdrpLdrSpRelAdd(kLdrXSpRelInsn, (disp) >> 3, adrp_offset, false, 0x12345678u); \
  }

TEST_FOR_OFFSETS(LDRX_SPREL_ADD_TEST, 0, 8)

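// Exercises the Baker read barrier fast path for field loads: the CBNZ in the
// compiled code is patched to branch to a per-(base_reg, holder_reg) thunk that
// null-checks the holder (when holder_reg == base_reg), tests the lock word's
// read barrier state (gray) bit, and adds a fake address dependency on base_reg.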
void Arm64RelativePatcherTest::TestBakerField(uint32_t offset, uint32_t ref_reg) {
  uint32_t valid_regs[] = {
      0,  1,  2,  3,  4,  5,  6,  7,  8,  9,
      10, 11, 12, 13, 14, 15, 18, 19,          // IP0 and IP1 are reserved.
      20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
      // LR and SP/ZR are reserved.
  };
  DCHECK_ALIGNED(offset, 4u);
  DCHECK_LT(offset, 16 * KB);
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 0u;
  uint32_t method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    for (uint32_t holder_reg : valid_regs) {
      uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;
      const std::vector<uint8_t> raw_code = RawCode({kCbnzIP1Plus0Insn, ldr});
      ASSERT_EQ(kMethodCodeSize, raw_code.size());
      ArrayRef<const uint8_t> code(raw_code);
      uint32_t encoded_data =
          Arm64RelativePatcher::EncodeBakerReadBarrierFieldData(base_reg, holder_reg);
      const LinkerPatch patches[] = {
          LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset, encoded_data),
      };
      ++method_idx;
      AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
    }
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    for (uint32_t holder_reg : valid_regs) {
      ++method_idx;
      uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
      uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
      uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;
      const std::vector<uint8_t> expected_code = RawCode({cbnz, ldr});
      ASSERT_EQ(kMethodCodeSize, expected_code.size());
      ASSERT_TRUE(
          CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

      std::vector<uint8_t> expected_thunk = CompileBakerOffsetThunk(base_reg, holder_reg);
      ASSERT_GT(output_.size(), thunk_offset);
      ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
      ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                             expected_thunk.size());
      if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
        DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
        ASSERT_TRUE(false);
      }

      size_t gray_check_offset = thunk_offset;
      if (holder_reg == base_reg) {
        // Verify that the null-check CBZ uses the correct register, i.e. holder_reg.
        ASSERT_GE(output_.size() - gray_check_offset, 4u);
        ASSERT_EQ(0x34000000u | holder_reg, GetOutputInsn(thunk_offset) & 0xff00001fu);
        gray_check_offset += 4u;
      }
      // Verify that the lock word for the gray bit check is loaded from the holder address.
      static constexpr size_t kGrayCheckInsns = 5;
      ASSERT_GE(output_.size() - gray_check_offset, 4u * kGrayCheckInsns);
      const uint32_t load_lock_word =
          kLdrWInsn |
          (mirror::Object::MonitorOffset().Uint32Value() << (10 - 2)) |
          (holder_reg << 5) |
          /* ip0 */ 16;
      EXPECT_EQ(load_lock_word, GetOutputInsn(gray_check_offset));
      // Verify the gray bit check.
      const uint32_t check_gray_bit_without_offset =
          0x37000000u | (LockWord::kReadBarrierStateShift << 19) | /* ip0 */ 16;
      EXPECT_EQ(check_gray_bit_without_offset, GetOutputInsn(gray_check_offset + 4u) & 0xfff8001fu);
      // Verify the fake dependency.
      const uint32_t fake_dependency =
          0x8b408000u |             // ADD Xd, Xn, Xm, LSR 32
          (/* ip0 */ 16 << 16) |    // Xm = ip0
          (base_reg << 5) |         // Xn = base_reg
          base_reg;                 // Xd = base_reg
      EXPECT_EQ(fake_dependency, GetOutputInsn(gray_check_offset + 12u));
      // Do not check the rest of the implementation.

      // The next thunk follows on the next aligned offset.
      thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
    }
  }
}
991
Vladimir Marko66d691d2017-04-07 17:53:39 +0100992#define TEST_BAKER_FIELD(offset, ref_reg) \
Vladimir Markof4f2daa2017-03-20 18:26:59 +0000993 TEST_F(Arm64RelativePatcherTestDefault, \
Vladimir Marko66d691d2017-04-07 17:53:39 +0100994 BakerOffset##offset##_##ref_reg) { \
995 TestBakerField(offset, ref_reg); \
Vladimir Markof4f2daa2017-03-20 18:26:59 +0000996 }
997
Vladimir Marko66d691d2017-04-07 17:53:39 +0100998TEST_BAKER_FIELD(/* offset */ 0, /* ref_reg */ 0)
999TEST_BAKER_FIELD(/* offset */ 8, /* ref_reg */ 15)
1000TEST_BAKER_FIELD(/* offset */ 0x3ffc, /* ref_reg */ 29)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001001
1002TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkInTheMiddle) {
1003 // One thunk in the middle with maximum distance branches to it from both sides.
Vladimir Marko66d691d2017-04-07 17:53:39 +01001004 // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001005 constexpr uint32_t kLiteralOffset1 = 4;
1006 const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
1007 ArrayRef<const uint8_t> code1(raw_code1);
1008 uint32_t encoded_data =
1009 Arm64RelativePatcher::EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
1010 const LinkerPatch patches1[] = {
1011 LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
1012 };
1013 AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));
1014
1015 // Allow thunk at 1MiB offset from the start of the method above. Literal offset being 4
1016 // allows the branch to reach that thunk.
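  // (The CBNZ at offset 4 reaches up to 1MiB - 4 bytes forward, i.e. exactly 1MiB past
  // the start of the method's code.)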
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  // Enforce thunk reservation with a tiny method.
  AddCompiledMethod(MethodRef(3u), kNopCode);

  // Allow reaching the thunk from the very beginning of a method 1MiB away. Backward branch
  // reaches the full 1MiB. Things to subtract:
  //   - thunk size and method 3 pre-header, rounded up (padding in between if needed)
  //   - method 3 code and method 4 pre-header, rounded up (padding in between if needed)
  //   - method 4 header (let there be no padding between method 4 code and method 5 pre-header).
  size_t thunk_size = CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0).size();
  size_t filler2_size =
      1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 4u);
  ArrayRef<const uint8_t> filler2_code(raw_filler2_code);
  AddCompiledMethod(MethodRef(4u), filler2_code);

  constexpr uint32_t kLiteralOffset2 = 0;
  const std::vector<uint8_t> raw_code2 = RawCode({kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code2(raw_code2);
  const LinkerPatch patches2[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data),
  };
  AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2));

  Link();

  uint32_t first_method_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(5u);
  EXPECT_EQ(2 * MB, last_method_offset - first_method_offset);

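  // Maximum CBNZ reach: forward (2^18 - 1) * 4 = 1MiB - 4 bytes (imm19 = 0x3ffff in
  // bits 5-23), backward -2^18 * 4 = -1MiB (imm19 = 0x40000).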
  const uint32_t cbnz_max_forward = kCbnzIP1Plus0Insn | 0x007fffe0;
  const uint32_t cbnz_max_backward = kCbnzIP1Plus0Insn | 0x00800000;
  const std::vector<uint8_t> expected_code1 = RawCode({kNopInsn, cbnz_max_forward, kLdrWInsn});
  const std::vector<uint8_t> expected_code2 = RawCode({cbnz_max_backward, kLdrWInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkBeforeFiller) {
  // Based on the first part of BakerOffsetThunkInTheMiddle but the CBNZ is one instruction
  // earlier, so the thunk is emitted before the filler.
  // Use offset = 0, base_reg = 0, ref_reg = 0; the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 0;
  const std::vector<uint8_t> raw_code1 = RawCode({kCbnzIP1Plus0Insn, kLdrWInsn, kNopInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data =
      Arm64RelativePatcher::EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  // Place the filler so that a thunk after it would start 1MiB from the beginning of
  // method 1's code. The CBNZ at offset 0 falls 4 bytes short of reaching that far,
  // so the thunk must be emitted before the filler.
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  Link();

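  // The thunk is placed right after method 1's code, at the next kArm64Alignment-aligned
  // offset, so the expected CBNZ displacement is the aligned code size minus the literal
  // offset.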
  const uint32_t cbnz_offset = RoundUp(raw_code1.size(), kArm64Alignment) - kLiteralOffset1;
  const uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
  const std::vector<uint8_t> expected_code1 = RawCode({cbnz, kLdrWInsn, kNopInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkInTheMiddleUnreachableFromLast) {
  // Based on BakerOffsetThunkInTheMiddle but the CBNZ in the last method is preceded
  // by a NOP and cannot reach the thunk in the middle, so we emit an extra thunk at the end.
  // Use offset = 0, base_reg = 0, ref_reg = 0; the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 4;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data =
      Arm64RelativePatcher::EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  // Allow thunk at 1MiB offset from the start of the method above. Literal offset being 4
  // allows the branch to reach that thunk.
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  // Enforce thunk reservation with a tiny method.
  AddCompiledMethod(MethodRef(3u), kNopCode);

  // If not for the extra NOP, this would allow reaching the thunk from the very beginning
  // of a method 1MiB away. Backward branch reaches the full 1MiB. Things to subtract:
  //   - thunk size and method 3 pre-header, rounded up (padding in between if needed)
  //   - method 3 code and method 4 pre-header, rounded up (padding in between if needed)
  //   - method 4 header (let there be no padding between method 4 code and method 5 pre-header).
  size_t thunk_size = CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0).size();
  size_t filler2_size =
      1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 4u);
  ArrayRef<const uint8_t> filler2_code(raw_filler2_code);
  AddCompiledMethod(MethodRef(4u), filler2_code);

  // Extra NOP compared to BakerOffsetThunkInTheMiddle.
  constexpr uint32_t kLiteralOffset2 = 4;
  const std::vector<uint8_t> raw_code2 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code2(raw_code2);
  const LinkerPatch patches2[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data),
  };
  AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2));

  Link();

  const uint32_t cbnz_max_forward = kCbnzIP1Plus0Insn | 0x007fffe0;
  const uint32_t cbnz_last_offset = RoundUp(raw_code2.size(), kArm64Alignment) - kLiteralOffset2;
  const uint32_t cbnz_last = kCbnzIP1Plus0Insn | (cbnz_last_offset << (5 - 2));
  const std::vector<uint8_t> expected_code1 = RawCode({kNopInsn, cbnz_max_forward, kLdrWInsn});
  const std::vector<uint8_t> expected_code2 = RawCode({kNopInsn, cbnz_last, kLdrWInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerArray) {
  uint32_t valid_regs[] = {
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
      10, 11, 12, 13, 14, 15, 18, 19,  // IP0 and IP1 are reserved.
      20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
      // LR and SP/ZR are reserved.
  };
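  // Build `LDR <Wref_reg>, [<Xbase_reg>, <Windex_reg>, LSL #2]`, choosing index and
  // reference registers different from base_reg.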
  auto ldr = [](uint32_t base_reg) {
    uint32_t index_reg = (base_reg == 0u) ? 1u : 0u;
    uint32_t ref_reg = (base_reg == 2u) ? 3u : 2u;
    return kLdrWLsl2Insn | (index_reg << 16) | (base_reg << 5) | ref_reg;
  };
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 0u;
  uint32_t method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    ++method_idx;
    const std::vector<uint8_t> raw_code = RawCode({kCbnzIP1Plus0Insn, ldr(base_reg)});
    ASSERT_EQ(kMethodCodeSize, raw_code.size());
    ArrayRef<const uint8_t> code(raw_code);
    const LinkerPatch patches[] = {
        LinkerPatch::BakerReadBarrierBranchPatch(
            kLiteralOffset, Arm64RelativePatcher::EncodeBakerReadBarrierArrayData(base_reg)),
    };
    AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    ++method_idx;
    uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
    uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
    const std::vector<uint8_t> expected_code = RawCode({cbnz, ldr(base_reg)});
    ASSERT_EQ(kMethodCodeSize, expected_code.size());
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

    std::vector<uint8_t> expected_thunk = CompileBakerArrayThunk(base_reg);
    ASSERT_GT(output_.size(), thunk_offset);
    ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
    ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                           expected_thunk.size());
    if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
      ASSERT_TRUE(false);
    }

    // Verify that the lock word for the gray bit check is loaded from the correct address
    // below base_reg, which points to the array data.
    static constexpr size_t kGrayCheckInsns = 5;
    ASSERT_GE(output_.size() - thunk_offset, 4u * kGrayCheckInsns);
    int32_t data_offset =
        mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
    int32_t offset = mirror::Object::MonitorOffset().Int32Value() - data_offset;
    ASSERT_LT(offset, 0);
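    // LDUR takes a signed 9-bit unscaled offset in bits 12-20, so the negative offset is
    // masked with 0x1ffu before shifting into place.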
    const uint32_t load_lock_word =
        kLdurWInsn |
        ((offset & 0x1ffu) << 12) |
        (base_reg << 5) |
        /* ip0 */ 16;
    EXPECT_EQ(load_lock_word, GetOutputInsn(thunk_offset));
    // Verify the gray bit check.
    const uint32_t check_gray_bit_without_offset =
        0x37000000u | (LockWord::kReadBarrierStateShift << 19) | /* ip0 */ 16;
    EXPECT_EQ(check_gray_bit_without_offset, GetOutputInsn(thunk_offset + 4u) & 0xfff8001fu);
    // Verify the fake dependency.
    const uint32_t fake_dependency =
        0x8b408000u |             // ADD Xd, Xn, Xm, LSR 32
        (/* ip0 */ 16 << 16) |    // Xm = ip0
        (base_reg << 5) |         // Xn = base_reg
        base_reg;                 // Xd = base_reg
    EXPECT_EQ(fake_dependency, GetOutputInsn(thunk_offset + 12u));
    // Do not check the rest of the implementation.

    // The next thunk follows on the next aligned offset.
    thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerGcRoot) {
  uint32_t valid_regs[] = {
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
      10, 11, 12, 13, 14, 15, 18, 19,  // IP0 and IP1 are reserved.
      20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
      // LR and SP/ZR are reserved.
  };
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 4u;
  uint32_t method_idx = 0u;
  for (uint32_t root_reg : valid_regs) {
    ++method_idx;
    uint32_t ldr = kLdrWInsn | (/* offset */ 8 << (10 - 2)) | (/* base_reg */ 0 << 5) | root_reg;
    const std::vector<uint8_t> raw_code = RawCode({ldr, kCbnzIP1Plus0Insn});
    ASSERT_EQ(kMethodCodeSize, raw_code.size());
    ArrayRef<const uint8_t> code(raw_code);
    const LinkerPatch patches[] = {
        LinkerPatch::BakerReadBarrierBranchPatch(
            kLiteralOffset, Arm64RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg)),
    };
    AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t root_reg : valid_regs) {
    ++method_idx;
    uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
    uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
    uint32_t ldr = kLdrWInsn | (/* offset */ 8 << (10 - 2)) | (/* base_reg */ 0 << 5) | root_reg;
    const std::vector<uint8_t> expected_code = RawCode({ldr, cbnz});
    ASSERT_EQ(kMethodCodeSize, expected_code.size());
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

    std::vector<uint8_t> expected_thunk = CompileBakerGcRootThunk(root_reg);
    ASSERT_GT(output_.size(), thunk_offset);
    ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
    ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                           expected_thunk.size());
    if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
      ASSERT_TRUE(false);
    }

    // Verify that the fast-path null-check CBZ uses the correct register, i.e. root_reg.
    ASSERT_GE(output_.size() - thunk_offset, 4u);
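    // CBZ is 0x34000000u with the tested register in bits 0-4; the 0xff00001fu mask
    // ignores the imm19 branch offset.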
    ASSERT_EQ(0x34000000u | root_reg, GetOutputInsn(thunk_offset) & 0xff00001fu);
    // Do not check the rest of the implementation.

    // The next thunk follows on the next aligned offset.
    thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerAndMethodCallInteraction) {
  // During development, there was a `DCHECK_LE(MaxNextOffset(), next_thunk.MaxNextOffset());`
  // in `ArmBaseRelativePatcher::ThunkData::MakeSpaceBefore()` which does not necessarily
  // hold when we're reserving thunks of different sizes. This test exposes the situation
  // by using Baker thunks and a method call thunk.

  // Add a method call patch that can reach up to method 1's offset + 128MiB.
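  // (BL encodes a signed imm26 word offset, giving a branch range of +/-128MiB.)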
  uint32_t method_idx = 0u;
  constexpr size_t kMethodCallLiteralOffset = 4u;
  constexpr uint32_t kMissingMethodIdx = 2u;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kBlPlus0});
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(kMethodCallLiteralOffset, nullptr, 2u),
  };
  ArrayRef<const uint8_t> code1(raw_code1);
  ++method_idx;
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(method1_patches));

  // Skip kMissingMethodIdx.
  ++method_idx;
  ASSERT_EQ(kMissingMethodIdx, method_idx);
  // Add a method with the right size so that the code for the next method starts 1MiB
  // after the code for method 1.
  size_t filler_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> filler_code = GenNops(filler_size / 4u);
  ++method_idx;
  AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code));
  // Add 126 methods with 1MiB code+header, making the code for the next method start 1MiB
  // before the currently scheduled MaxNextOffset() for the method call thunk.
  for (uint32_t i = 0; i != 126; ++i) {
    filler_size = 1 * MB - sizeof(OatQuickMethodHeader);
    filler_code = GenNops(filler_size / 4u);
    ++method_idx;
    AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code));
  }

  // Add 2 Baker GC root patches to the last method, one that would allow the thunk at
  // 1MiB + kArm64Alignment, i.e. kArm64Alignment after the method call thunk, and a
  // second one that needs it kArm64Alignment after that. Given that the size of the GC root
  // thunk is more than the space required by the method call thunk plus kArm64Alignment,
  // this pushes the first GC root thunk's pending MaxNextOffset() before the method call
  // thunk's pending MaxNextOffset(), which needs to be adjusted.
  ASSERT_LT(RoundUp(CompileMethodCallThunk().size(), kArm64Alignment) + kArm64Alignment,
            CompileBakerGcRootThunk(/* root_reg */ 0).size());
  static_assert(kArm64Alignment == 16, "Code below assumes kArm64Alignment == 16");
  constexpr size_t kBakerLiteralOffset1 = 4u + kArm64Alignment;
  constexpr size_t kBakerLiteralOffset2 = 4u + 2 * kArm64Alignment;
  // Use offset = 0, base_reg = 0; the LDR is simply `kLdrWInsn | root_reg`.
  const uint32_t ldr1 = kLdrWInsn | /* root_reg */ 1;
  const uint32_t ldr2 = kLdrWInsn | /* root_reg */ 2;
  const std::vector<uint8_t> last_method_raw_code = RawCode({
      kNopInsn, kNopInsn, kNopInsn, kNopInsn,  // Padding before first GC root read barrier.
      ldr1, kCbnzIP1Plus0Insn,                 // First GC root LDR with read barrier.
      kNopInsn, kNopInsn,                      // Padding before second GC root read barrier.
      ldr2, kCbnzIP1Plus0Insn,                 // Second GC root LDR with read barrier.
  });
  uint32_t encoded_data1 =
      Arm64RelativePatcher::EncodeBakerReadBarrierGcRootData(/* root_reg */ 1);
  uint32_t encoded_data2 =
      Arm64RelativePatcher::EncodeBakerReadBarrierGcRootData(/* root_reg */ 2);
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset1, encoded_data1),
      LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset2, encoded_data2),
  };
  ++method_idx;
  AddCompiledMethod(MethodRef(method_idx),
                    ArrayRef<const uint8_t>(last_method_raw_code),
                    ArrayRef<const LinkerPatch>(last_method_patches));

  // The main purpose of the test is to check that Link() does not cause a crash.
  Link();

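  // Method 1 is followed by one filler that makes the next method start 1MiB later and by
  // 126 fillers of exactly 1MiB code+header each, so the last method's code starts 127MiB
  // after the code for method 1.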
  ASSERT_EQ(127 * MB, GetMethodOffset(method_idx) - GetMethodOffset(1u));
}

}  // namespace linker
}  // namespace art