/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "linker/arm/relative_patcher_thumb2.h"

#include "base/casts.h"
#include "linker/relative_patcher_test.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object.h"
#include "oat_quick_method_header.h"

namespace art {
namespace linker {

class Thumb2RelativePatcherTest : public RelativePatcherTest {
 public:
  Thumb2RelativePatcherTest() : RelativePatcherTest(InstructionSet::kThumb2, "default") { }

 protected:
  static const uint8_t kCallRawCode[];
  static const ArrayRef<const uint8_t> kCallCode;
  static const uint8_t kNopRawCode[];
  static const ArrayRef<const uint8_t> kNopCode;
  static const uint8_t kUnpatchedPcRelativeRawCode[];
  static const ArrayRef<const uint8_t> kUnpatchedPcRelativeCode;
  static const uint32_t kPcInsnOffset;

  // The PC in Thumb mode is 4 bytes after the instruction location.
  static constexpr uint32_t kPcAdjustment = 4u;

  // Branches within range [-256, 256) can be created from these by adding the low 8 bits.
  static constexpr uint32_t kBlPlus0 = 0xf000f800u;
  static constexpr uint32_t kBlMinus256 = 0xf7ffff00u;

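  // The 32-bit BL (encoding T1) carries a signed 25-bit byte offset (S:I1:I2:imm10:imm11:'0'),
  // i.e. a branch range of [-16MiB, +16MiB-2] from the adjusted PC; the special values below
  // encode those extremes.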
  // Special BL values.
  static constexpr uint32_t kBlPlusMax = 0xf3ffd7ffu;
  static constexpr uint32_t kBlMinusMax = 0xf400d000u;

  // BNE +0, 32-bit, encoding T3. Bits 0-10, 11, 13, 16-21, 26 are placeholder for target offset.
  static constexpr uint32_t kBneWPlus0 = 0xf0408000u;

  // LDR immediate, 16-bit, encoding T1. Bits 6-10 are imm5, 0-2 are Rt, 3-5 are Rn.
  static constexpr uint32_t kLdrInsn = 0x6800u;

  // LDR immediate, 32-bit, encoding T3. Bits 0-11 are offset, 12-15 are Rt, 16-20 are Rn.
  static constexpr uint32_t kLdrWInsn = 0xf8d00000u;

  // LDR immediate, negative offset, encoding T4. Bits 0-7 are the offset to subtract.
  static constexpr uint32_t kLdrNegativeOffset = 0xf8500c00u;

  // LDR register, lsl #2. Bits 4-5 are the imm2, i.e. the lsl shift.
  static constexpr uint32_t kLdrRegLsl2 = 0xf8500020u;

  // NOP instructions.
  static constexpr uint32_t kNopInsn = 0xbf00u;
  static constexpr uint32_t kNopWInsn = 0xf3af8000u;

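  // Insert a Thumb2 instruction into `code` at byte position `pos`. A 16-bit instruction is
  // stored as one little-endian halfword; a 32-bit instruction as two little-endian halfwords
  // with the leading (high) halfword first, matching the Thumb2 instruction stream layout.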
  void InsertInsn(std::vector<uint8_t>* code, size_t pos, uint32_t insn) {
    CHECK_LE(pos, code->size());
    if (IsUint<16>(insn)) {
      const uint8_t insn_code[] = {
          static_cast<uint8_t>(insn),
          static_cast<uint8_t>(insn >> 8),
      };
      static_assert(sizeof(insn_code) == 2u, "Invalid sizeof(insn_code).");
      code->insert(code->begin() + pos, insn_code, insn_code + sizeof(insn_code));
    } else {
      const uint8_t insn_code[] = {
          static_cast<uint8_t>(insn >> 16),
          static_cast<uint8_t>(insn >> 24),
          static_cast<uint8_t>(insn),
          static_cast<uint8_t>(insn >> 8),
      };
      static_assert(sizeof(insn_code) == 4u, "Invalid sizeof(insn_code).");
      code->insert(code->begin() + pos, insn_code, insn_code + sizeof(insn_code));
    }
  }

  void PushBackInsn(std::vector<uint8_t>* code, uint32_t insn) {
    InsertInsn(code, code->size(), insn);
  }

  std::vector<uint8_t> GenNops(size_t num_nops) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 2u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    return result;
  }

  std::vector<uint8_t> RawCode(std::initializer_list<uint32_t> insns) {
    std::vector<uint8_t> raw_code;
    size_t number_of_16_bit_insns =
        std::count_if(insns.begin(), insns.end(), [](uint32_t x) { return IsUint<16>(x); });
    raw_code.reserve(insns.size() * 4u - number_of_16_bit_insns * 2u);
    for (uint32_t insn : insns) {
      PushBackInsn(&raw_code, insn);
    }
    return raw_code;
  }

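  // Encode a BNE.W (encoding T3) at `bne_offset` branching to `target_offset`. The displacement
  // is relative to the Thumb PC (instruction offset + kPcAdjustment); invalid inputs return
  // 0xffffffff so that the subsequent code comparison fails visibly.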
  uint32_t BneWWithOffset(uint32_t bne_offset, uint32_t target_offset) {
    if (!IsAligned<2u>(bne_offset)) {
      LOG(ERROR) << "Unaligned bne_offset: " << bne_offset;
      return 0xffffffffu;  // Fails code diff later.
    }
    if (!IsAligned<2u>(target_offset)) {
      LOG(ERROR) << "Unaligned target_offset: " << target_offset;
      return 0xffffffffu;  // Fails code diff later.
    }
    uint32_t diff = target_offset - bne_offset - kPcAdjustment;
    DCHECK_ALIGNED(diff, 2u);
    if ((diff >> 20) != 0 && (diff >> 20) != 0xfffu) {
      LOG(ERROR) << "Target out of range: " << diff;
      return 0xffffffffu;  // Fails code diff later.
    }
    return kBneWPlus0 | ((diff >> 1) & 0x7ffu)          // imm11
                      | (((diff >> 12) & 0x3fu) << 16)  // imm6
                      | (((diff >> 18) & 1) << 13)      // J1
                      | (((diff >> 19) & 1) << 11)      // J2
                      | (((diff >> 20) & 1) << 26);     // S
  }

  bool Create2MethodsWithGap(const ArrayRef<const uint8_t>& method1_code,
                             const ArrayRef<const LinkerPatch>& method1_patches,
                             const ArrayRef<const uint8_t>& method3_code,
                             const ArrayRef<const LinkerPatch>& method3_patches,
                             uint32_t distance_without_thunks) {
    CHECK_EQ(distance_without_thunks % kArmAlignment, 0u);
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    AddCompiledMethod(MethodRef(1u), method1_code, method1_patches);

    // We want to put method3 at a precise offset.
    const uint32_t method3_offset = method1_offset + distance_without_thunks;
    CHECK_ALIGNED(method3_offset, kArmAlignment);

    // Calculate size of method2 so that we put method3 at the correct place.
    const uint32_t method1_end = method1_offset + method1_code.size();
    const uint32_t method2_offset =
        method1_end + CodeAlignmentSize(method1_end) + sizeof(OatQuickMethodHeader);
    const uint32_t method2_size = (method3_offset - sizeof(OatQuickMethodHeader) - method2_offset);
    std::vector<uint8_t> method2_raw_code(method2_size);
    ArrayRef<const uint8_t> method2_code(method2_raw_code);
    AddCompiledMethod(MethodRef(2u), method2_code);

    AddCompiledMethod(MethodRef(3u), method3_code, method3_patches);

    Link();

    // Check assumptions.
    CHECK_EQ(GetMethodOffset(1), method1_offset);
    CHECK_EQ(GetMethodOffset(2), method2_offset);
    auto result3 = method_offset_map_.FindMethodOffset(MethodRef(3));
    CHECK(result3.first);
    // There may be a thunk in the gap before method3.
    if (result3.second == method3_offset + 1 /* thumb mode */) {
      return false;  // No thunk.
    } else {
      uint32_t thunk_end =
          CompiledCode::AlignCode(method3_offset - sizeof(OatQuickMethodHeader),
                                  InstructionSet::kThumb2) +
          MethodCallThunkSize();
      uint32_t header_offset = thunk_end + CodeAlignmentSize(thunk_end);
      CHECK_EQ(result3.second, header_offset + sizeof(OatQuickMethodHeader) + 1 /* thumb mode */);
      return true;  // Thunk present.
    }
  }

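  // Linked method addresses have the Thumb mode bit (bit 0) set; return the even code offset.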
  uint32_t GetMethodOffset(uint32_t method_idx) {
    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
    CHECK(result.first);
    CHECK_NE(result.second & 1u, 0u);
    return result.second - 1 /* thumb mode */;
  }

  std::vector<uint8_t> CompileMethodCallThunk() {
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetMethodCallKey();
    return static_cast<Thumb2RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  uint32_t MethodCallThunkSize() {
    return CompileMethodCallThunk().size();
  }

  bool CheckThunk(uint32_t thunk_offset) {
    const std::vector<uint8_t> expected_code = CompileMethodCallThunk();
    if (output_.size() < thunk_offset + expected_code.size()) {
      LOG(ERROR) << "output_.size() == " << output_.size() << " < "
          << "thunk_offset + expected_code.size() == " << (thunk_offset + expected_code.size());
      return false;
    }
    ArrayRef<const uint8_t> linked_code(&output_[thunk_offset], expected_code.size());
    if (linked_code == ArrayRef<const uint8_t>(expected_code)) {
      return true;
    }
    // Log failure info.
    DumpDiff(ArrayRef<const uint8_t>(expected_code), linked_code);
    return false;
  }

  std::vector<uint8_t> GenNopsAndBl(size_t num_nops, uint32_t bl) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 2u + 4u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    PushBackInsn(&result, bl);
    return result;
  }

  void TestStringBssEntry(uint32_t bss_begin, uint32_t string_entry_offset);
  void TestStringReference(uint32_t string_offset);
  void CheckPcRelativePatch(const ArrayRef<const LinkerPatch>& patches, uint32_t target_offset);

  std::vector<uint8_t> CompileBakerOffsetThunk(uint32_t base_reg,
                                               uint32_t holder_reg,
                                               bool narrow) {
    const LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(base_reg, holder_reg, narrow));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Thumb2RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  std::vector<uint8_t> CompileBakerArrayThunk(uint32_t base_reg) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Thumb2RelativePatcher::EncodeBakerReadBarrierArrayData(base_reg));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Thumb2RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

  std::vector<uint8_t> CompileBakerGcRootThunk(uint32_t root_reg, bool narrow) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        0u, Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg, narrow));
    ArmBaseRelativePatcher::ThunkKey key = ArmBaseRelativePatcher::GetBakerThunkKey(patch);
    return down_cast<Thumb2RelativePatcher*>(patcher_.get())->CompileThunk(key);
  }

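  // Read a 32-bit Thumb2 instruction back from the linked output, undoing the halfword layout
  // used by InsertInsn(): leading halfword first, each halfword little-endian.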
  uint32_t GetOutputInsn32(uint32_t offset) {
    CHECK_LE(offset, output_.size());
    CHECK_GE(output_.size() - offset, 4u);
    return (static_cast<uint32_t>(output_[offset]) << 16) |
           (static_cast<uint32_t>(output_[offset + 1]) << 24) |
           (static_cast<uint32_t>(output_[offset + 2]) << 0) |
           (static_cast<uint32_t>(output_[offset + 3]) << 8);
  }

  uint16_t GetOutputInsn16(uint32_t offset) {
    CHECK_LE(offset, output_.size());
    CHECK_GE(output_.size() - offset, 2u);
    return (static_cast<uint32_t>(output_[offset]) << 0) |
           (static_cast<uint32_t>(output_[offset + 1]) << 8);
  }

  void TestBakerFieldWide(uint32_t offset, uint32_t ref_reg);
  void TestBakerFieldNarrow(uint32_t offset, uint32_t ref_reg);
};

const uint8_t Thumb2RelativePatcherTest::kCallRawCode[] = {
    0x00, 0xf0, 0x00, 0xf8
};

const ArrayRef<const uint8_t> Thumb2RelativePatcherTest::kCallCode(kCallRawCode);

const uint8_t Thumb2RelativePatcherTest::kNopRawCode[] = {
    0x00, 0xbf
};

const ArrayRef<const uint8_t> Thumb2RelativePatcherTest::kNopCode(kNopRawCode);

const uint8_t Thumb2RelativePatcherTest::kUnpatchedPcRelativeRawCode[] = {
    0x40, 0xf2, 0x00, 0x00,   // MOVW r0, #0 (placeholder)
    0xc0, 0xf2, 0x00, 0x00,   // MOVT r0, #0 (placeholder)
    0x78, 0x44,               // ADD r0, pc
};
const ArrayRef<const uint8_t> Thumb2RelativePatcherTest::kUnpatchedPcRelativeCode(
    kUnpatchedPcRelativeRawCode);
const uint32_t Thumb2RelativePatcherTest::kPcInsnOffset = 8u;

void Thumb2RelativePatcherTest::TestStringBssEntry(uint32_t bss_begin,
                                                   uint32_t string_entry_offset) {
  constexpr uint32_t kStringIndex = 1u;
  string_index_to_offset_map_.Put(kStringIndex, string_entry_offset);
  bss_begin_ = bss_begin;
  const LinkerPatch patches[] = {
      LinkerPatch::StringBssEntryPatch(0u, nullptr, kPcInsnOffset, kStringIndex),
      LinkerPatch::StringBssEntryPatch(4u, nullptr, kPcInsnOffset, kStringIndex),
  };
  CheckPcRelativePatch(ArrayRef<const LinkerPatch>(patches), bss_begin_ + string_entry_offset);
}

void Thumb2RelativePatcherTest::TestStringReference(uint32_t string_offset) {
  constexpr uint32_t kStringIndex = 1u;
  string_index_to_offset_map_.Put(kStringIndex, string_offset);
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeStringPatch(0u, nullptr, kPcInsnOffset, kStringIndex),
      LinkerPatch::RelativeStringPatch(4u, nullptr, kPcInsnOffset, kStringIndex),
  };
  CheckPcRelativePatch(ArrayRef<const LinkerPatch>(patches), string_offset);
}

void Thumb2RelativePatcherTest::CheckPcRelativePatch(const ArrayRef<const LinkerPatch>& patches,
                                                     uint32_t target_offset) {
  AddCompiledMethod(MethodRef(1u), kUnpatchedPcRelativeCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t pc_base_offset = method1_offset + kPcInsnOffset + 4u /* PC adjustment */;
  uint32_t diff = target_offset - pc_base_offset;
  // Distribute the bits of the diff between the MOVW and MOVT:
  uint32_t diffw = diff & 0xffffu;
  uint32_t difft = diff >> 16;
  uint32_t movw = 0xf2400000u |           // MOVW r0, #0 (placeholder),
      ((diffw & 0xf000u) << (16 - 12)) |  // move imm4 from bits 12-15 to bits 16-19,
      ((diffw & 0x0800u) << (26 - 11)) |  // move imm from bit 11 to bit 26,
      ((diffw & 0x0700u) << (12 - 8)) |   // move imm3 from bits 8-10 to bits 12-14,
      ((diffw & 0x00ffu));                // keep imm8 at bits 0-7.
  uint32_t movt = 0xf2c00000u |           // MOVT r0, #0 (placeholder),
      ((difft & 0xf000u) << (16 - 12)) |  // move imm4 from bits 12-15 to bits 16-19,
      ((difft & 0x0800u) << (26 - 11)) |  // move imm from bit 11 to bit 26,
      ((difft & 0x0700u) << (12 - 8)) |   // move imm3 from bits 8-10 to bits 12-14,
      ((difft & 0x00ffu));                // keep imm8 at bits 0-7.
  const uint8_t expected_code[] = {
      static_cast<uint8_t>(movw >> 16), static_cast<uint8_t>(movw >> 24),
      static_cast<uint8_t>(movw >> 0), static_cast<uint8_t>(movw >> 8),
      static_cast<uint8_t>(movt >> 16), static_cast<uint8_t>(movt >> 24),
      static_cast<uint8_t>(movt >> 0), static_cast<uint8_t>(movt >> 8),
      0x78, 0x44,
  };
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallSelf) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  static const uint8_t expected_code[] = {
      0xff, 0xf7, 0xfe, 0xff
  };
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallOther) {
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(method1_patches));
  const LinkerPatch method2_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<const LinkerPatch>(method2_patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t method2_offset = GetMethodOffset(2u);
  uint32_t diff_after = method2_offset - (method1_offset + 4u /* PC adjustment */);
  ASSERT_EQ(diff_after & 1u, 0u);
  ASSERT_LT(diff_after >> 1, 1u << 8);  // Simple encoding, (diff_after >> 1) fits into 8 bits.
  static const uint8_t method1_expected_code[] = {
      0x00, 0xf0, static_cast<uint8_t>(diff_after >> 1), 0xf8
  };
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
  uint32_t diff_before = method1_offset - (method2_offset + 4u /* PC adjustment */);
  ASSERT_EQ(diff_before & 1u, 0u);
  ASSERT_GE(diff_before, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0.
  auto method2_expected_code = GenNopsAndBl(0u, kBlMinus256 | ((diff_before >> 1) & 0xffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallTrampoline) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t diff = kTrampolineOffset - (method1_offset + 4u);
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_GE(diff, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0 (checked as unsigned).
  auto expected_code = GenNopsAndBl(0u, kBlMinus256 | ((diff >> 1) & 0xffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallTrampolineTooFar) {
  constexpr uint32_t missing_method_index = 1024u;
  auto method3_raw_code = GenNopsAndBl(3u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method3 = 3u * 2u;  // After NOPs.
  ArrayRef<const uint8_t> method3_code(method3_raw_code);
  ASSERT_EQ(bl_offset_in_method3 + 4u, method3_code.size());
  const LinkerPatch method3_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method3, nullptr, missing_method_index),
  };

  constexpr uint32_t just_over_max_negative_disp = 16 * MB + 2 - 4u /* PC adjustment */;
  bool thunk_in_gap = Create2MethodsWithGap(kNopCode,
                                            ArrayRef<const LinkerPatch>(),
                                            method3_code,
                                            ArrayRef<const LinkerPatch>(method3_patches),
                                            just_over_max_negative_disp - bl_offset_in_method3);
  ASSERT_FALSE(thunk_in_gap);  // There should be a thunk, but it should be after method3.
  ASSERT_FALSE(method_offset_map_.FindMethodOffset(MethodRef(missing_method_index)).first);

  // Check linked code.
  uint32_t method3_offset = GetMethodOffset(3u);
  uint32_t thunk_offset = CompiledCode::AlignCode(method3_offset + method3_code.size(),
                                                  InstructionSet::kThumb2);
  uint32_t diff = thunk_offset - (method3_offset + bl_offset_in_method3 + 4u /* PC adjustment */);
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_LT(diff >> 1, 1u << 8);  // Simple encoding, (diff >> 1) fits into 8 bits.
  auto expected_code = GenNopsAndBl(3u, kBlPlus0 | ((diff >> 1) & 0xffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(3u), ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Thumb2RelativePatcherTest, CallOtherAlmostTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(3u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 3u * 2u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, 3u),
  };

  constexpr uint32_t max_positive_disp = 16 * MB - 2u + 4u /* PC adjustment */;
  bool thunk_in_gap = Create2MethodsWithGap(method1_code,
                                            ArrayRef<const LinkerPatch>(method1_patches),
                                            kNopCode,
                                            ArrayRef<const LinkerPatch>(),
                                            bl_offset_in_method1 + max_positive_disp);
  ASSERT_FALSE(thunk_in_gap);  // There should be no thunk.

  // Check linked code.
  auto expected_code = GenNopsAndBl(3u, kBlPlusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallOtherAlmostTooFarBefore) {
  auto method3_raw_code = GenNopsAndBl(2u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method3 = 2u * 2u;  // After NOPs.
  ArrayRef<const uint8_t> method3_code(method3_raw_code);
  ASSERT_EQ(bl_offset_in_method3 + 4u, method3_code.size());
  const LinkerPatch method3_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method3, nullptr, 1u),
  };

  constexpr uint32_t max_negative_disp = 16 * MB - 4u /* PC adjustment */;
  bool thunk_in_gap = Create2MethodsWithGap(kNopCode,
                                            ArrayRef<const LinkerPatch>(),
                                            method3_code,
                                            ArrayRef<const LinkerPatch>(method3_patches),
                                            max_negative_disp - bl_offset_in_method3);
  ASSERT_FALSE(thunk_in_gap);  // There should be no thunk.

  // Check linked code.
  auto expected_code = GenNopsAndBl(2u, kBlMinusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(3u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Thumb2RelativePatcherTest, CallOtherJustTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(2u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 2u * 2u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, 3u),
  };

  constexpr uint32_t just_over_max_positive_disp = 16 * MB + 4u /* PC adjustment */;
  bool thunk_in_gap = Create2MethodsWithGap(method1_code,
                                            ArrayRef<const LinkerPatch>(method1_patches),
                                            kNopCode,
                                            ArrayRef<const LinkerPatch>(),
                                            bl_offset_in_method1 + just_over_max_positive_disp);
  ASSERT_TRUE(thunk_in_gap);

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t method3_offset = GetMethodOffset(3u);
  ASSERT_TRUE(IsAligned<kArmAlignment>(method3_offset));
  uint32_t method3_header_offset = method3_offset - sizeof(OatQuickMethodHeader);
  uint32_t thunk_size = MethodCallThunkSize();
  uint32_t thunk_offset = RoundDown(method3_header_offset - thunk_size, kArmAlignment);
  DCHECK_EQ(thunk_offset + thunk_size + CodeAlignmentSize(thunk_offset + thunk_size),
            method3_header_offset);
  ASSERT_TRUE(IsAligned<kArmAlignment>(thunk_offset));
  uint32_t diff = thunk_offset - (method1_offset + bl_offset_in_method1 + 4u /* PC adjustment */);
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_GE(diff, 16 * MB - (1u << 9));  // Simple encoding, unknown bits fit into the low 8 bits.
  auto expected_code = GenNopsAndBl(2u, 0xf3ffd700 | ((diff >> 1) & 0xffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  CheckThunk(thunk_offset);
}

TEST_F(Thumb2RelativePatcherTest, CallOtherJustTooFarBefore) {
  auto method3_raw_code = GenNopsAndBl(3u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method3 = 3u * 2u;  // After NOPs.
  ArrayRef<const uint8_t> method3_code(method3_raw_code);
  ASSERT_EQ(bl_offset_in_method3 + 4u, method3_code.size());
  const LinkerPatch method3_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method3, nullptr, 1u),
  };

  constexpr uint32_t just_over_max_negative_disp = 16 * MB + 2 - 4u /* PC adjustment */;
  bool thunk_in_gap = Create2MethodsWithGap(kNopCode,
                                            ArrayRef<const LinkerPatch>(),
                                            method3_code,
                                            ArrayRef<const LinkerPatch>(method3_patches),
                                            just_over_max_negative_disp - bl_offset_in_method3);
  ASSERT_FALSE(thunk_in_gap);  // There should be a thunk, but it should be after method3.

  // Check linked code.
  uint32_t method3_offset = GetMethodOffset(3u);
  uint32_t thunk_offset = CompiledCode::AlignCode(method3_offset + method3_code.size(),
                                                  InstructionSet::kThumb2);
  uint32_t diff = thunk_offset - (method3_offset + bl_offset_in_method3 + 4u /* PC adjustment */);
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_LT(diff >> 1, 1u << 8);  // Simple encoding, (diff >> 1) fits into 8 bits.
  auto expected_code = GenNopsAndBl(3u, kBlPlus0 | ((diff >> 1) & 0xffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(3u), ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Thumb2RelativePatcherTest, StringBssEntry1) {
  TestStringBssEntry(0x00ff0000u, 0x00fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringBssEntry2) {
  TestStringBssEntry(0x02ff0000u, 0x05fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringBssEntry3) {
  TestStringBssEntry(0x08ff0000u, 0x08fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringBssEntry4) {
  TestStringBssEntry(0xd0ff0000u, 0x60fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringReference1) {
  TestStringReference(0x00ff00fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringReference2) {
  TestStringReference(0x02ff05fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringReference3) {
  TestStringReference(0x08ff08fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

TEST_F(Thumb2RelativePatcherTest, StringReference4) {
  TestStringReference(0xd0ff60fcu);
  ASSERT_LT(GetMethodOffset(1u), 0xfcu);
}

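// Exercise the Baker read barrier fast path for a field load using a wide (32-bit) LDR: each
// generated method is a BNE to the slow-path thunk followed by the LDR, and the linked code as
// well as the thunk's null check, lock word load, gray-bit test and fake dependency are verified.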
void Thumb2RelativePatcherTest::TestBakerFieldWide(uint32_t offset, uint32_t ref_reg) {
  uint32_t valid_regs[] = {
      0,  1,  2,  3,  5,  6,  7,  // R4 is reserved for entrypoint address.
      8,  9, 10, 11,              // IP, SP, LR and PC are reserved.
  };
  DCHECK_ALIGNED(offset, 4u);
  DCHECK_LT(offset, 4 * KB);
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 0u;
  uint32_t method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    for (uint32_t holder_reg : valid_regs) {
      uint32_t ldr = kLdrWInsn | offset | (base_reg << 16) | (ref_reg << 12);
      const std::vector<uint8_t> raw_code = RawCode({kBneWPlus0, ldr});
      ASSERT_EQ(kMethodCodeSize, raw_code.size());
      ArrayRef<const uint8_t> code(raw_code);
      uint32_t encoded_data = Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(
          base_reg, holder_reg, /* narrow */ false);
      const LinkerPatch patches[] = {
          LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset, encoded_data),
      };
      ++method_idx;
      AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
    }
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    for (uint32_t holder_reg : valid_regs) {
      ++method_idx;
      uint32_t bne = BneWWithOffset(GetMethodOffset(method_idx) + kLiteralOffset, thunk_offset);
      uint32_t ldr = kLdrWInsn | offset | (base_reg << 16) | (ref_reg << 12);
      const std::vector<uint8_t> expected_code = RawCode({bne, ldr});
      ASSERT_EQ(kMethodCodeSize, expected_code.size()) << "bne=0x" << std::hex << bne;
      ASSERT_TRUE(
          CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

      std::vector<uint8_t> expected_thunk =
          CompileBakerOffsetThunk(base_reg, holder_reg, /* narrow */ false);
      ASSERT_GT(output_.size(), thunk_offset);
      ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
      ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                             expected_thunk.size());
      if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
        DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
        ASSERT_TRUE(false);
      }

      size_t gray_check_offset = thunk_offset;
      if (holder_reg == base_reg) {
        // Verify that the null-check uses the correct register, i.e. holder_reg.
        if (holder_reg < 8) {
          ASSERT_GE(output_.size() - gray_check_offset, 2u);
          ASSERT_EQ(0xb100 | holder_reg, GetOutputInsn16(thunk_offset) & 0xfd07u);
          gray_check_offset += 2u;
        } else {
          ASSERT_GE(output_.size() - gray_check_offset, 6u);
          ASSERT_EQ(0xf1b00f00u | (holder_reg << 16), GetOutputInsn32(thunk_offset) & 0xfbff8f00u);
          ASSERT_EQ(0xd000u, GetOutputInsn16(thunk_offset + 4u) & 0xff00u);  // BEQ
          gray_check_offset += 6u;
        }
      }
      // Verify that the lock word for gray bit check is loaded from the holder address.
      ASSERT_GE(output_.size() - gray_check_offset,
                4u * /* 32-bit instructions */ 4u + 2u * /* 16-bit instructions */ 2u);
      const uint32_t load_lock_word =
          kLdrWInsn |
          (holder_reg << 16) |
          (/* IP */ 12 << 12) |
          mirror::Object::MonitorOffset().Uint32Value();
      ASSERT_EQ(load_lock_word, GetOutputInsn32(gray_check_offset));
      // Verify the gray bit check.
      DCHECK_GE(LockWord::kReadBarrierStateShift, 8u);  // ROR modified immediate.
      uint32_t ror_shift = 7 + (32 - LockWord::kReadBarrierStateShift);
      const uint32_t tst_gray_bit_without_offset =
          0xf0100f00 | (/* IP */ 12 << 16)
          | (((ror_shift >> 4) & 1) << 26)   // i
          | (((ror_shift >> 1) & 7) << 12)   // imm3
          | ((ror_shift & 1) << 7);          // imm8, ROR('1':imm8<7:0>, ror_shift).
      EXPECT_EQ(tst_gray_bit_without_offset, GetOutputInsn32(gray_check_offset + 4u));
      EXPECT_EQ(0xd100u, GetOutputInsn16(gray_check_offset + 8u) & 0xff00u);  // BNE
      // Verify the fake dependency (skip "ADD LR, LR, #ldr_offset").
      const uint32_t fake_dependency =
          0xeb000010 |            // ADD Rd, Rn, Rm, LSR 32 (type=01, imm3=000, imm2=00)
          (/* IP */ 12) |         // Rm = IP
          (base_reg << 16) |      // Rn = base_reg
          (base_reg << 8);        // Rd = base_reg
      EXPECT_EQ(fake_dependency, GetOutputInsn32(gray_check_offset + 14u));
      // Do not check the rest of the implementation.

      // The next thunk follows on the next aligned offset.
      thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment);
    }
  }
}

void Thumb2RelativePatcherTest::TestBakerFieldNarrow(uint32_t offset, uint32_t ref_reg) {
  uint32_t valid_regs[] = {
      0,  1,  2,  3,  5,  6,  7,  // R4 is reserved for entrypoint address.
      8,  9, 10, 11,              // IP, SP, LR and PC are reserved.
  };
  DCHECK_ALIGNED(offset, 4u);
  DCHECK_LT(offset, 32u);
  constexpr size_t kMethodCodeSize = 6u;
  constexpr size_t kLiteralOffset = 0u;
  uint32_t method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    if (base_reg >= 8u) {
      continue;
    }
    for (uint32_t holder_reg : valid_regs) {
      uint32_t ldr = kLdrInsn | (offset << (6 - 2)) | (base_reg << 3) | ref_reg;
      const std::vector<uint8_t> raw_code = RawCode({kBneWPlus0, ldr});
      ASSERT_EQ(kMethodCodeSize, raw_code.size());
      ArrayRef<const uint8_t> code(raw_code);
      uint32_t encoded_data = Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(
          base_reg, holder_reg, /* narrow */ true);
      const LinkerPatch patches[] = {
          LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset, encoded_data),
      };
      ++method_idx;
      AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
    }
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    if (base_reg >= 8u) {
      continue;
    }
    for (uint32_t holder_reg : valid_regs) {
      ++method_idx;
      uint32_t bne = BneWWithOffset(GetMethodOffset(method_idx) + kLiteralOffset, thunk_offset);
      uint32_t ldr = kLdrInsn | (offset << (6 - 2)) | (base_reg << 3) | ref_reg;
      const std::vector<uint8_t> expected_code = RawCode({bne, ldr});
      ASSERT_EQ(kMethodCodeSize, expected_code.size()) << "bne=0x" << std::hex << bne;
      ASSERT_TRUE(
          CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

      std::vector<uint8_t> expected_thunk =
          CompileBakerOffsetThunk(base_reg, holder_reg, /* narrow */ true);
      ASSERT_GT(output_.size(), thunk_offset);
      ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
      ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                             expected_thunk.size());
      if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
        DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
        ASSERT_TRUE(false);
      }

      size_t gray_check_offset = thunk_offset;
      if (holder_reg == base_reg) {
        // Verify that the null-check uses the correct register, i.e. holder_reg.
        if (holder_reg < 8) {
          ASSERT_GE(output_.size() - gray_check_offset, 2u);
          ASSERT_EQ(0xb100 | holder_reg, GetOutputInsn16(thunk_offset) & 0xfd07u);
          gray_check_offset += 2u;
        } else {
          ASSERT_GE(output_.size() - gray_check_offset, 6u);
          ASSERT_EQ(0xf1b00f00u | (holder_reg << 16), GetOutputInsn32(thunk_offset) & 0xfbff8f00u);
          ASSERT_EQ(0xd000u, GetOutputInsn16(thunk_offset + 4u) & 0xff00u);  // BEQ
          gray_check_offset += 6u;
        }
      }
      // Verify that the lock word for gray bit check is loaded from the holder address.
      ASSERT_GE(output_.size() - gray_check_offset,
                4u * /* 32-bit instructions */ 4u + 2u * /* 16-bit instructions */ 2u);
      const uint32_t load_lock_word =
          kLdrWInsn |
          (holder_reg << 16) |
          (/* IP */ 12 << 12) |
          mirror::Object::MonitorOffset().Uint32Value();
      ASSERT_EQ(load_lock_word, GetOutputInsn32(gray_check_offset));
      // Verify the gray bit check.
      DCHECK_GE(LockWord::kReadBarrierStateShift, 8u);  // ROR modified immediate.
      uint32_t ror_shift = 7 + (32 - LockWord::kReadBarrierStateShift);
      const uint32_t tst_gray_bit_without_offset =
          0xf0100f00 | (/* IP */ 12 << 16)
          | (((ror_shift >> 4) & 1) << 26)   // i
          | (((ror_shift >> 1) & 7) << 12)   // imm3
          | ((ror_shift & 1) << 7);          // imm8, ROR('1':imm8<7:0>, ror_shift).
      EXPECT_EQ(tst_gray_bit_without_offset, GetOutputInsn32(gray_check_offset + 4u));
      EXPECT_EQ(0xd100u, GetOutputInsn16(gray_check_offset + 8u) & 0xff00u);  // BNE
      // Verify the fake dependency (skip "ADD LR, LR, #ldr_offset").
      const uint32_t fake_dependency =
          0xeb000010 |            // ADD Rd, Rn, Rm, LSR 32 (type=01, imm3=000, imm2=00)
          (/* IP */ 12) |         // Rm = IP
          (base_reg << 16) |      // Rn = base_reg
          (base_reg << 8);        // Rd = base_reg
      EXPECT_EQ(fake_dependency, GetOutputInsn32(gray_check_offset + 14u));
      // Do not check the rest of the implementation.

      // The next thunk follows on the next aligned offset.
      thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment);
    }
  }
}

#define TEST_BAKER_FIELD_WIDE(offset, ref_reg)    \
  TEST_F(Thumb2RelativePatcherTest,               \
         BakerOffsetWide##offset##_##ref_reg) {   \
    TestBakerFieldWide(offset, ref_reg);          \
  }

TEST_BAKER_FIELD_WIDE(/* offset */ 0, /* ref_reg */ 0)
TEST_BAKER_FIELD_WIDE(/* offset */ 8, /* ref_reg */ 3)
TEST_BAKER_FIELD_WIDE(/* offset */ 28, /* ref_reg */ 7)
TEST_BAKER_FIELD_WIDE(/* offset */ 0xffc, /* ref_reg */ 11)

#define TEST_BAKER_FIELD_NARROW(offset, ref_reg)  \
  TEST_F(Thumb2RelativePatcherTest,               \
         BakerOffsetNarrow##offset##_##ref_reg) { \
    TestBakerFieldNarrow(offset, ref_reg);        \
  }

TEST_BAKER_FIELD_NARROW(/* offset */ 0, /* ref_reg */ 0)
TEST_BAKER_FIELD_NARROW(/* offset */ 8, /* ref_reg */ 3)
TEST_BAKER_FIELD_NARROW(/* offset */ 28, /* ref_reg */ 7)

TEST_F(Thumb2RelativePatcherTest, BakerOffsetThunkInTheMiddle) {
  // One thunk in the middle with maximum distance branches to it from both sides.
  // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 6u;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopWInsn, kNopInsn, kBneWPlus0, kLdrWInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data = Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(
      /* base_reg */ 0, /* holder_reg */ 0, /* narrow */ false);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  constexpr uint32_t expected_thunk_offset =
      kLiteralOffset1 + kPcAdjustment + /* kMaxBcondPositiveDisplacement */ ((1 << 20) - 2u);
  static_assert(IsAligned<kArmAlignment>(expected_thunk_offset), "Target offset must be aligned.");
  size_t filler1_size = expected_thunk_offset -
                        RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 2u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  // Enforce thunk reservation with a tiny method.
  AddCompiledMethod(MethodRef(3u), kNopCode);

  constexpr uint32_t kLiteralOffset2 = 4;
  static_assert(IsAligned<kArmAlignment>(kLiteralOffset2 + kPcAdjustment),
                "PC for BNE must be aligned.");

  // Allow reaching the thunk from the very beginning of a method almost 1MiB away. Backward branch
  // reaches the full 1MiB but we need to take PC adjustment into account. Things to subtract:
  // - thunk size and method 3 pre-header, rounded up (padding in between if needed)
  // - method 3 code and method 4 pre-header, rounded up (padding in between if needed)
  // - method 4 header (let there be no padding between method 4 code and method 5 pre-header).
  size_t thunk_size =
      CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0, /* narrow */ false).size();
  size_t filler2_size =
      1 * MB - (kLiteralOffset2 + kPcAdjustment)
             - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArmAlignment)
             - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArmAlignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 2u);
  ArrayRef<const uint8_t> filler2_code(raw_filler2_code);
  AddCompiledMethod(MethodRef(4u), filler2_code);

  const std::vector<uint8_t> raw_code2 = RawCode({kNopWInsn, kBneWPlus0, kLdrWInsn});
  ArrayRef<const uint8_t> code2(raw_code2);
  const LinkerPatch patches2[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data),
  };
  AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2));

  Link();

  uint32_t first_method_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(5u);
  EXPECT_EQ(2 * MB, last_method_offset - first_method_offset);

  const uint32_t bne_max_forward = kBneWPlus0 | 0x003f2fff;
  const uint32_t bne_max_backward = kBneWPlus0 | 0x04000000;
  const std::vector<uint8_t> expected_code1 =
      RawCode({kNopWInsn, kNopInsn, bne_max_forward, kLdrWInsn});
  const std::vector<uint8_t> expected_code2 = RawCode({kNopWInsn, bne_max_backward, kLdrWInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2)));
}

TEST_F(Thumb2RelativePatcherTest, BakerOffsetThunkBeforeFiller) {
  // Based on the first part of BakerOffsetThunkInTheMiddle but the BNE is one instruction
  // earlier, so the thunk is emitted before the filler.
  // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 4u;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopWInsn, kBneWPlus0, kLdrWInsn, kNopInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data = Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData(
      /* base_reg */ 0, /* holder_reg */ 0, /* narrow */ false);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  constexpr uint32_t expected_thunk_offset =
      kLiteralOffset1 + kPcAdjustment + /* kMaxBcondPositiveDisplacement + 2 */ (1u << 20);
  static_assert(IsAligned<kArmAlignment>(expected_thunk_offset), "Target offset must be aligned.");
  size_t filler1_size = expected_thunk_offset -
                        RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 2u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  Link();

  const uint32_t bne = BneWWithOffset(kLiteralOffset1, RoundUp(raw_code1.size(), kArmAlignment));
  const std::vector<uint8_t> expected_code1 = RawCode({kNopWInsn, bne, kLdrWInsn, kNopInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
}
| 902 | |
| 903 | TEST_F(Thumb2RelativePatcherTest, BakerOffsetThunkInTheMiddleUnreachableFromLast) { |
| 904 | // Based on BakerOffsetThunkInTheMiddle, but the BNE in the last method is preceded |
| 905 | // by a NOP and cannot reach the thunk in the middle, so we emit an extra thunk at the end. |
| 906 | // Use offset = 0, base_reg = 0, ref_reg = 0, so the LDR is simply `kLdrWInsn`. |
| 907 | constexpr uint32_t kLiteralOffset1 = 6u; |
| 908 | const std::vector<uint8_t> raw_code1 = RawCode({kNopWInsn, kNopInsn, kBneWPlus0, kLdrWInsn}); |
| 909 | ArrayRef<const uint8_t> code1(raw_code1); |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 910 | uint32_t encoded_data = Thumb2RelativePatcher::EncodeBakerReadBarrierFieldData( |
| 911 | /* base_reg */ 0, /* holder_reg */ 0, /* narrow */ false); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 912 | const LinkerPatch patches1[] = { |
| 913 | LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data), |
| 914 | }; |
| 915 | AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1)); |
| 916 | |
| 917 | constexpr uint32_t expected_thunk_offset = |
| 918 | kLiteralOffset1 + kPcAdjustment + /* kMaxBcondPositiveDisplacement */ ((1 << 20) - 2u); |
| 919 | static_assert(IsAligned<kArmAlignment>(expected_thunk_offset), "Target offset must be aligned."); |
| 920 | size_t filler1_size = expected_thunk_offset - |
| 921 | RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); |
| 922 | std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 2u); |
| 923 | ArrayRef<const uint8_t> filler1_code(raw_filler1_code); |
| 924 | AddCompiledMethod(MethodRef(2u), filler1_code); |
| 925 | |
| 926 | // Enforce thunk reservation with a tiny method. |
| 927 | AddCompiledMethod(MethodRef(3u), kNopCode); |
| 928 | |
| 929 | constexpr uint32_t kReachableFromOffset2 = 4; |
| 930 | constexpr uint32_t kLiteralOffset2 = kReachableFromOffset2 + 2; |
| 931 | static_assert(IsAligned<kArmAlignment>(kReachableFromOffset2 + kPcAdjustment), |
| 932 | "PC for BNE must be aligned."); |
| 933 | |
| 934 | // If not for the extra NOP, this would allow reaching the thunk from the BNE |
| 935 | // of a method 1MiB away. The backward branch reaches the full 1MiB, but we need to take |
| 936 | // the PC adjustment into account. Things to subtract: |
| 937 | // - thunk size and method 3 pre-header, rounded up (padding in between if needed) |
| 938 | // - method 3 code and method 4 pre-header, rounded up (padding in between if needed) |
| 939 | // - method 4 header (let there be no padding between method 4 code and method 5 pre-header). |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 940 | size_t thunk_size = |
| 941 | CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0, /* narrow */ false).size(); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 942 | size_t filler2_size = |
| 943 | 1 * MB - (kReachableFromOffset2 + kPcAdjustment) |
| 944 | - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArmAlignment) |
| 945 | - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArmAlignment) |
| 946 | - sizeof(OatQuickMethodHeader); |
| 947 | std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 2u); |
| 948 | ArrayRef<const uint8_t> filler2_code(raw_filler2_code); |
| 949 | AddCompiledMethod(MethodRef(4u), filler2_code); |
| 950 | |
| 951 | // Extra 16-bit NOP compared to BakerOffsetThunkInTheMiddle. |
| 952 | const std::vector<uint8_t> raw_code2 = RawCode({kNopWInsn, kNopInsn, kBneWPlus0, kLdrWInsn}); |
| 953 | ArrayRef<const uint8_t> code2(raw_code2); |
| 954 | const LinkerPatch patches2[] = { |
| 955 | LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data), |
| 956 | }; |
| 957 | AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2)); |
| 958 | |
| 959 | Link(); |
| 960 | |
| 961 | uint32_t first_method_offset = GetMethodOffset(1u); |
| 962 | uint32_t last_method_offset = GetMethodOffset(5u); |
| 963 | EXPECT_EQ(2 * MB, last_method_offset - first_method_offset); |
| 964 | |
| 965 | const uint32_t bne_max_forward = kBneWPlus0 | 0x003f2fff; |
| 966 | const uint32_t bne_last = |
| 967 | BneWWithOffset(kLiteralOffset2, RoundUp(raw_code2.size(), kArmAlignment)); |
| 968 | const std::vector<uint8_t> expected_code1 = |
| 969 | RawCode({kNopWInsn, kNopInsn, bne_max_forward, kLdrWInsn}); |
| 970 | const std::vector<uint8_t> expected_code2 = |
| 971 | RawCode({kNopWInsn, kNopInsn, bne_last, kLdrWInsn}); |
| 972 | ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1))); |
| 973 | ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2))); |
| 974 | } |
| 975 | |
| 976 | TEST_F(Thumb2RelativePatcherTest, BakerArray) { |
| 977 | uint32_t valid_regs[] = { |
| 978 | 0, 1, 2, 3, 5, 6, 7, // R4 is reserved for entrypoint address. |
| 979 | 8, 9, 10, 11, // R12-R15 (IP, SP, LR, PC) are reserved. |
| 980 | }; |
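// The helper below builds LDR.W <ref_reg>, [<base_reg>, <index_reg>, LSL #2]; the index and
// reference registers are merely chosen to differ from base_reg (and from each other).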
| 981 | auto ldr = [](uint32_t base_reg) { |
| 982 | uint32_t index_reg = (base_reg == 0u) ? 1u : 0u; |
| 983 | uint32_t ref_reg = (base_reg == 2) ? 3u : 2u; |
| 984 | return kLdrRegLsl2 | index_reg | (base_reg << 16) | (ref_reg << 12); |
| 985 | }; |
| 986 | constexpr size_t kMethodCodeSize = 8u; |
| 987 | constexpr size_t kLiteralOffset = 0u; |
| 988 | uint32_t method_idx = 0u; |
| 989 | for (uint32_t base_reg : valid_regs) { |
| 990 | ++method_idx; |
| 991 | const std::vector<uint8_t> raw_code = RawCode({kBneWPlus0, ldr(base_reg)}); |
| 992 | ASSERT_EQ(kMethodCodeSize, raw_code.size()); |
| 993 | ArrayRef<const uint8_t> code(raw_code); |
| 994 | const LinkerPatch patches[] = { |
| 995 | LinkerPatch::BakerReadBarrierBranchPatch( |
| 996 | kLiteralOffset, Thumb2RelativePatcher::EncodeBakerReadBarrierArrayData(base_reg)), |
| 997 | }; |
| 998 | AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches)); |
| 999 | } |
| 1000 | Link(); |
| 1001 | |
| 1002 | // All thunks are at the end. |
| 1003 | uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); |
| 1004 | method_idx = 0u; |
| 1005 | for (uint32_t base_reg : valid_regs) { |
| 1006 | ++method_idx; |
| 1007 | uint32_t bne = BneWWithOffset(GetMethodOffset(method_idx) + kLiteralOffset, thunk_offset); |
| 1008 | const std::vector<uint8_t> expected_code = RawCode({bne, ldr(base_reg)}); |
| 1009 | ASSERT_EQ(kMethodCodeSize, expected_code.size()); |
| 1010 | EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code))); |
| 1011 | |
| 1012 | std::vector<uint8_t> expected_thunk = CompileBakerArrayThunk(base_reg); |
| 1013 | ASSERT_GT(output_.size(), thunk_offset); |
| 1014 | ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size()); |
| 1015 | ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset, |
| 1016 | expected_thunk.size()); |
| 1017 | if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) { |
| 1018 | DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk); |
| 1019 | ASSERT_TRUE(false); |
| 1020 | } |
| 1021 | |
| 1022 | // Verify that the lock word for the gray bit check is loaded from the correct address, |
| 1023 | // i.e. a negative offset from base_reg, which points to the array data. |
| 1024 | ASSERT_GE(output_.size() - thunk_offset, |
| 1025 | 4u * /* 32-bit instructions */ 4u + 2u * /* 16-bit instructions */ 2u); |
| 1026 | int32_t data_offset = |
| 1027 | mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value(); |
| 1028 | int32_t offset = mirror::Object::MonitorOffset().Int32Value() - data_offset; |
| 1029 | ASSERT_LT(offset, 0); |
| 1030 | ASSERT_GT(offset, -256); |
| 1031 | const uint32_t load_lock_word = |
| 1032 | kLdrNegativeOffset | |
| 1033 | (-offset & 0xffu) | |
| 1034 | (base_reg << 16) | |
| 1035 | (/* IP */ 12 << 12); |
| 1036 | EXPECT_EQ(load_lock_word, GetOutputInsn32(thunk_offset)); |
| 1037 | // Verify the gray bit check. |
| 1038 | DCHECK_GE(LockWord::kReadBarrierStateShift, 8u); // ROR modified immediate. |
| 1039 | uint32_t ror_shift = 7 + (32 - LockWord::kReadBarrierStateShift); |
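// The TST immediate is (1 << LockWord::kReadBarrierStateShift): the 8-bit pattern 0x80
// rotated right by ror_shift, expressed below as a Thumb2 modified immediate (i:imm3:imm8).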
| 1040 | const uint32_t tst_gray_bit_without_offset = |
| 1041 | 0xf0100f00 | (/* IP */ 12 << 16) |
| 1042 | | (((ror_shift >> 4) & 1) << 26) // i |
| 1043 | | (((ror_shift >> 1) & 7) << 12) // imm3 |
| 1044 | | ((ror_shift & 1) << 7); // imm8, ROR('1':imm8<7:0>, ror_shift). |
| 1045 | EXPECT_EQ(tst_gray_bit_without_offset, GetOutputInsn32(thunk_offset + 4u)); |
| 1046 | EXPECT_EQ(0xd100u, GetOutputInsn16(thunk_offset + 8u) & 0xff00u); // BNE |
| 1047 | // Verify the fake dependency. |
| 1048 | const uint32_t fake_dependency = |
| 1049 | 0xeb000010 | // ADD Rd, Rn, Rm, LSR 32 (type=01, imm3=000, imm2=00) |
| 1050 | (/* IP */ 12) | // Rm = IP |
| 1051 | (base_reg << 16) | // Rn = base_reg |
| 1052 | (base_reg << 8); // Rd = base_reg |
| 1053 | EXPECT_EQ(fake_dependency, GetOutputInsn32(thunk_offset + 14u)); |
| 1054 | // Do not check the rest of the implementation. |
| 1055 | |
| 1056 | // The next thunk follows on the next aligned offset. |
| 1057 | thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); |
| 1058 | } |
| 1059 | } |
| 1060 | |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1061 | TEST_F(Thumb2RelativePatcherTest, BakerGcRootWide) { |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1062 | uint32_t valid_regs[] = { |
| 1063 | 0, 1, 2, 3, 5, 6, 7, // R4 is reserved for entrypoint address. |
| 1064 | 8, 9, 10, 11, // R12-R15 (IP, SP, LR, PC) are reserved. |
| 1065 | }; |
| 1066 | constexpr size_t kMethodCodeSize = 8u; |
| 1067 | constexpr size_t kLiteralOffset = 4u; |
| 1068 | uint32_t method_idx = 0u; |
| 1069 | for (uint32_t root_reg : valid_regs) { |
| 1070 | ++method_idx; |
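// The GC root load is a wide LDR of [r0, #8] into root_reg, immediately followed by the
// BNE that gets patched to branch to the read barrier thunk.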
| 1071 | uint32_t ldr = kLdrWInsn | (/* offset */ 8) | (/* base_reg */ 0 << 16) | (root_reg << 12); |
| 1072 | const std::vector<uint8_t> raw_code = RawCode({ldr, kBneWPlus0}); |
| 1073 | ASSERT_EQ(kMethodCodeSize, raw_code.size()); |
| 1074 | ArrayRef<const uint8_t> code(raw_code); |
| 1075 | const LinkerPatch patches[] = { |
| 1076 | LinkerPatch::BakerReadBarrierBranchPatch( |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1077 | kLiteralOffset, |
| 1078 | Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg, /* narrow */ false)), |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1079 | }; |
| 1080 | AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches)); |
| 1081 | } |
| 1082 | Link(); |
| 1083 | |
| 1084 | // All thunks are at the end. |
| 1085 | uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); |
| 1086 | method_idx = 0u; |
| 1087 | for (uint32_t root_reg : valid_regs) { |
| 1088 | ++method_idx; |
| 1089 | uint32_t bne = BneWWithOffset(GetMethodOffset(method_idx) + kLiteralOffset, thunk_offset); |
| 1090 | uint32_t ldr = kLdrWInsn | (/* offset */ 8) | (/* base_reg */ 0 << 16) | (root_reg << 12); |
| 1091 | const std::vector<uint8_t> expected_code = RawCode({ldr, bne}); |
| 1092 | ASSERT_EQ(kMethodCodeSize, expected_code.size()); |
| 1093 | EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code))); |
| 1094 | |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1095 | std::vector<uint8_t> expected_thunk = CompileBakerGcRootThunk(root_reg, /* narrow */ false); |
| 1096 | ASSERT_GT(output_.size(), thunk_offset); |
| 1097 | ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size()); |
| 1098 | ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset, |
| 1099 | expected_thunk.size()); |
| 1100 | if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) { |
| 1101 | DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk); |
| 1102 | ASSERT_TRUE(false); |
| 1103 | } |
| 1104 | |
| 1105 | // Verify that the fast-path null-check uses the correct register, i.e. root_reg. |
| 1106 | if (root_reg < 8) { |
| 1107 | ASSERT_GE(output_.size() - thunk_offset, 2u); |
| 1108 | ASSERT_EQ(0xb100 | root_reg, GetOutputInsn16(thunk_offset) & 0xfd07u); |
| 1109 | } else { |
| 1110 | ASSERT_GE(output_.size() - thunk_offset, 6u); |
| 1111 | ASSERT_EQ(0xf1b00f00u | (root_reg << 16), GetOutputInsn32(thunk_offset) & 0xfbff8f00u); // CMP |
| 1112 | ASSERT_EQ(0xd000u, GetOutputInsn16(thunk_offset + 4u) & 0xff00u); // BEQ |
| 1113 | } |
| 1114 | // Do not check the rest of the implementation. |
| 1115 | |
| 1116 | // The next thunk follows on the next aligned offset. |
| 1117 | thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); |
| 1118 | } |
| 1119 | } |
| 1120 | |
| 1121 | TEST_F(Thumb2RelativePatcherTest, BakerGcRootNarrow) { |
| 1122 | uint32_t valid_regs[] = { |
| 1123 | 0, 1, 2, 3, 5, 6, 7, // R4 is reserved for entrypoint address. |
| 1124 | // Not applicable to high registers. |
| 1125 | }; |
| 1126 | constexpr size_t kMethodCodeSize = 6u; |
| 1127 | constexpr size_t kLiteralOffset = 2u; |
| 1128 | uint32_t method_idx = 0u; |
| 1129 | for (uint32_t root_reg : valid_regs) { |
| 1130 | ++method_idx; |
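// Same as the wide variant but with the 16-bit LDR: the byte offset 8 is scaled to the
// imm5 field (offset / 4) in bits 6-10, which is why only low registers are applicable.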
| 1131 | uint32_t ldr = kLdrInsn | (/* offset */ 8 << (6 - 2)) | (/* base_reg */ 0 << 3) | root_reg; |
| 1132 | const std::vector<uint8_t> raw_code = RawCode({ldr, kBneWPlus0}); |
| 1133 | ASSERT_EQ(kMethodCodeSize, raw_code.size()); |
| 1134 | ArrayRef<const uint8_t> code(raw_code); |
| 1135 | const LinkerPatch patches[] = { |
| 1136 | LinkerPatch::BakerReadBarrierBranchPatch( |
| 1137 | kLiteralOffset, |
| 1138 | Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(root_reg, /* narrow */ true)), |
| 1139 | }; |
| 1140 | AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches)); |
| 1141 | } |
| 1142 | Link(); |
| 1143 | |
| 1144 | // All thunks are at the end. |
| 1145 | uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); |
| 1146 | method_idx = 0u; |
| 1147 | for (uint32_t root_reg : valid_regs) { |
| 1148 | ++method_idx; |
| 1149 | uint32_t bne = BneWWithOffset(GetMethodOffset(method_idx) + kLiteralOffset, thunk_offset); |
| 1150 | uint32_t ldr = kLdrInsn | (/* offset */ 8 << (6 - 2)) | (/* base_reg */ 0 << 3) | root_reg; |
| 1151 | const std::vector<uint8_t> expected_code = RawCode({ldr, bne}); |
| 1152 | ASSERT_EQ(kMethodCodeSize, expected_code.size()); |
| 1153 | EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code))); |
| 1154 | |
| 1155 | std::vector<uint8_t> expected_thunk = CompileBakerGcRootThunk(root_reg, /* narrow */ true); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1156 | ASSERT_GT(output_.size(), thunk_offset); |
| 1157 | ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size()); |
| 1158 | ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset, |
| 1159 | expected_thunk.size()); |
| 1160 | if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) { |
| 1161 | DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk); |
| 1162 | ASSERT_TRUE(false); |
| 1163 | } |
| 1164 | |
| 1165 | // Verify that the fast-path null-check CBZ uses the correct register, i.e. root_reg. |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1166 | ASSERT_GE(output_.size() - thunk_offset, 2u); |
| 1167 | ASSERT_EQ(0xb100 | root_reg, GetOutputInsn16(thunk_offset) & 0xfd07u); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1168 | // Do not check the rest of the implementation. |
| 1169 | |
| 1170 | // The next thunk follows on the next aligned offset. |
| 1171 | thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); |
| 1172 | } |
| 1173 | } |
| 1174 | |
| 1175 | TEST_F(Thumb2RelativePatcherTest, BakerGcRootOffsetBits) { |
| 1176 | // Test 1MiB of patches to the same thunk to stress-test different large offsets. |
| 1177 | // (The low bits are not that important but the location of the high bits is easy to get wrong.) |
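// Each iteration emits an 8-byte pair (a wide GC root LDR followed by a BNE), so patch i
// is at offset 8 * i + 4 and the single shared thunk ends up right after the 1MiB of code.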
| 1178 | std::vector<uint8_t> code; |
| 1179 | code.reserve(1 * MB); |
| 1180 | const size_t num_patches = 1 * MB / 8u; |
| 1181 | std::vector<LinkerPatch> patches; |
| 1182 | patches.reserve(num_patches); |
| 1183 | const uint32_t ldr = |
| 1184 | kLdrWInsn | (/* offset */ 8) | (/* base_reg */ 0 << 16) | (/* root_reg */ 0 << 12); |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1185 | uint32_t encoded_data = |
| 1186 | Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(/* root_reg */ 0, /* narrow */ false); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1187 | for (size_t i = 0; i != num_patches; ++i) { |
| 1188 | PushBackInsn(&code, ldr); |
| 1189 | PushBackInsn(&code, kBneWPlus0); |
| 1190 | patches.push_back(LinkerPatch::BakerReadBarrierBranchPatch(8u * i + 4u, encoded_data)); |
| 1191 | } |
| 1192 | ASSERT_EQ(1 * MB, code.size()); |
| 1193 | ASSERT_EQ(num_patches, patches.size()); |
| 1194 | AddCompiledMethod(MethodRef(1u), |
| 1195 | ArrayRef<const uint8_t>(code), |
| 1196 | ArrayRef<const LinkerPatch>(patches)); |
| 1197 | Link(); |
| 1198 | |
| 1199 | // The thunk is right after the method code. |
| 1200 | DCHECK_ALIGNED(1 * MB, kArmAlignment); |
| 1201 | std::vector<uint8_t> expected_code; |
| 1202 | for (size_t i = 0; i != num_patches; ++i) { |
| 1203 | PushBackInsn(&expected_code, ldr); |
| 1204 | PushBackInsn(&expected_code, BneWWithOffset(8u * i + 4u, 1 * MB)); |
| 1206 | } |
| 1207 | EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code))); |
| 1208 | } |
| 1209 | |
| 1210 | TEST_F(Thumb2RelativePatcherTest, BakerAndMethodCallInteraction) { |
| 1211 | // During development, there was a `DCHECK_LE(MaxNextOffset(), next_thunk.MaxNextOffset());` |
| 1212 | // in `ArmBaseRelativePatcher::ThunkData::MakeSpaceBefore()` which does not necessarily |
| 1213 | // hold when we're reserving thunks of different sizes. This test exposes the situation |
| 1214 | // by using Baker thunks and a method call thunk. |
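// Rough layout constructed below: method 1 with the call patch, a filler that rounds the
// code up to 1MiB, fourteen 1MiB fillers, and a final method with two BNE-guarded GC root
// loads whose thunk reservations must interleave with the method call thunk's reservation.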
| 1215 | |
| 1216 | // Add a method call patch that can reach up to method 1 offset + 16MiB. |
| 1217 | uint32_t method_idx = 0u; |
| 1218 | constexpr size_t kMethodCallLiteralOffset = 2u; |
| 1219 | constexpr uint32_t kMissingMethodIdx = 2u; |
| 1220 | const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kBlPlus0}); |
| 1221 | const LinkerPatch method1_patches[] = { |
| 1222 | LinkerPatch::RelativeCodePatch(kMethodCallLiteralOffset, nullptr, 2u), |
| 1223 | }; |
| 1224 | ArrayRef<const uint8_t> code1(raw_code1); |
| 1225 | ++method_idx; |
| 1226 | AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(method1_patches)); |
| 1227 | |
| 1228 | // Skip kMissingMethodIdx. |
| 1229 | ++method_idx; |
| 1230 | ASSERT_EQ(kMissingMethodIdx, method_idx); |
| 1231 | // Add a method with the right size so that the method code for the next one starts 1MiB |
| 1232 | // after the code for method 1. |
| 1233 | size_t filler_size = |
| 1234 | 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment) |
| 1235 | - sizeof(OatQuickMethodHeader); |
| 1236 | std::vector<uint8_t> filler_code = GenNops(filler_size / 2u); |
| 1237 | ++method_idx; |
| 1238 | AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code)); |
| 1239 | // Add 14 methods with 1MiB code+header, making the code for the next method start 1MiB |
| 1240 | // before the currently scheduled MaxNextOffset() for the method call thunk. |
| 1241 | for (uint32_t i = 0; i != 14; ++i) { |
| 1242 | filler_size = 1 * MB - sizeof(OatQuickMethodHeader); |
| 1243 | filler_code = GenNops(filler_size / 2u); |
| 1244 | ++method_idx; |
| 1245 | AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code)); |
| 1246 | } |
| 1247 | |
| 1248 | // Add 2 Baker GC root patches to the last method: one that would allow the thunk at |
| 1249 | // 1MiB + kArmAlignment (i.e. kArmAlignment after the method call thunk) and a second |
| 1250 | // that needs it kArmAlignment after that. Given that the size of the GC root thunk |
| 1251 | // is more than the space required by the method call thunk plus kArmAlignment, |
| 1252 | // this pushes the first GC root thunk's pending MaxNextOffset() before the method call |
| 1253 | // thunk's pending MaxNextOffset(), which needs to be adjusted. |
| 1254 | ASSERT_LT(RoundUp(CompileMethodCallThunk().size(), kArmAlignment) + kArmAlignment, |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1255 | CompileBakerGcRootThunk(/* root_reg */ 0, /* narrow */ false).size()); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1256 | static_assert(kArmAlignment == 8, "Code below assumes kArmAlignment == 8"); |
| 1257 | constexpr size_t kBakerLiteralOffset1 = kArmAlignment + 2u - kPcAdjustment; |
| 1258 | constexpr size_t kBakerLiteralOffset2 = kBakerLiteralOffset1 + kArmAlignment; |
| 1259 | // Use offset = 0, base_reg = 0, so the LDR is simply `kLdrWInsn | (root_reg << 12)`. |
| 1260 | const uint32_t ldr1 = kLdrWInsn | (/* root_reg */ 1 << 12); |
| 1261 | const uint32_t ldr2 = kLdrWInsn | (/* root_reg */ 2 << 12); |
| 1262 | const std::vector<uint8_t> last_method_raw_code = RawCode({ |
| 1263 | kNopInsn, // Padding before first GC root read barrier. |
| 1264 | ldr1, kBneWPlus0, // First GC root LDR with read barrier. |
| 1265 | ldr2, kBneWPlus0, // Second GC root LDR with read barrier. |
| 1266 | }); |
| 1267 | uint32_t encoded_data1 = |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1268 | Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(/* root_reg */ 1, /* narrow */ false); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1269 | uint32_t encoded_data2 = |
Vladimir Marko | 88abba2 | 2017-05-03 17:09:25 +0100 | [diff] [blame] | 1270 | Thumb2RelativePatcher::EncodeBakerReadBarrierGcRootData(/* root_reg */ 2, /* narrow */ false); |
Vladimir Marko | eee1c0e | 2017-04-21 17:58:41 +0100 | [diff] [blame] | 1271 | const LinkerPatch last_method_patches[] = { |
| 1272 | LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset1, encoded_data1), |
| 1273 | LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset2, encoded_data2), |
| 1274 | }; |
| 1275 | ++method_idx; |
| 1276 | AddCompiledMethod(MethodRef(method_idx), |
| 1277 | ArrayRef<const uint8_t>(last_method_raw_code), |
| 1278 | ArrayRef<const LinkerPatch>(last_method_patches)); |
| 1279 | |
| 1280 | // The main purpose of the test is to check that Link() does not cause a crash. |
| 1281 | Link(); |
| 1282 | |
| 1283 | ASSERT_EQ(15 * MB, GetMethodOffset(method_idx) - GetMethodOffset(1u)); |
| 1284 | } |
| 1285 | |
Vladimir Marko | 4d23c9d | 2015-04-01 23:03:09 +0100 | [diff] [blame] | 1286 | } // namespace linker |
| 1287 | } // namespace art |