blob: 619ef6ee30dd577bf198366b48b844e489c3b591 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)
  //
  // Pass 1: count the number of dependents of each Fixup.
  const FixupId end_id = assembler->fixups_.size();
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: every Fixup located strictly between this one and the target
      // can change this branch's distance, so this fixup is their dependent.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_count_ += 1u;
      }
    } else {
      // Backward branch: walk preceding Fixups down to (and including) the one at the
      // target location. Note the asymmetry with the forward case (>= vs <).
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_count_ += 1u;
      }
    }
  }
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;
  }
  // Pass 2: create and fill in the fixup_dependents_. The loops mirror pass 1 exactly;
  // decrementing dependents_start_ while storing turns the range ends recorded above
  // into range starts, leaving each fixup's dependents in a contiguous slice.
  assembler->fixup_dependents_.reset(new FixupId[number_of_dependents]);
  FixupId* dependents = assembler->fixup_dependents_.get();
  for (FixupId fixup_id = 0u; fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        fixups[id].dependents_start_ -= 1u;
        dependents[fixups[id].dependents_start_] = fixup_id;
      }
    } else {
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        fixups[id - 1u].dependents_start_ -= 1u;
        dependents[fixups[id - 1u].dependents_start_] = fixup_id;
      }
    }
  }
}
79
// Resolves every Fixup linked through `label` to the now-known position `bound_pc`
// and marks the label as bound. The chain of pending Fixups is threaded through the
// 16-bit placeholder words in buffer_: each placeholder holds the id of the next
// Fixup in the chain, and is zeroed here once consumed.
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
94
// Binds each literal's label to its tentative position at the end of the current code.
// Positions assume no alignment padding; AdjustFixups() re-binds them if padding is
// later required before the literal pool.
void Thumb2Assembler::BindLiterals() {
  // We don't add the padding here, that's done only after adjusting the Fixup sizes.
  uint32_t code_size = buffer_.Size();
  for (Literal& lit : literals_) {
    Label* label = lit.GetLabel();
    BindLabel(label, code_size);
    code_size += lit.GetSize();
  }
}
104
// Checks whether `fixup` must grow to a larger encoding for the given code size and,
// if so, propagates the size increase to all Fixups that depend on it. Dependents not
// already queued are appended to `fixups_to_recalculate`; the 16-bit placeholder in
// buffer_ at each fixup's location doubles as the "already queued" flag (0/1).
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);  // Mark as queued.
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
120
// Iteratively grows Fixups to larger encodings until a fixed point is reached and
// returns the final (adjusted) code size, not including literal-pool padding. Also
// re-binds literal labels to account for any such padding.
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Seed the worklist with any fixups that already need to grow.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" mark.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && !literals_.empty()) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
180
// Expands the buffer to the adjusted size and emits the final encoding of every Fixup.
// The code between fixups is shifted toward the end of the buffer as needed; iterating
// in reverse guarantees each byte is moved at most once (source and destination ranges
// are processed back-to-front and never revisited).
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // All displacement has been consumed; the remaining prefix was never moved.
  CHECK_EQ(src_end, dest_end);
}
211
// Appends the literal pool to the end of the code, inserting a 2-byte NOP padding
// word first if needed to reach 4-byte alignment.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Alignment padding.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      // The label was re-bound in AdjustFixups() to the literal's final position.
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
231
// Encodes a 16-bit Thumb B instruction: conditional (encoding T1, imm8) when
// cond != AL, unconditional (encoding T2, imm11) otherwise. `offset` is the
// branch displacement in bytes, already relative to the aligned PC.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}
244
// Encodes a 32-bit Thumb2 B instruction: conditional (encoding T3, 21-bit range)
// when cond != AL, unconditional (encoding T4, 25-bit range) otherwise. In T4 the
// I1/I2 offset bits are stored XOR-inverted against the sign bit as J1/J2.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22)^ s ^ 1) & 1;   // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
267
// Encodes a 16-bit CBZ (cond == EQ) or CBNZ (cond == NE) instruction. Only low
// registers and non-negative, even offsets up to 126 bytes are encodable.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9.
}
277
// Encodes a 16-bit CMP rn, #imm8 instruction (encoding T1, low registers only).
inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
  DCHECK(!IsHighRegister(rn));
  DCHECK(IsUint<8>(value));
  return B13 | B11 | (rn << 8) | value;
}


// Encodes a 16-bit ADD rdn, rm instruction (encoding T2, high registers allowed).
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rdn is moved across the 4-bit rm field to bit 7 (the DN bit).
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}
289
// Encodes a 32-bit MOVW rd, #imm16 instruction (loads a 16-bit immediate,
// zero-extending into rd).
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}


// Encodes a 32-bit MOVT rd, #imm16 instruction (writes the top halfword of rd).
// `value` carries the immediate in its upper 16 bits; the low 16 bits must be zero.
inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
  DCHECK_EQ(value & 0xffff, 0);
  // MOVT differs from MOVW only in two opcode bits.
  int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
  return movw_encoding | B25 | B23;
}


// Encodes a 32-bit MOV rd, #const instruction using a Thumb2 modified immediate.
// The caller must ensure `value` is representable (checked by the DCHECK).
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}
312
// Encodes a 16-bit PC-relative LDR literal (encoding T1): low register, positive
// word-aligned offset up to 1020 bytes.
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}


// Encodes a 32-bit PC-relative LDR literal as an LDR rt, [PC, #imm12].
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}


// Encodes a 32-bit LDRD rt, rt2, [rn, #offset] with P=1 (offset addressing),
// U=1 (positive offset) and W=0 (no writeback).
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}
333
// Encodes a 32-bit VLDR.32 sd, [rn, #offset] with a positive word-aligned offset.
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}


// Encodes a 32-bit VLDR.64 dd, [rn, #offset] with a positive word-aligned offset.
// Note that for D registers the D bit is the HIGH bit of the register number,
// the opposite of the S-register case above.
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |   // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |   // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}
355
// Encodes a 16-bit LDR rt, [rn, #imm5*4] (encoding T1): low registers only,
// positive word-aligned offset up to 124 bytes.
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
365
// Dispatches to the wide/FP literal-load encoder matching this Fixup's type,
// using `rbase` as the base register (PC or a materialized address register).
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}


// Encodes a 32-bit LDR rt, [rn, #imm12] with a positive offset (U=1 implied by
// the fixed opcode bits).
inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
}
384
// Finalizes the generated code: binds literal labels, grows branch/load encodings
// to their final sizes, rewrites the code with resolved fixups, and appends the
// literal pool. Order matters: literals must be bound before fixup adjustment.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  BindLiterals();
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
}
392
// Returns whether `immediate` can be encoded as a Thumb2 shifter operand for the
// given opcode, filling in `shifter_op` with the immediate either way. rd/rn are
// unused here; they are part of the interface shared with the ARM32 assembler.
bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                            Register rn ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            uint32_t immediate,
                                            ShifterOperand* shifter_op) {
  shifter_op->type_ = ShifterOperand::kImmediate;
  shifter_op->immed_ = immediate;
  shifter_op->is_shift_ = false;
  shifter_op->is_rotate_ = false;
  switch (opcode) {
    case ADD:
    case SUB:
      // ADD/SUB have a plain 12-bit immediate form, so anything below 4096 always fits.
      if (immediate < (1 << 12)) {    // Less than (or equal to) 12 bits can always be done.
        return true;
      }
      // Otherwise fall back to the modified-immediate (rotated imm8) form.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MOV:
      // TODO: Support less than or equal to 12bits.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
    case MVN:
    default:
      // All remaining data-processing opcodes only take modified immediates.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
  }
}
418
// The following are thin wrappers that route each ARM data-processing mnemonic to
// EmitDataProcessing, which selects the 16-bit or 32-bit Thumb2 encoding. Note
// that EmitDataProcessing takes (rn, rd) in that order.

// rd = rn & so.
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}


// rd = rn ^ so.
void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}


// rd = rn - so.
void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}


// rd = so - rn (reverse subtract).
void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}


// rd = rn + so.
void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}


// rd = rn + so + carry.
void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}


// rd = rn - so - !carry.
void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}


// rd = so - rn - !carry (reverse subtract with carry).
void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}
465
466
// Compare/test instructions: these have no destination register (R0 below is a
// placeholder ignored by the encoding) and always set the condition flags.

void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}


void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}
487
488
// rd = rn | so.
void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}


// rd = so. MOV has no first operand; R0 is a placeholder ignored by the encoding.
void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}


// rd = rn & ~so (bit clear).
void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}


// rd = ~so. MVN has no first operand; R0 is a placeholder ignored by the encoding.
void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
511
512
// rd = rn * rm. Uses the 16-bit MULS encoding when possible (rd == rm, both low
// registers, and the assembler is not forced into 32-bit mode), otherwise the
// 32-bit MUL encoding.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}
536
537
// rd = ra + rn * rm (multiply-accumulate, 32-bit encoding only).
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
554
555
556void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
557 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700558 CheckCondition(cond);
559
Andreas Gampec8ccf682014-09-29 20:07:43 -0700560 uint32_t op1 = 0U /* 0b000 */;
561 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700562 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
563 op1 << 20 |
564 op2 << 4 |
565 static_cast<uint32_t>(rd) << 8 |
566 static_cast<uint32_t>(ra) << 12 |
567 static_cast<uint32_t>(rn) << 16 |
568 static_cast<uint32_t>(rm);
569
570 Emit32(encoding);
571}
572
573
// rd_hi:rd_lo = rn * rm (signed 64-bit multiply).
void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}


// rd_hi:rd_lo = rn * rm (unsigned 64-bit multiply). Differs from smull only in op1.
void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 2U /* 0b010; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
608
609
// rd = rn / rm (signed divide).
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |  // Ra field is all-ones for divide.
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}


// rd = rn / rm (unsigned divide). Differs from sdiv only in the B21 opcode bit.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |  // Ra field is all-ones for divide.
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
642
643
// Signed bit-field extract: rd = sign_extend(rn<lsb+width-1 : lsb>).
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}


// Unsigned bit-field extract: rd = zero_extend(rn<lsb+width-1 : lsb>).
// Identical to sbfx except for the opcode field.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
684
685
// Load/store wrappers. EmitLoadStore flags are (load, byte, half, is_signed).

void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}


void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}


void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}


void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}


void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}


void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}


// Load signed byte (sign-extends into rd).
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}


// Load signed halfword (sign-extends into rd).
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
724
725
// LDRD with an implicit second register: loads rd and rd+1.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}


// LDRD rd, rd2, [ad] — doubleword load with an explicit register pair.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads. The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}


// STRD with an implicit second register: stores rd and rd+1.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}


// STRD rd, rd2, [ad] — doubleword store; differs from ldrd only by the L bit (B20).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads. The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
758
759
// Load multiple registers (LDM) from memory at 'base' using the given
// block address mode. A list with a single register is lowered to a
// plain LDR instead, since a one-register LDM is not used in Thumb.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the replacement is a post-indexed LDR (load from [base],
    // then base += 4, i.e. pop-style). Confirm this matches what DB_W callers
    // expect for the single-register case.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
776
777
// Store multiple registers (STM) to memory at 'base' using the given
// block address mode. A list with a single register is lowered to a
// plain STR instead, since a one-register STM is not used in Thumb.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): for IA this produces a pre-indexed STR with offset -4
    // (store to [base - 4] with base writeback), while IA_W produces a plain
    // offset store to [base - 4]. The mapping looks push-style rather than a
    // literal IA/IA_W equivalence — verify against the call sites.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
795
796
// Tries to materialize the float 's_imm' into sd with a single VMOV
// (immediate). Returns true on success; returns false when the value is
// not representable as the 8-bit VFP immediate, so the caller must load
// it some other way.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  // Encodable iff the low 19 mantissa bits are zero and bits [30:25] are
  // either 0b100000 or 0b011111 (bits 29..25 all equal to the complement
  // of bit 30).
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack the 8-bit immediate: bit 7 = sign (bit 31), bit 6 = bit 29,
    // bits 5..0 = bits 24..19 of the float.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    // imm8 is split across the instruction: high nibble at B16, low nibble
    // in the bottom four bits.
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
810
811
// Tries to materialize the double 'd_imm' into dd with a single VMOV
// (immediate). Returns true on success; returns false when the value is
// not representable as the 8-bit VFP immediate, so the caller must load
// it some other way.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  // Encodable iff the low 48 mantissa bits are zero and bits [62:54] are
  // either 0b100000000 or 0b011111111 (bits 61..54 all equal to the
  // complement of bit 62).
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack the 8-bit immediate: bit 7 = sign (bit 63), bit 6 = bit 61,
    // bits 5..0 = bits 53..48 of the double.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    // imm8 is split across the instruction: high nibble at B16, low nibble
    // in the bottom four bits; B8 selects the double-precision form.
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
825
826
827void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
828 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
829}
830
831
832void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
833 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
834}
835
836
837void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
838 Condition cond) {
839 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
840}
841
842
843void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
844 Condition cond) {
845 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
846}
847
848
849void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
850 Condition cond) {
851 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
852}
853
854
855void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
856 Condition cond) {
857 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
858}
859
860
861void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
862 Condition cond) {
863 EmitVFPsss(cond, B21, sd, sn, sm);
864}
865
866
867void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
868 Condition cond) {
869 EmitVFPddd(cond, B21, dd, dn, dm);
870}
871
872
873void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
874 Condition cond) {
875 EmitVFPsss(cond, 0, sd, sn, sm);
876}
877
878
879void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
880 Condition cond) {
881 EmitVFPddd(cond, 0, dd, dn, dm);
882}
883
884
885void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
886 Condition cond) {
887 EmitVFPsss(cond, B6, sd, sn, sm);
888}
889
890
891void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
892 Condition cond) {
893 EmitVFPddd(cond, B6, dd, dn, dm);
894}
895
896
897void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
898 Condition cond) {
899 EmitVFPsss(cond, B23, sd, sn, sm);
900}
901
902
903void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
904 Condition cond) {
905 EmitVFPddd(cond, B23, dd, dn, dm);
906}
907
908
909void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
910 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
911}
912
913
914void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
915 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
916}
917
918
919void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
920 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
921}
922
923
924void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
925 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
926}
927
928
929void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
930 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
931}
932
933void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
934 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
935}
936
937
938void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
939 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
940}
941
942
943void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
944 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
945}
946
947
948void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
949 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
950}
951
952
953void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
954 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
955}
956
957
958void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
959 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
960}
961
962
963void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
964 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
965}
966
967
968void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
969 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
970}
971
972
973void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
974 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
975}
976
977
978void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
979 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
980}
981
982
983void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
984 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
985}
986
987
988void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
989 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
990}
991
992
993void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
994 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
995}
996
997
998void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
999 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
1000}
1001
1002
1003void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
1004 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
1005}
1006
1007void Thumb2Assembler::b(Label* label, Condition cond) {
agicsakie2142d252015-06-30 17:10:03 -07001008 DCHECK_EQ(next_condition_, AL);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001009 EmitBranch(cond, label, false, false);
1010}
1011
1012
1013void Thumb2Assembler::bl(Label* label, Condition cond) {
1014 CheckCondition(cond);
1015 EmitBranch(cond, label, true, false);
1016}
1017
1018
1019void Thumb2Assembler::blx(Label* label) {
1020 EmitBranch(AL, label, true, true);
1021}
1022
1023
// Emits a marker sequence for an exception handler at 'label':
// a TST PC, #0 (which only updates flags) followed by an unconditional
// branch encoding the handler target. The branch to the local label 'l'
// jumps over that embedded branch, so straight-line execution is not
// affected. NOTE(review): the TST appears to act purely as a recognizable
// tag for tooling/runtime scanning — confirm with the consumers.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);  // Skip over the handler-address branch below.
  EmitBranch(AL, label, false, false);  // Encodes the handler target.
  Bind(&l);
}
1031
1032
// Emits a 32-bit Thumb2 instruction as two 16-bit halfwords, most
// significant halfword first (the Thumb2 memory order).
void Thumb2Assembler::Emit32(int32_t value) {
  // 'ensured' must stay alive across both emits to guarantee capacity.
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value >> 16);
  buffer_.Emit<int16_t>(value & 0xffff);
}
1038
1039
// Emits a single 16-bit Thumb instruction into the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1044
1045
// Decides whether the data-processing instruction described by
// (opcode, set_cc, rn, rd, so) must use the 32-bit Thumb2 encoding.
// Returns true when no 16-bit (Thumb1-style) encoding exists for the
// operands/flag behavior, false when a 16-bit encoding is available.
// 'cond' matters because 16-bit encodings set flags iff outside an IT
// block (i.e. iff cond == AL).
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
                                            Opcode opcode,
                                            SetCc set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // Assembler-wide override: force all instructions to 32-bit encodings.
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only CMP, non-flag-setting MOV and non-flag-setting ADD with rn == rd
  // have 16-bit forms that accept high registers (R8-R15).
  bool can_contain_high_register =
      (opcode == CMP) ||
      (opcode == MOV && set_cc != kCcSet) ||
      ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  // A high register in the shifter operand also forces 32-bit unless the
  // opcode has a high-register-capable 16-bit form.
  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
      return true;  // TEQ has no 16-bit encoding at all.
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register data-processing 16-bit forms require rd == rn.
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (opcode == RSB) {
      DCHECK(rn_is_valid);
      // Only "RSB rd, rn, #0" (negate) exists as a 16-bit instruction.
      if (so.GetImmediate() != 0u) {
        return true;
      }
    } else if (rn_is_valid && rn != rd) {
      // The only thumb1 instructions with a register and an immediate are ADD and SUB
      // with a 3-bit immediate, and RSB with zero immediate.
      if (opcode == ADD || opcode == SUB) {
        if (!IsUint<3>(so.GetImmediate())) {
          return true;
        }
      } else {
        return true;
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (!IsUint<8>(so.GetImmediate())) {
          return true;
        }
      }
    }
  } else {
    DCHECK(so.IsRegister());
    if (so.IsShift()) {
      // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
      if (opcode != MOV) {
        return true;
      }
      // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
      if (so.GetShift() == ROR || so.GetShift() == RRX) {
        return true;
      }
      // 16-bit shifts set condition codes if and only if outside IT block,
      // i.e. if and only if cond == AL.
      if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
        return true;
      }
    } else {
      // Register operand without shift.
      switch (opcode) {
        case ADD:
          // The 16-bit ADD that cannot contain high registers can set condition codes
          // if and only if outside IT block, i.e. if and only if cond == AL.
          if (!can_contain_high_register &&
              ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
            return true;
          }
          break;
        case AND:
        case BIC:
        case EOR:
        case ORR:
        case MVN:
        case ADC:
        case SUB:
        case SBC:
          // These 16-bit opcodes set condition codes if and only if outside IT block,
          // i.e. if and only if cond == AL.
          if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
            return true;
          }
          break;
        case RSB:
        case RSC:
          // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
          return true;
        case CMP:
        default:
          break;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1199
1200
// Emits a data-processing instruction in the 32-bit Thumb2 encoding.
// Comparison/test opcodes (TST/TEQ/CMP/CMN) are encoded as their
// AND/EOR/SUB/ADD counterparts with rd forced to PC (0b1111); MOV/MVN
// are ORR/ORN-style with rn forced to PC. 'cond' is unused here: the
// condition is applied via a preceding IT block, not in the encoding.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  // 255 is a sentinel meaning "no 32-bit encoding for this opcode".
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 RSC; falls through to the sentinel check.
    case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      // Non-flag-setting ADD/SUB with a plain 12-bit immediate use the
      // ADDW/SUBW-style opcodes (0/5); the flag-setting variants keep the
      // opcodes chosen above.
      if (set_cc != kCcSet) {
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the 12-bit immediate into i:imm3:imm8 fields.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      // NOTE(review): B25 selects the plain-binary-immediate form and B20
      // the S bit; with set_cc == kCcSet this emits the modified-immediate
      // class opcode with the raw 12-bit split — confirm against the ARM ARM
      // that flag-setting callers only reach here with encodable immediates.
      encoding = B31 | B30 | B29 | B28 |
          (set_cc == kCcSet ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc == kCcSet ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc == kCcSet ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1284
1285
// Emits a data-processing instruction in a 16-bit (Thumb1-style) encoding.
// Preconditions: Is32BitDataProcessing() returned false for the same
// arguments. ADD/SUB are delegated to Emit16BitAddSub(). Most 16-bit
// forms have only two register fields, so rn/rd are rewritten below to
// fit the operands into them.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              SetCc set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  // 255 is a sentinel meaning "no 16-bit encoding was selected".
  uint8_t thumb_opcode = 255U /* 0b11111111 */;
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL:
        DCHECK_LE(immediate, 31u);
        thumb_opcode = 0U /* 0b00 */;
        break;
      case LSR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 1U /* 0b01 */;
        break;
      case ASR:
        DCHECK(1 <= immediate && immediate <= 32);
        immediate &= 31;  // 32 is encoded as 0.
        thumb_opcode = 2U /* 0b10 */;
        break;
      case ROR:  // No 16-bit ROR immediate.
      case RRX:  // No 16-bit RRX.
      default:
        LOG(FATAL) << "Unexpected shift: " << so.GetShift();
        UNREACHABLE();
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case MVN: {
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ: {
          DCHECK(set_cc == kCcSet);
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        DCHECK(set_cc == kCcSet);
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
          // Special cmp for high registers.
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 7;
          // Put the top bit of rd into the bottom bit of the opcode.
          thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (set_cc != kCcSet) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            DCHECK(!IsHighRegister(rn));
            DCHECK(!IsHighRegister(rd));
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the final halfword from the fields chosen above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1474
1475
// ADD and SUB are complex enough to warrant their own emitter.
// Emits a 16-bit ADD or SUB, selecting among the special SP-relative
// immediate forms, the high-register ADD (T2), and the generic T1/T2
// register/immediate forms. Preconditions: Is32BitDataProcessing()
// returned false for the same arguments.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 9 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
                                     (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1.
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          // The third register rides in the immediate field.
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Encoded as a word (4-byte) count.
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          CHECK_LT(immediate, (1u << 10));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;  // Encoded as a word (4-byte) count.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        // The third register rides in the immediate field.
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          CHECK_LT(immediate, (1u << 9));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;  // Encoded as a word (4-byte) count.
        } else if (rn != rd) {
          // Must use T1.
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final halfword from the fields chosen above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1625
1626
1627void Thumb2Assembler::EmitDataProcessing(Condition cond,
1628 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001629 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001630 Register rn,
1631 Register rd,
1632 const ShifterOperand& so) {
1633 CHECK_NE(rd, kNoRegister);
1634 CheckCondition(cond);
1635
1636 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1637 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1638 } else {
1639 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1640 }
1641}
1642
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001643void Thumb2Assembler::EmitShift(Register rd,
1644 Register rm,
1645 Shift shift,
1646 uint8_t amount,
1647 Condition cond,
1648 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001649 CHECK_LT(amount, (1 << 5));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001650 if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
1651 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001652 uint16_t opcode = 0;
1653 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001654 case LSL: opcode = 0U /* 0b00 */; break;
1655 case LSR: opcode = 1U /* 0b01 */; break;
1656 case ASR: opcode = 2U /* 0b10 */; break;
1657 case ROR: opcode = 3U /* 0b11 */; break;
1658 case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001659 default:
1660 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001661 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001662 }
1663 // 32 bit.
1664 int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001665 0xf << 16 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001666 uint32_t imm3 = amount >> 2;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001667 uint32_t imm2 = amount & 3U /* 0b11 */;
Dave Allison45fdb932014-06-25 12:37:10 -07001668 encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
1669 static_cast<int16_t>(rd) << 8 | opcode << 4;
1670 Emit32(encoding);
1671 } else {
1672 // 16 bit shift
1673 uint16_t opcode = 0;
1674 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001675 case LSL: opcode = 0U /* 0b00 */; break;
1676 case LSR: opcode = 1U /* 0b01 */; break;
1677 case ASR: opcode = 2U /* 0b10 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001678 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001679 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1680 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001681 }
1682 int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
1683 static_cast<int16_t>(rd);
1684 Emit16(encoding);
1685 }
1686}
1687
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001688void Thumb2Assembler::EmitShift(Register rd,
1689 Register rn,
1690 Shift shift,
1691 Register rm,
1692 Condition cond,
1693 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001694 CHECK_NE(shift, RRX);
1695 bool must_be_32bit = false;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001696 if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
1697 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001698 must_be_32bit = true;
1699 }
1700
1701 if (must_be_32bit) {
1702 uint16_t opcode = 0;
1703 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001704 case LSL: opcode = 0U /* 0b00 */; break;
1705 case LSR: opcode = 1U /* 0b01 */; break;
1706 case ASR: opcode = 2U /* 0b10 */; break;
1707 case ROR: opcode = 3U /* 0b11 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001708 default:
1709 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001710 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001711 }
1712 // 32 bit.
1713 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001714 0xf << 12 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001715 encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
1716 static_cast<int16_t>(rd) << 8 | opcode << 21;
1717 Emit32(encoding);
1718 } else {
1719 uint16_t opcode = 0;
1720 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001721 case LSL: opcode = 2U /* 0b0010 */; break;
1722 case LSR: opcode = 3U /* 0b0011 */; break;
1723 case ASR: opcode = 4U /* 0b0100 */; break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001724 case ROR: opcode = 7U /* 0b0111 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001725 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001726 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1727 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001728 }
1729 int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
1730 static_cast<int16_t>(rd);
1731 Emit16(encoding);
1732 }
1733}
1734
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001735inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1736 switch (size) {
1737 case kBranch16Bit:
1738 return 2u;
1739 case kBranch32Bit:
1740 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001741
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001742 case kCbxz16Bit:
1743 return 2u;
1744 case kCbxz32Bit:
1745 return 4u;
1746 case kCbxz48Bit:
1747 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001748
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001749 case kLiteral1KiB:
1750 return 2u;
1751 case kLiteral4KiB:
1752 return 4u;
1753 case kLiteral64KiB:
1754 return 8u;
1755 case kLiteral1MiB:
1756 return 10u;
1757 case kLiteralFar:
1758 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001759
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001760 case kLongOrFPLiteral1KiB:
1761 return 4u;
1762 case kLongOrFPLiteral256KiB:
1763 return 10u;
1764 case kLongOrFPLiteralFar:
1765 return 14u;
1766 }
1767 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1768 UNREACHABLE();
1769}
1770
// Returns the byte size of this fixup's original encoding, before any
// size adjustment expanded it.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1774
// Returns the byte size of this fixup's current (possibly expanded) encoding.
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1778
1779inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1780 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001781 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001782 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1783 return current_code_size & 2;
1784}
1785
// Compute the PC-relative offset this fixup's instruction must encode, given
// the current total code size. Starts from the raw target-location distance,
// folds in the accumulated size adjustment of intervening fixups, subtracts
// the Thumb PC bias, and finally compensates for encoding-specific effects
// (extra preceding instructions, literal-pool padding, PC rounding).
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  // All positions fit in int32_t, so the signed arithmetic below is safe.
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: intervening expansion pushes the target further away.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: expansion increases the (negative) distance.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      // Plain branches need no extra adjustment.
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      // kLiteral1KiB with a diff of -2 can occur transiently during size
      // adjustment; it is resolved by the alignment fix-up below.
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1846
1847inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1848 DCHECK_NE(target_, kUnresolved);
1849 Size old_size = size_;
1850 size_ = new_size;
1851 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1852 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1853 if (target_ > location_) {
1854 adjustment_ += adjustment;
1855 }
1856 return adjustment;
1857}
1858
// If this fixup's current encoding cannot reach its target, expand it to the
// next larger encoding (possibly several steps via the deliberate
// fallthroughs) and return the number of bytes the code grew; 0 if the
// current encoding already suffices. Note that expanding changes
// current_code_size, which in turn changes GetOffset() for later checks.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit B<cond> has a 9-bit range, unconditional B a 12-bit range.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ: forward-only, 7-bit unsigned offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit B<cond>: 9-bit signed offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      // 16-bit LDR (literal): low registers only, 10-bit offset.
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1937
// Write this fixup's final machine code into the buffer at location_, using
// the current (final) encoding size. code_size is the total code size used to
// compute the PC-relative offset. 32-bit Thumb2 instructions are stored as
// two 16-bit halfwords, high halfword first.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Convert B to BL by setting the link bit.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Convert B to BLX.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      // CBZ/CBNZ Rn, <target>.
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // Expanded to CMP Rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // Expanded to CMP Rn, #0 followed by a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      // 16-bit LDR (literal).
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW/MOVT rn with the full 32-bit offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // MOV IP, #(offset & ~0x3ff); ADD IP, PC; wide/FP load at [IP, #(offset & 0x3ff)].
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW/MOVT IP with the full offset; ADD IP, PC; wide/FP load at [IP, #0].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2080
Dave Allison65fcc2c2014-04-28 13:45:27 -07002081uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002082 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002083 uint32_t location = buffer_.Size();
2084
2085 // This is always unresolved as it must be a forward branch.
2086 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002087 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002088}
2089
2090
// NOTE(review): the comment below predates the register-offset path at the
// bottom of this function, which does handle [rx, ry] forms — confirm and
// update. Kept for history:
// NOTE: this only support immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
//
// Emit a single load or store (word/halfword/byte, optionally sign-extending
// on load), choosing the smallest legal Thumb encoding. The 16-bit forms are
// limited to low registers, non-negative offsets, plain offset addressing and
// small offset ranges; anything else forces the 32-bit encoding.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // SP and PC have dedicated 16-bit forms; other high base registers don't.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Sign-extending loads, negative offsets and pre/post-index modes only
  // exist as 32-bit encodings.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    // 16-bit offset ranges depend on the access size (the offset field is
    // scaled by the access size).
    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal form: 12-bit magnitude with an explicit up/down bit.
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2254
2255
// Emit a load/store-multiple (LDM/STM). Prefers the 16-bit PUSH/POP form for
// SP-based full-descending stack ops, then the generic 16-bit LDM/STM, and
// falls back to the 32-bit encoding for high registers, non-writeback modes
// or forced-32-bit assembly. Only IA and DB address modes are supported in
// 32-bit Thumb.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // POP is LDMIA SP! and PUSH is STMDB SP!; the only high register allowed in
  // the 16-bit form is PC (for POP) or LR (for PUSH), via the P/M bit (B8).
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Generic 16-bit LDM/STM can only name low registers.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                       (op << 23) |
                       (load ? B20 : 0) |
                       base << 16 |
                       regs |
                       (w_bit << 21);
    Emit32(encoding);
  } else {
    int16_t encoding = B15 | B14 |
                       (load ? B11 : 0) |
                       base << 8 |
                       regs;
    Emit16(encoding);
  }
}
2324
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002325void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
2326 bool use32bit = IsForced32Bit() || !CanRelocateBranches();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002327 uint32_t pc = buffer_.Size();
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002328 Fixup::Type branch_type;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002329 if (cond == AL) {
2330 if (link) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002331 use32bit = true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002332 if (x) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002333 branch_type = Fixup::kUnconditionalLinkX; // BLX.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002334 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002335 branch_type = Fixup::kUnconditionalLink; // BX.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002336 }
2337 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002338 branch_type = Fixup::kUnconditional; // B.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002339 }
2340 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002341 branch_type = Fixup::kConditional; // B<cond>.
Dave Allison65fcc2c2014-04-28 13:45:27 -07002342 }
2343
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002344 Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
2345 FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));
2346
Dave Allison65fcc2c2014-04-28 13:45:27 -07002347 if (label->IsBound()) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002348 // The branch is to a bound label which means that it's a backwards branch.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002349 GetFixup(branch_id)->Resolve(label->Position());
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002350 Emit16(0);
Vladimir Markofbeb4ae2015-06-16 11:32:01 +00002351 } else {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002352 // Branch target is an unbound label. Add it to a singly-linked list maintained within
2353 // the code with the label serving as the head.
2354 Emit16(static_cast<uint16_t>(label->position_));
2355 label->LinkTo(branch_id);
Vladimir Markof38caa62015-05-29 15:50:18 +01002356 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002357
2358 if (use32bit) {
2359 Emit16(0);
2360 }
2361 DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
Dave Allison65fcc2c2014-04-28 13:45:27 -07002362}
2363
2364
2365void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2366 CHECK_NE(rd, kNoRegister);
2367 CHECK_NE(rm, kNoRegister);
2368 CheckCondition(cond);
2369 CHECK_NE(rd, PC);
2370 CHECK_NE(rm, PC);
2371 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2372 B25 | B23 | B21 | B20 |
2373 static_cast<uint32_t>(rm) << 16 |
2374 0xf << 12 |
2375 static_cast<uint32_t>(rd) << 8 |
2376 B7 |
2377 static_cast<uint32_t>(rm);
2378 Emit32(encoding);
2379}
2380
2381
2382void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2383 CheckCondition(cond);
2384 bool must_be_32bit = force_32bit_;
2385 if (IsHighRegister(rd)|| imm16 >= 256u) {
2386 must_be_32bit = true;
2387 }
2388
2389 if (must_be_32bit) {
2390 // Use encoding T3.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002391 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2392 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2393 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002394 uint32_t imm8 = imm16 & 0xff;
2395 int32_t encoding = B31 | B30 | B29 | B28 |
2396 B25 | B22 |
2397 static_cast<uint32_t>(rd) << 8 |
2398 i << 26 |
2399 imm4 << 16 |
2400 imm3 << 12 |
2401 imm8;
2402 Emit32(encoding);
2403 } else {
2404 int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
2405 imm16;
2406 Emit16(encoding);
2407 }
2408}
2409
2410
2411void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2412 CheckCondition(cond);
2413 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002414 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2415 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2416 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002417 uint32_t imm8 = imm16 & 0xff;
2418 int32_t encoding = B31 | B30 | B29 | B28 |
2419 B25 | B23 | B22 |
2420 static_cast<uint32_t>(rd) << 8 |
2421 i << 26 |
2422 imm4 << 16 |
2423 imm3 << 12 |
2424 imm8;
2425 Emit32(encoding);
2426}
2427
2428
2429void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2430 CHECK_NE(rn, kNoRegister);
2431 CHECK_NE(rt, kNoRegister);
2432 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002433 CHECK_LT(imm, (1u << 10));
2434
2435 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2436 static_cast<uint32_t>(rn) << 16 |
2437 static_cast<uint32_t>(rt) << 12 |
2438 0xf << 8 |
2439 imm >> 2;
2440 Emit32(encoding);
2441}
2442
2443
// LDREX Rt, [Rn] -- exclusive load with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2447
2448
2449void Thumb2Assembler::strex(Register rd,
2450 Register rt,
2451 Register rn,
2452 uint16_t imm,
2453 Condition cond) {
2454 CHECK_NE(rn, kNoRegister);
2455 CHECK_NE(rd, kNoRegister);
2456 CHECK_NE(rt, kNoRegister);
2457 CheckCondition(cond);
2458 CHECK_LT(imm, (1u << 10));
2459
2460 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2461 static_cast<uint32_t>(rn) << 16 |
2462 static_cast<uint32_t>(rt) << 12 |
2463 static_cast<uint32_t>(rd) << 8 |
2464 imm >> 2;
2465 Emit32(encoding);
2466}
2467
2468
Calin Juravle52c48962014-12-16 17:02:57 +00002469void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
2470 CHECK_NE(rn, kNoRegister);
2471 CHECK_NE(rt, kNoRegister);
2472 CHECK_NE(rt2, kNoRegister);
2473 CHECK_NE(rt, rt2);
2474 CheckCondition(cond);
2475
2476 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
2477 static_cast<uint32_t>(rn) << 16 |
2478 static_cast<uint32_t>(rt) << 12 |
2479 static_cast<uint32_t>(rt2) << 8 |
2480 B6 | B5 | B4 | B3 | B2 | B1 | B0;
2481 Emit32(encoding);
2482}
2483
2484
// STREX Rd, Rt, [Rn] -- exclusive store with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2491
2492
Calin Juravle52c48962014-12-16 17:02:57 +00002493void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
2494 CHECK_NE(rd, kNoRegister);
2495 CHECK_NE(rn, kNoRegister);
2496 CHECK_NE(rt, kNoRegister);
2497 CHECK_NE(rt2, kNoRegister);
2498 CHECK_NE(rt, rt2);
2499 CHECK_NE(rd, rt);
2500 CHECK_NE(rd, rt2);
2501 CheckCondition(cond);
2502
2503 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
2504 static_cast<uint32_t>(rn) << 16 |
2505 static_cast<uint32_t>(rt) << 12 |
2506 static_cast<uint32_t>(rt2) << 8 |
2507 B6 | B5 | B4 |
2508 static_cast<uint32_t>(rd);
2509 Emit32(encoding);
2510}
2511
2512
Dave Allison65fcc2c2014-04-28 13:45:27 -07002513void Thumb2Assembler::clrex(Condition cond) {
2514 CheckCondition(cond);
2515 int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
2516 B21 | B20 |
2517 0xf << 16 |
2518 B15 |
2519 0xf << 8 |
2520 B5 |
2521 0xf;
2522 Emit32(encoding);
2523}
2524
2525
2526void Thumb2Assembler::nop(Condition cond) {
2527 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002528 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002529 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002530 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002531}
2532
2533
2534void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
2535 CHECK_NE(sn, kNoSRegister);
2536 CHECK_NE(rt, kNoRegister);
2537 CHECK_NE(rt, SP);
2538 CHECK_NE(rt, PC);
2539 CheckCondition(cond);
2540 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2541 B27 | B26 | B25 |
2542 ((static_cast<int32_t>(sn) >> 1)*B16) |
2543 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2544 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2545 Emit32(encoding);
2546}
2547
2548
2549void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
2550 CHECK_NE(sn, kNoSRegister);
2551 CHECK_NE(rt, kNoRegister);
2552 CHECK_NE(rt, SP);
2553 CHECK_NE(rt, PC);
2554 CheckCondition(cond);
2555 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2556 B27 | B26 | B25 | B20 |
2557 ((static_cast<int32_t>(sn) >> 1)*B16) |
2558 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2559 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2560 Emit32(encoding);
2561}
2562
2563
2564void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
2565 Condition cond) {
2566 CHECK_NE(sm, kNoSRegister);
2567 CHECK_NE(sm, S31);
2568 CHECK_NE(rt, kNoRegister);
2569 CHECK_NE(rt, SP);
2570 CHECK_NE(rt, PC);
2571 CHECK_NE(rt2, kNoRegister);
2572 CHECK_NE(rt2, SP);
2573 CHECK_NE(rt2, PC);
2574 CheckCondition(cond);
2575 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2576 B27 | B26 | B22 |
2577 (static_cast<int32_t>(rt2)*B16) |
2578 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2579 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2580 (static_cast<int32_t>(sm) >> 1);
2581 Emit32(encoding);
2582}
2583
2584
2585void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
2586 Condition cond) {
2587 CHECK_NE(sm, kNoSRegister);
2588 CHECK_NE(sm, S31);
2589 CHECK_NE(rt, kNoRegister);
2590 CHECK_NE(rt, SP);
2591 CHECK_NE(rt, PC);
2592 CHECK_NE(rt2, kNoRegister);
2593 CHECK_NE(rt2, SP);
2594 CHECK_NE(rt2, PC);
2595 CHECK_NE(rt, rt2);
2596 CheckCondition(cond);
2597 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2598 B27 | B26 | B22 | B20 |
2599 (static_cast<int32_t>(rt2)*B16) |
2600 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2601 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
2602 (static_cast<int32_t>(sm) >> 1);
2603 Emit32(encoding);
2604}
2605
2606
2607void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
2608 Condition cond) {
2609 CHECK_NE(dm, kNoDRegister);
2610 CHECK_NE(rt, kNoRegister);
2611 CHECK_NE(rt, SP);
2612 CHECK_NE(rt, PC);
2613 CHECK_NE(rt2, kNoRegister);
2614 CHECK_NE(rt2, SP);
2615 CHECK_NE(rt2, PC);
2616 CheckCondition(cond);
2617 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2618 B27 | B26 | B22 |
2619 (static_cast<int32_t>(rt2)*B16) |
2620 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2621 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2622 (static_cast<int32_t>(dm) & 0xf);
2623 Emit32(encoding);
2624}
2625
2626
2627void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
2628 Condition cond) {
2629 CHECK_NE(dm, kNoDRegister);
2630 CHECK_NE(rt, kNoRegister);
2631 CHECK_NE(rt, SP);
2632 CHECK_NE(rt, PC);
2633 CHECK_NE(rt2, kNoRegister);
2634 CHECK_NE(rt2, SP);
2635 CHECK_NE(rt2, PC);
2636 CHECK_NE(rt, rt2);
2637 CheckCondition(cond);
2638 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2639 B27 | B26 | B22 | B20 |
2640 (static_cast<int32_t>(rt2)*B16) |
2641 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
2642 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
2643 (static_cast<int32_t>(dm) & 0xf);
2644 Emit32(encoding);
2645}
2646
2647
2648void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2649 const Address& addr = static_cast<const Address&>(ad);
2650 CHECK_NE(sd, kNoSRegister);
2651 CheckCondition(cond);
2652 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2653 B27 | B26 | B24 | B20 |
2654 ((static_cast<int32_t>(sd) & 1)*B22) |
2655 ((static_cast<int32_t>(sd) >> 1)*B12) |
2656 B11 | B9 | addr.vencoding();
2657 Emit32(encoding);
2658}
2659
2660
2661void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2662 const Address& addr = static_cast<const Address&>(ad);
2663 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2664 CHECK_NE(sd, kNoSRegister);
2665 CheckCondition(cond);
2666 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2667 B27 | B26 | B24 |
2668 ((static_cast<int32_t>(sd) & 1)*B22) |
2669 ((static_cast<int32_t>(sd) >> 1)*B12) |
2670 B11 | B9 | addr.vencoding();
2671 Emit32(encoding);
2672}
2673
2674
2675void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2676 const Address& addr = static_cast<const Address&>(ad);
2677 CHECK_NE(dd, kNoDRegister);
2678 CheckCondition(cond);
2679 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2680 B27 | B26 | B24 | B20 |
2681 ((static_cast<int32_t>(dd) >> 4)*B22) |
2682 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2683 B11 | B9 | B8 | addr.vencoding();
2684 Emit32(encoding);
2685}
2686
2687
2688void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2689 const Address& addr = static_cast<const Address&>(ad);
2690 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2691 CHECK_NE(dd, kNoDRegister);
2692 CheckCondition(cond);
2693 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2694 B27 | B26 | B24 |
2695 ((static_cast<int32_t>(dd) >> 4)*B22) |
2696 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2697 B11 | B9 | B8 | addr.vencoding();
2698 Emit32(encoding);
2699}
2700
2701
// VPUSH of `nregs` consecutive single-precision registers starting at `reg`.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2705
2706
// VPUSH of `nregs` consecutive double-precision registers starting at `reg`.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2710
2711
// VPOP of `nregs` consecutive single-precision registers starting at `reg`.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2715
2716
// VPOP of `nregs` consecutive double-precision registers starting at `reg`.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2720
2721
2722void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
2723 CheckCondition(cond);
2724
2725 uint32_t D;
2726 uint32_t Vd;
2727 if (dbl) {
2728 // Encoded as D:Vd.
2729 D = (reg >> 4) & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002730 Vd = reg & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002731 } else {
2732 // Encoded as Vd:D.
2733 D = reg & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002734 Vd = (reg >> 1) & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002735 }
2736 int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
2737 B11 | B9 |
2738 (dbl ? B8 : 0) |
2739 (push ? B24 : (B23 | B20)) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07002740 14U /* 0b1110 */ << 28 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002741 nregs << (dbl ? 1 : 0) |
2742 D << 22 |
2743 Vd << 12;
2744 Emit32(encoding);
2745}
2746
2747
2748void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
2749 SRegister sd, SRegister sn, SRegister sm) {
2750 CHECK_NE(sd, kNoSRegister);
2751 CHECK_NE(sn, kNoSRegister);
2752 CHECK_NE(sm, kNoSRegister);
2753 CheckCondition(cond);
2754 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2755 B27 | B26 | B25 | B11 | B9 | opcode |
2756 ((static_cast<int32_t>(sd) & 1)*B22) |
2757 ((static_cast<int32_t>(sn) >> 1)*B16) |
2758 ((static_cast<int32_t>(sd) >> 1)*B12) |
2759 ((static_cast<int32_t>(sn) & 1)*B7) |
2760 ((static_cast<int32_t>(sm) & 1)*B5) |
2761 (static_cast<int32_t>(sm) >> 1);
2762 Emit32(encoding);
2763}
2764
2765
2766void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
2767 DRegister dd, DRegister dn, DRegister dm) {
2768 CHECK_NE(dd, kNoDRegister);
2769 CHECK_NE(dn, kNoDRegister);
2770 CHECK_NE(dm, kNoDRegister);
2771 CheckCondition(cond);
2772 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2773 B27 | B26 | B25 | B11 | B9 | B8 | opcode |
2774 ((static_cast<int32_t>(dd) >> 4)*B22) |
2775 ((static_cast<int32_t>(dn) & 0xf)*B16) |
2776 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2777 ((static_cast<int32_t>(dn) >> 4)*B7) |
2778 ((static_cast<int32_t>(dm) >> 4)*B5) |
2779 (static_cast<int32_t>(dm) & 0xf);
2780 Emit32(encoding);
2781}
2782
2783
2784void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
2785 SRegister sd, DRegister dm) {
2786 CHECK_NE(sd, kNoSRegister);
2787 CHECK_NE(dm, kNoDRegister);
2788 CheckCondition(cond);
2789 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2790 B27 | B26 | B25 | B11 | B9 | opcode |
2791 ((static_cast<int32_t>(sd) & 1)*B22) |
2792 ((static_cast<int32_t>(sd) >> 1)*B12) |
2793 ((static_cast<int32_t>(dm) >> 4)*B5) |
2794 (static_cast<int32_t>(dm) & 0xf);
2795 Emit32(encoding);
2796}
2797
2798
2799void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
2800 DRegister dd, SRegister sm) {
2801 CHECK_NE(dd, kNoDRegister);
2802 CHECK_NE(sm, kNoSRegister);
2803 CheckCondition(cond);
2804 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2805 B27 | B26 | B25 | B11 | B9 | opcode |
2806 ((static_cast<int32_t>(dd) >> 4)*B22) |
2807 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2808 ((static_cast<int32_t>(sm) & 1)*B5) |
2809 (static_cast<int32_t>(sm) >> 1);
2810 Emit32(encoding);
2811}
2812
2813
2814void Thumb2Assembler::vmstat(Condition cond) { // VMRS APSR_nzcv, FPSCR.
Calin Juravleddb7df22014-11-25 20:56:51 +00002815 CHECK_NE(cond, kNoCondition);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002816 CheckCondition(cond);
Calin Juravleddb7df22014-11-25 20:56:51 +00002817 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2818 B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
2819 (static_cast<int32_t>(PC)*B12) |
2820 B11 | B9 | B4;
2821 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002822}
2823
2824
2825void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002826 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002827 int16_t encoding = B15 | B14 | B12 |
2828 B11 | B10 | B9 | B8 |
2829 imm8;
2830 Emit16(encoding);
2831}
2832
2833
2834void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08002835 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002836 int16_t encoding = B15 | B13 | B12 |
2837 B11 | B10 | B9 |
2838 imm8;
2839 Emit16(encoding);
2840}
2841
2842// Convert the given IT state to a mask bit given bit 0 of the first
2843// condition and a shift position.
2844static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
2845 switch (s) {
2846 case kItOmitted: return 1 << shift;
2847 case kItThen: return firstcond0 << shift;
2848 case kItElse: return !firstcond0 << shift;
2849 }
2850 return 0;
2851}
2852
2853
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;
    case kItThen: it_conditions_[index] = cond; break;
    case kItElse:
      // Flipping bit 0 of the condition code yields its inverse (EQ<->NE, ...).
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
2865
2866
// IT (If-Then) -- makes up to four following instructions conditional.
// `firstcond` applies to the first instruction; i1..i3 select Then/Else/
// Omitted for the remaining slots. Also records the expected per-slot
// conditions so CheckCondition() can validate subsequent instructions.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);       // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // Slot 0 always carries the first condition; the mask encodes the rest.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  // Once a slot is omitted, all later slots must be omitted too, so the
  // mask/conditions are only extended while the chain continues.
  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // Full four-instruction block: terminating bit in the mask.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
                      B11 | B10 | B9 | B8 |
                      firstcond << 4 |
                      mask;
  Emit16(encoding);
}
2901
2902
2903void Thumb2Assembler::cbz(Register rn, Label* label) {
2904 CheckCondition(AL);
2905 if (label->IsBound()) {
2906 LOG(FATAL) << "cbz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002907 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002908 } else if (IsHighRegister(rn)) {
2909 LOG(FATAL) << "cbz can only be used with low registers";
2910 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002911 } else {
2912 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
2913 label->LinkTo(branchid);
2914 }
2915}
2916
2917
2918void Thumb2Assembler::cbnz(Register rn, Label* label) {
2919 CheckCondition(AL);
2920 if (label->IsBound()) {
2921 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00002922 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002923 } else if (IsHighRegister(rn)) {
2924 LOG(FATAL) << "cbnz can only be used with low registers";
2925 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07002926 } else {
2927 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
2928 label->LinkTo(branchid);
2929 }
2930}
2931
2932
2933void Thumb2Assembler::blx(Register rm, Condition cond) {
2934 CHECK_NE(rm, kNoRegister);
2935 CheckCondition(cond);
2936 int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
2937 Emit16(encoding);
2938}
2939
2940
2941void Thumb2Assembler::bx(Register rm, Condition cond) {
2942 CHECK_NE(rm, kNoRegister);
2943 CheckCondition(cond);
2944 int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
2945 Emit16(encoding);
2946}
2947
2948
// Pushes a single register: store with pre-decrement of SP.
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
2952
2953
// Pops a single register: load with post-increment of SP.
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
2957
2958
// Pushes a register list via STMDB SP! (decrement-before, writeback).
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
2962
2963
// Pops a register list via LDMIA SP! (increment-after, writeback).
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
2967
2968
// Emits a register move, skipping the no-op case (unconditional move to self).
void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
  if (cond != AL || rd != rm) {
    mov(rd, ShifterOperand(rm), cond);
  }
}
2974
2975
// Binds `label` to the current end of the code buffer.
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
2979
2980
// Logical shift left by immediate; shift_imm must be in [0, 31].
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
}
2987
2988
// Logical shift right by immediate; shift_imm must be in [1, 32].
void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
}
2996
2997
// Arithmetic shift right by immediate; shift_imm must be in [1, 32].
void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 32u);
  if (shift_imm == 32) shift_imm = 0;  // Comply to UAL syntax.
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
}
3005
3006
// Rotate right by immediate; shift_imm must be in [1, 31].
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          Condition cond, SetCc set_cc) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
}
3013
3014
// Rotate right with extend (one-bit rotate through carry).
void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  // NOTE(review): `rm` is passed again as the shift-amount operand,
  // presumably as a placeholder since RRX takes no amount -- confirm
  // against the EmitShift overload used here.
  EmitShift(rd, rm, RRX, rm, cond, set_cc);
}
3019
3020
// Logical shift left with a register-specified shift amount.
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, cond, set_cc);
}
3026
3027
// Logical shift right with a register-specified shift amount.
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, cond, set_cc);
}
3033
3034
// Arithmetic shift right with a register-specified shift amount.
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, cond, set_cc);
}
3040
3041
// Rotate right with a register-specified shift amount.
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          Condition cond, SetCc set_cc) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, cond, set_cc);
}
3047
3048
// Patches the 32-bit branch instruction `inst` with the byte `offset` and
// returns the new encoding. Bit 12 of `inst` distinguishes the wide
// unconditional form (25-bit offset) from the conditional form (21-bit).
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Thumb offsets are in halfwords.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are the intermediate bits XORed with the sign bit, inverted.
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    // Conditional form: J1/J2 are taken directly from the offset bits.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
        imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
3086
3087
// Inverse of EncodeBranchOffset: extracts the signed byte offset from a
// 32-bit branch instruction. Bit 12 selects the 25-bit vs 21-bit form.
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // Undo the J1/J2 transformation applied when encoding.
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  // Compensate for the PC read-ahead removed during encoding.
  imm32 += 4;
  return imm32;
}
3114
// Maps a pre-adjustment code position to its position after fixups that grew
// from their original size have shifted the code. Caches scan state across
// calls (see comment below); callers are expected to pass increasing positions.
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Going backwards: restart the scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;  // This fixup (and all later ones) lies beyond the queried position.
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      // The fixup grew; everything after it is shifted by the size delta.
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3139
// Creates a 4- or 8-byte literal owned by the assembler and returns a pointer
// to it. NOTE(review): the returned pointer must stay valid as more literals
// are added; presumably literals_ is a node-stable container -- confirm.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3145
// Loads a 32-bit literal into `rt` via a PC-relative load; the offset is
// filled in later by the fixup machinery.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  // The narrow (16-bit) load only reaches low registers.
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // Stash the label's previous link in the instruction slot; patched later.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Reserve the second halfword of the 32-bit encoding.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3160
// Loads a 64-bit literal into the register pair rt:rt2 (always a 32-bit
// instruction); the offset is filled in later by the fixup machinery.
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // Stash the label's previous link in the instruction slot; patched later.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Reserve the second halfword.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3172
// Loads a 32-bit literal into single-precision register `sd`; the offset is
// filled in later by the fixup machinery.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // Stash the label's previous link in the instruction slot; patched later.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Reserve the second halfword.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3183
// Loads a 64-bit literal into double-precision register `dd`; the offset is
// filled in later by the fixup machinery.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // Stash the label's previous link in the instruction slot; patched later.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);  // Reserve the second halfword.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003194
// Adds `value` to `rd` in place.
void Thumb2Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}
3198
3199
// rd := rn + value, choosing the shortest available instruction sequence.
// Falls back to materializing the constant in IP, so `rn` must not be IP
// when no single-instruction form fits.
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond) {
  if (value == 0) {
    // Nothing to add; at most a register move is needed.
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    add(rd, rn, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond);
  } else {
    CHECK(rn != IP);
    // Try the bitwise complement in IP, combined with ADD or SUB.
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);
      add(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      sub(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Last resort: build the full 32-bit constant in IP with movw/movt.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
3234
3235
3236void Thumb2Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
3237 Condition cond) {
3238 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003239 if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003240 add(rd, rn, shifter_op, cond, kCcSet);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003241 } else if (ShifterOperandCanHold(rd, rn, ADD, -value, &shifter_op)) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003242 sub(rd, rn, shifter_op, cond, kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003243 } else {
3244 CHECK(rn != IP);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003245 if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003246 mvn(IP, shifter_op, cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003247 add(rd, rn, ShifterOperand(IP), cond, kCcSet);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003248 } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003249 mvn(IP, shifter_op, cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003250 sub(rd, rn, ShifterOperand(IP), cond, kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003251 } else {
3252 movw(IP, Low16Bits(value), cond);
3253 uint16_t value_high = High16Bits(value);
3254 if (value_high != 0) {
3255 movt(IP, value_high, cond);
3256 }
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003257 add(rd, rn, ShifterOperand(IP), cond, kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003258 }
3259 }
3260}
3261
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003262
Dave Allison65fcc2c2014-04-28 13:45:27 -07003263void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3264 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003265 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003266 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003267 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003268 mvn(rd, shifter_op, cond);
3269 } else {
3270 movw(rd, Low16Bits(value), cond);
3271 uint16_t value_high = High16Bits(value);
3272 if (value_high != 0) {
3273 movt(rd, value_high, cond);
3274 }
3275 }
3276}
3277
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003278
Dave Allison65fcc2c2014-04-28 13:45:27 -07003279// Implementation note: this method must emit at most one instruction when
3280// Address::CanHoldLoadOffsetThumb.
3281void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3282 Register reg,
3283 Register base,
3284 int32_t offset,
3285 Condition cond) {
3286 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003287 CHECK_NE(base, IP);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003288 LoadImmediate(IP, offset, cond);
3289 add(IP, IP, ShifterOperand(base), cond);
3290 base = IP;
3291 offset = 0;
3292 }
3293 CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
3294 switch (type) {
3295 case kLoadSignedByte:
3296 ldrsb(reg, Address(base, offset), cond);
3297 break;
3298 case kLoadUnsignedByte:
3299 ldrb(reg, Address(base, offset), cond);
3300 break;
3301 case kLoadSignedHalfword:
3302 ldrsh(reg, Address(base, offset), cond);
3303 break;
3304 case kLoadUnsignedHalfword:
3305 ldrh(reg, Address(base, offset), cond);
3306 break;
3307 case kLoadWord:
3308 ldr(reg, Address(base, offset), cond);
3309 break;
3310 case kLoadWordPair:
3311 ldrd(reg, Address(base, offset), cond);
3312 break;
3313 default:
3314 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003315 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003316 }
3317}
3318
3319
3320// Implementation note: this method must emit at most one instruction when
3321// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3322void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3323 Register base,
3324 int32_t offset,
3325 Condition cond) {
3326 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3327 CHECK_NE(base, IP);
3328 LoadImmediate(IP, offset, cond);
3329 add(IP, IP, ShifterOperand(base), cond);
3330 base = IP;
3331 offset = 0;
3332 }
3333 CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
3334 vldrs(reg, Address(base, offset), cond);
3335}
3336
3337
3338// Implementation note: this method must emit at most one instruction when
3339// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3340void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3341 Register base,
3342 int32_t offset,
3343 Condition cond) {
3344 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3345 CHECK_NE(base, IP);
3346 LoadImmediate(IP, offset, cond);
3347 add(IP, IP, ShifterOperand(base), cond);
3348 base = IP;
3349 offset = 0;
3350 }
3351 CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
3352 vldrd(reg, Address(base, offset), cond);
3353}
3354
3355
// Stores `reg` (or the pair reg/reg+1 for kStoreWordPair) to [base, #offset].
//
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
//
// When the offset is not encodable, base + offset is first computed into a
// scratch register. IP is used when it does not clash with `reg` (or reg + 1
// for a word pair); otherwise R5 (or R6 when base == R5) is saved on the
// stack, used as the scratch, and restored afterwards.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;  // kNoRegister means "no fixup was needed".
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if (reg != IP &&
        (type != kStoreWordPair || reg + 1 != IP)) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store)) and to build the Address
      // object used by the store instruction(s) below). Instead,
      // save R5 on the stack (or R6 if R5 is not available), use it
      // as secondary temporary register, and restore it after the
      // store instruction has been emitted.
      tmp_reg = base != R5 ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The Push above moved SP down by one register; compensate so the
        // SP-relative offset still refers to the intended slot.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    // NOTE(review): this add is emitted with AL while LoadImmediate above
    // uses `cond` — confirm the asymmetry is intentional.
    add(tmp_reg, tmp_reg, ShifterOperand(base), AL);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // Restore the callee-saved scratch register if one was borrowed above
  // (IP needs no save/restore).
  if (tmp_reg != kNoRegister && tmp_reg != IP) {
    DCHECK(tmp_reg == R5 || tmp_reg == R6);
    Pop(tmp_reg);
  }
}
3410
3411
3412// Implementation note: this method must emit at most one instruction when
3413// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3414void Thumb2Assembler::StoreSToOffset(SRegister reg,
3415 Register base,
3416 int32_t offset,
3417 Condition cond) {
3418 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3419 CHECK_NE(base, IP);
3420 LoadImmediate(IP, offset, cond);
3421 add(IP, IP, ShifterOperand(base), cond);
3422 base = IP;
3423 offset = 0;
3424 }
3425 CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
3426 vstrs(reg, Address(base, offset), cond);
3427}
3428
3429
3430// Implementation note: this method must emit at most one instruction when
3431// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3432void Thumb2Assembler::StoreDToOffset(DRegister reg,
3433 Register base,
3434 int32_t offset,
3435 Condition cond) {
3436 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3437 CHECK_NE(base, IP);
3438 LoadImmediate(IP, offset, cond);
3439 add(IP, IP, ShifterOperand(base), cond);
3440 base = IP;
3441 offset = 0;
3442 }
3443 CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
3444 vstrd(reg, Address(base, offset), cond);
3445}
3446
3447
3448void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
3449 CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003450 dmb(SY);
3451}
3452
3453
3454void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003455 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3456 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003457}
3458
3459
3460void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003461 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003462 cbz(r, label);
3463 } else {
3464 cmp(r, ShifterOperand(0));
3465 b(label, EQ);
3466 }
3467}
3468
3469
Dave Allison65fcc2c2014-04-28 13:45:27 -07003470void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003471 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003472 cbnz(r, label);
3473 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003474 cmp(r, ShifterOperand(0));
3475 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003476 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003477}
3478} // namespace arm
3479} // namespace art