blob: 1e71d06b4918f35639a50dae0ac0ddba8b71e1d1 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Markoa64f2492016-04-25 12:43:50 +000017#include <type_traits>
18
Dave Allison65fcc2c2014-04-28 13:45:27 -070019#include "assembler_thumb2.h"
20
Vladimir Marko80afd022015-05-19 18:08:00 +010021#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070022#include "base/logging.h"
23#include "entrypoints/quick/quick_entrypoints.h"
24#include "offsets.h"
25#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070026
27namespace art {
28namespace arm {
29
// Invokes `fn(dependency_id, dependent_id)` for every expandable Fixup that lies between
// each Fixup and its branch target. A Fixup located between a branch and its target can,
// by expanding from 16-bit to 32-bit form, change the branch distance; such a Fixup is
// therefore a "dependency" of the branch ("dependent").
template <typename Function>
void Thumb2Assembler::Fixup::ForExpandableDependencies(Thumb2Assembler* assembler, Function fn) {
  static_assert(
      std::is_same<typename std::result_of<Function(FixupId, FixupId)>::type, void>::value,
      "Incorrect signature for argument `fn`: expected (FixupId, FixupId) -> void");
  Fixup* fixups = assembler->fixups_.data();
  for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {
    uint32_t target = fixups[fixup_id].target_;
    if (target > fixups[fixup_id].location_) {
      // Forward branch: scan the following fixups up to (not including) the target.
      for (FixupId id = fixup_id + 1u; id != end_id && fixups[id].location_ < target; ++id) {
        if (fixups[id].CanExpand()) {
          fn(id, fixup_id);
        }
      }
    } else {
      // Backward branch: scan the preceding fixups (including this one) back to the target.
      for (FixupId id = fixup_id; id != 0u && fixups[id - 1u].location_ >= target; --id) {
        if (fixups[id - 1u].CanExpand()) {
          fn(id - 1u, fixup_id);
        }
      }
    }
  }
}
53
void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {
  // For each Fixup, it's easy to find the Fixups that it depends on as they are either
  // the following or the preceding Fixups until we find the target. However, for fixup
  // adjustment we need the reverse lookup, i.e. what Fixups depend on a given Fixup.
  // This function creates a compact representation of this relationship, where we have
  // all the dependents in a single array and Fixups reference their ranges by start
  // index and count. (Instead of having a per-fixup vector.)
  // The layout is built counting-sort style: count, prefix-sum, then fill backwards.

  // Count the number of dependents of each Fixup.
  Fixup* fixups = assembler->fixups_.data();
  ForExpandableDependencies(
      assembler,
      [fixups](FixupId dependency, FixupId dependent ATTRIBUTE_UNUSED) {
        fixups[dependency].dependents_count_ += 1u;
      });
  // Assign index ranges in fixup_dependents_ to individual fixups. Record the end of the
  // range in dependents_start_, we shall later decrement it as we fill in fixup_dependents_.
  uint32_t number_of_dependents = 0u;
  for (FixupId fixup_id = 0u, end_id = assembler->fixups_.size(); fixup_id != end_id; ++fixup_id) {
    number_of_dependents += fixups[fixup_id].dependents_count_;
    fixups[fixup_id].dependents_start_ = number_of_dependents;
  }
  if (number_of_dependents == 0u) {
    return;
  }
  // Create and fill in the fixup_dependents_. The second enumeration visits dependencies
  // in the same order as the first, so decrementing dependents_start_ lands each entry in
  // its fixup's reserved range, leaving dependents_start_ at the range's first index.
  assembler->fixup_dependents_.resize(number_of_dependents);
  FixupId* dependents = assembler->fixup_dependents_.data();
  ForExpandableDependencies(
      assembler,
      [fixups, dependents](FixupId dependency, FixupId dependent) {
        fixups[dependency].dependents_start_ -= 1u;
        dependents[fixups[dependency].dependents_start_] = dependent;
      });
}
89
// Binds `label` to `bound_pc`, resolving every Fixup on the label's chain.
// While unbound, the label's position holds the id of the first linked Fixup, and the
// 16-bit placeholder stored at each Fixup's location holds the id of the next one.
// The placeholders are reset to 0; AdjustFixups() later relies on them starting as 0.
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
104
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700105uint32_t Thumb2Assembler::BindLiterals() {
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000106 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
107 uint32_t code_size = buffer_.Size();
108 for (Literal& lit : literals_) {
109 Label* label = lit.GetLabel();
110 BindLabel(label, code_size);
111 code_size += lit.GetSize();
112 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700113 return code_size;
114}
115
116void Thumb2Assembler::BindJumpTables(uint32_t code_size) {
117 for (JumpTable& table : jump_tables_) {
118 Label* label = table.GetLabel();
119 BindLabel(label, code_size);
120 code_size += table.GetSize();
121 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +0000122}
123
// Asks `fixup` whether it must grow for the current code size; if it does, grows the code
// size and queues all of its dependents for recalculation. The 16-bit placeholder stored
// at a dependent's location doubles as an "already queued" flag (0 = not queued, 1 = queued)
// so each dependent appears in `fixups_to_recalculate` at most once.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    DCHECK(fixup->CanExpand());
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents(*this)) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
140
// Iterates Fixup size adjustment to a fixed point and returns the final code size.
// Each expansion can push other branches out of range, so dependents are requeued until
// no Fixup needs to grow. Finally, bound literal and jump table labels are shifted by
// the total growth (plus alignment padding for the literal pool).
uint32_t Thumb2Assembler::AdjustFixups() {
  Fixup::PrepareDependents(this);
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // Initial pass: give every Fixup a chance to expand for the current code size.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  while (!fixups_to_recalculate.empty()) {
    do {
      // Pop the fixup.
      FixupId fixup_id = fixups_to_recalculate.front();
      fixups_to_recalculate.pop_front();
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
      buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" marker.
      // See if it needs adjustment.
      AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
    } while (!fixups_to_recalculate.empty());

    if ((current_code_size & 2) != 0 && (!literals_.empty() || !jump_tables_.empty())) {
      // If we need to add padding before literals, this may just push some out of range,
      // so recalculate all load literals. This makes up for the fact that we don't mark
      // load literal as a dependency of all previous Fixups even though it actually is.
      for (Fixup& fixup : fixups_) {
        if (fixup.IsLoadLiteral()) {
          AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
        }
      }
    }
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
    for (JumpTable& table : jump_tables_) {
      Label* label = table.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
207
// Expands the buffer to the adjusted size, then walks the Fixups from last to first,
// sliding each run of non-fixup code between fixups to its final location and emitting
// the fixups' final encodings. Reverse order guarantees each byte is moved only once
// (destinations are at or beyond sources since the code only grows).
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  CHECK_EQ(src_end, dest_end);  // Everything before the first fixup stays in place.
}
238
// Appends the literal pool to the buffer, inserting a 2-byte NOP-padding halfword first
// if needed to reach 4-byte alignment. Each literal's bound label position must match
// the position where its data is actually emitted.
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Align the pool to 4 bytes.
    }
    for (Literal& literal : literals_) {
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
258
// Appends all jump tables to the buffer. Each table entry is a 32-bit offset of a target
// label relative to the table's anchor position + 4 (the Thumb PC bias).
void Thumb2Assembler::EmitJumpTables() {
  if (!jump_tables_.empty()) {
    // Jump tables require 4 byte alignment. (We don't support byte and half-word jump tables.)
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);
    }
    for (JumpTable& table : jump_tables_) {
      // Bulk ensure capacity, as this may be large.
      size_t orig_size = buffer_.Size();
      size_t required_capacity = orig_size + table.GetSize();
      if (required_capacity > buffer_.Capacity()) {
        buffer_.ExtendCapacity(required_capacity);
      }
#ifndef NDEBUG
      // Emitting below bypasses EnsureCapacity, so set the debug flag manually.
      buffer_.has_ensured_capacity_ = true;
#endif

      DCHECK_EQ(static_cast<size_t>(table.GetLabel()->Position()), buffer_.Size());
      int32_t anchor_position = table.GetAnchorLabel()->Position() + 4;

      for (Label* target : table.GetData()) {
        // Ensure that the label was tracked, so that it will have the right position.
        DCHECK(std::find(tracked_labels_.begin(), tracked_labels_.end(), target) !=
               tracked_labels_.end());

        int32_t offset = target->Position() - anchor_position;
        buffer_.Emit<int32_t>(offset);
      }

#ifndef NDEBUG
      buffer_.has_ensured_capacity_ = false;
#endif
      size_t new_size = buffer_.Size();
      DCHECK_LE(new_size - orig_size, table.GetSize());
    }
  }
}
298
// Rewrites the CFI stream, replacing each delayed advance-PC with an advance to the PC's
// final (post-fixup-adjustment) position. No-op when no advances were delayed.
void Thumb2Assembler::PatchCFI() {
  if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
    return;
  }

  typedef DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC DelayedAdvancePC;
  const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
  const std::vector<uint8_t>& old_stream = data.first;
  const std::vector<DelayedAdvancePC>& advances = data.second;

  // Refill our data buffer with patched opcodes.
  // Reserve a little extra room for the advance opcodes to avoid reallocation.
  cfi().ReserveCFIStream(old_stream.size() + advances.size() + 16);
  size_t stream_pos = 0;
  for (const DelayedAdvancePC& advance : advances) {
    DCHECK_GE(advance.stream_pos, stream_pos);
    // Copy old data up to the point where advance was issued.
    cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
    stream_pos = advance.stream_pos;
    // Insert the advance command with its final offset.
    size_t final_pc = GetAdjustedPosition(advance.pc);
    cfi().AdvancePC(final_pc);
  }
  // Copy the final segment if any.
  cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
}
324
// Encodes a 16-bit Thumb branch: conditional B<c> (8-bit signed halfword offset) or
// unconditional B (11-bit signed halfword offset).
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}

// Encodes a 32-bit Thumb2 branch: conditional B<c>.W (21-bit offset) or unconditional
// B.W (25-bit offset). The sign bit S and the J1/J2 bits are packed per the Thumb2
// branch encodings; for the unconditional form J1/J2 are derived from I1/I2 via S.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |   // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));    // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
360
// Encodes a 16-bit CBZ (cond == EQ) or CBNZ (cond == NE) with an unsigned halfword offset.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |    // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));     // Move i from bit 6 to bit 11
}

// Encodes a 16-bit CMP Rn, #imm8.
inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
  DCHECK(!IsHighRegister(rn));
  DCHECK(IsUint<8>(value));
  return B13 | B11 | (rn << 8) | value;
}

// Encodes a 16-bit ADD Rdn, Rm (the form that allows high registers in Rdn).
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rn is moved across 4-bit rm.
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}

// Encodes a 32-bit MOVW Rd, #imm16 (zero-extended 16-bit immediate).
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}

// Encodes a 32-bit MOVT Rd, #imm16 taking the high halfword of `value`;
// the low halfword must be zero. MOVT differs from MOVW only in bits 25 and 23.
inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
  DCHECK_EQ(value & 0xffff, 0);
  int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
  return movw_encoding | B25 | B23;
}

// Encodes a 32-bit MOV Rd, #<modified-immediate>; `value` must be representable
// as a Thumb2 modified immediate.
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}

// Encodes a 16-bit PC-relative literal load, LDR Rt, [PC, #imm8*4].
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}
412
// Encodes a 32-bit PC-relative literal load as LDR.W Rt, [PC, #imm12].
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}

// Encodes a 32-bit LDRD Rt, Rt2, [Rn, #imm8*4] with P=1, U=1, W=0 (positive
// immediate offset, no writeback).
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}

// Encodes VLDR Sd, [Rn, #imm8*4] (single-precision, positive offset only).
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}

// Encodes VLDR Dd, [Rn, #imm8*4] (double-precision, positive offset only).
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |   // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |   // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}

// Encodes a 16-bit LDR Rt, [Rn, #imm5*4] (low registers, word-aligned offset).
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));    // Move imm5 from bits 2-6 to bits 6-10.
}
458
// Dispatches to the wide/FP literal load encoding matching this Fixup's type,
// loading from `rbase` + `offset`. Only the three literal load types are valid here.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}

// Encodes a 32-bit LDR.W Rt, [Rn, #imm12] (positive 12-bit byte offset).
inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
}
477
Andreas Gampe7cffc3b2015-10-19 21:31:53 -0700478inline int16_t Thumb2Assembler::AdrEncoding16(Register rd, int32_t offset) {
479 DCHECK(IsUint<10>(offset));
480 DCHECK(IsAligned<4>(offset));
481 DCHECK(!IsHighRegister(rd));
482 return B15 | B13 | (rd << 8) | (offset >> 2);
483}
484
485inline int32_t Thumb2Assembler::AdrEncoding32(Register rd, int32_t offset) {
486 DCHECK(IsUint<12>(offset));
487 // Bit 26: offset[11]
488 // Bits 14-12: offset[10-8]
489 // Bits 7-0: offset[7-0]
490 int32_t immediate_mask =
491 ((offset & (1 << 11)) << (26 - 11)) |
492 ((offset & (7 << 8)) << (12 - 8)) |
493 (offset & 0xFF);
494 return B31 | B30 | B29 | B28 | B25 | B19 | B18 | B17 | B16 | (rd << 8) | immediate_mask;
495}
496
// Finalizes the code: binds literals and jump tables to tentative positions, grows
// out-of-range fixups to their 32-bit forms, emits the adjusted fixups, literal pool
// and jump tables, and patches delayed CFI advances. The order is significant: fixup
// sizes must be final before anything is emitted at its final position.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  uint32_t size_after_literals = BindLiterals();
  BindJumpTables(size_after_literals);
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
  FinalizeTrackedLabels();
  EmitJumpTables();
  PatchCFI();
}
508
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +0100509bool Thumb2Assembler::ShifterOperandCanAlwaysHold(uint32_t immediate) {
510 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
511}
512
// Returns true if `immediate` can be used as a shifter operand for `opcode`, filling
// *shifter_op with the immediate operand as a side effect. ADD/SUB additionally accept
// any 12-bit immediate (ADDW/SUBW forms), but only when condition codes are not set.
bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
                                            Register rn ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            uint32_t immediate,
                                            SetCc set_cc,
                                            ShifterOperand* shifter_op) {
  shifter_op->type_ = ShifterOperand::kImmediate;
  shifter_op->immed_ = immediate;
  shifter_op->is_shift_ = false;
  shifter_op->is_rotate_ = false;
  switch (opcode) {
    case ADD:
    case SUB:
      // Less than (or equal to) 12 bits can be done if we don't need to set condition codes.
      if (immediate < (1 << 12) && set_cc != kCcSet) {
        return true;
      }
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MOV:
      // TODO: Support less than or equal to 12bits.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;

    case MVN:
    default:
      // All remaining data-processing opcodes take only modified immediates.
      return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
  }
}
541
// Thin wrappers over EmitDataProcessing for the ARM data-processing instructions.
// Comparison/test instructions (tst, teq, cmp, cmn) always set condition codes and
// discard the result (R0 is passed as a dummy destination); mov/mvn take no rn.

void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, AND, set_cc, rn, rd, so);
}


void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, EOR, set_cc, rn, rd, so);
}


void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SUB, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSB, set_cc, rn, rd, so);
}


void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADD, set_cc, rn, rd, so);
}


void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ADC, set_cc, rn, rd, so);
}


void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, SBC, set_cc, rn, rd, so);
}


void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, RSC, set_cc, rn, rd, so);
}


void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, kCcSet, rn, R0, so);
}


void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, kCcSet, rn, R0, so);
}


void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, kCcSet, rn, R0, so);
}


void Thumb2Assembler::orr(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORR, set_cc, rn, rd, so);
}


void Thumb2Assembler::orn(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, ORN, set_cc, rn, rd, so);
}


void Thumb2Assembler::mov(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MOV, set_cc, R0, rd, so);
}


void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, BIC, set_cc, rn, rd, so);
}


void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so,
                          Condition cond, SetCc set_cc) {
  EmitDataProcessing(cond, MVN, set_cc, R0, rd, so);
}
640
641
// Emits MUL rd, rn, rm. Uses the 16-bit MULS encoding when rd == rm, both operands are
// low registers and 32-bit encodings are not forced; otherwise emits the 32-bit MUL.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}


// Emits the 32-bit MLA rd, rn, rm, ra (rd := ra + rn * rm).
void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
                          Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000 */;
  uint32_t op2 = 0U /* 0b00 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(ra) << 12 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
683
684
685void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
686 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700687 CheckCondition(cond);
688
Andreas Gampec8ccf682014-09-29 20:07:43 -0700689 uint32_t op1 = 0U /* 0b000 */;
690 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700691 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
692 op1 << 20 |
693 op2 << 4 |
694 static_cast<uint32_t>(rd) << 8 |
695 static_cast<uint32_t>(ra) << 12 |
696 static_cast<uint32_t>(rn) << 16 |
697 static_cast<uint32_t>(rm);
698
699 Emit32(encoding);
700}
701
702
Zheng Xuc6667102015-05-15 16:08:45 +0800703void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
704 Register rm, Condition cond) {
705 CheckCondition(cond);
706
707 uint32_t op1 = 0U /* 0b000; */;
708 uint32_t op2 = 0U /* 0b0000 */;
709 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
710 op1 << 20 |
711 op2 << 4 |
712 static_cast<uint32_t>(rd_lo) << 12 |
713 static_cast<uint32_t>(rd_hi) << 8 |
714 static_cast<uint32_t>(rn) << 16 |
715 static_cast<uint32_t>(rm);
716
717 Emit32(encoding);
718}
719
720
Dave Allison65fcc2c2014-04-28 13:45:27 -0700721void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
722 Register rm, Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700723 CheckCondition(cond);
724
Andreas Gampec8ccf682014-09-29 20:07:43 -0700725 uint32_t op1 = 2U /* 0b010; */;
726 uint32_t op2 = 0U /* 0b0000 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700727 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
728 op1 << 20 |
729 op2 << 4 |
730 static_cast<uint32_t>(rd_lo) << 12 |
731 static_cast<uint32_t>(rd_hi) << 8 |
732 static_cast<uint32_t>(rn) << 16 |
733 static_cast<uint32_t>(rm);
734
735 Emit32(encoding);
736}
737
738
// SDIV (signed divide): rd = rn / rm.
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  // Note: B20 and `op1 << 20` both set bit 20 — redundant but harmless.
  // The 0xf in bits 12-15 is a constant field required by the encoding.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
754
755
// UDIV (unsigned divide): rd = rn / rm. Same shape as sdiv() plus B21.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  // Note: B20 and `op1 << 20` both set bit 20 — redundant but harmless.
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
771
772
Roland Levillain51d3fc42014-11-13 14:11:42 +0000773void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
774 CheckCondition(cond);
775 CHECK_LE(lsb, 31U);
776 CHECK(1U <= width && width <= 32U) << width;
777 uint32_t widthminus1 = width - 1;
778 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
779 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
780
781 uint32_t op = 20U /* 0b10100 */;
782 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
783 op << 20 |
784 static_cast<uint32_t>(rn) << 16 |
785 imm3 << 12 |
786 static_cast<uint32_t>(rd) << 8 |
787 imm2 << 6 |
788 widthminus1;
789
790 Emit32(encoding);
791}
792
793
Roland Levillain981e4542014-11-14 11:47:14 +0000794void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
795 CheckCondition(cond);
796 CHECK_LE(lsb, 31U);
797 CHECK(1U <= width && width <= 32U) << width;
798 uint32_t widthminus1 = width - 1;
799 uint32_t imm2 = lsb & (B1 | B0); // Bits 0-1 of `lsb`.
800 uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2; // Bits 2-4 of `lsb`.
801
802 uint32_t op = 28U /* 0b11100 */;
803 int32_t encoding = B31 | B30 | B29 | B28 | B25 |
804 op << 20 |
805 static_cast<uint32_t>(rn) << 16 |
806 imm3 << 12 |
807 static_cast<uint32_t>(rd) << 8 |
808 imm2 << 6 |
809 widthminus1;
810
811 Emit32(encoding);
812}
813
814
Dave Allison65fcc2c2014-04-28 13:45:27 -0700815void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
816 EmitLoadStore(cond, true, false, false, false, rd, ad);
817}
818
819
// STR: store a word. Flags to EmitLoadStore: load=false, byte=false, half=false, signed=false.
void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}
823
824
// LDRB: load an unsigned byte (byte=true, signed=false).
void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}
828
829
// STRB: store a byte (byte=true).
void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}
833
834
// LDRH: load an unsigned halfword (half=true, signed=false).
void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}
838
839
// STRH: store a halfword (half=true).
void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}
843
844
// LDRSB: load a sign-extended byte (byte=true, signed=true).
void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}
848
849
// LDRSH: load a sign-extended halfword (half=true, signed=true).
void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
853
854
// LDRD convenience form: loads the pair {rd, rd+1}. Delegates to the two-register
// overload below.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}
858
859
// LDRD (load doubleword): loads rd and rd2 from consecutive words at `ad`.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
870
871
// STRD convenience form: stores the pair {rd, rd+1}. Delegates to the two-register
// overload below.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}
875
876
// STRD (store doubleword): stores rd and rd2 to consecutive words at `ad`.
// Same as ldrd() above minus the load bit (B20).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads.  The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
887
888
// LDM (load multiple): loads the registers in `regs` from memory at `base`
// using block-address mode `am`. A single-register list is rewritten as a
// plain LDR because Thumb LDM does not support one-register lists.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): DB_W is mapped to a post-indexed LDR with +kRegisterSize —
    // presumably matching the writeback semantics callers rely on; verify
    // against EmitMultiMemOp and the call sites.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
905
906
// STM (store multiple): stores the registers in `regs` to memory at `base`
// using block-address mode `am`. A single-register list is rewritten as a
// plain STR because Thumb STM does not support one-register lists.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): IA maps to a pre-indexed STR at -kRegisterSize and IA_W to
    // a plain offset — this inversion looks surprising for increment-after
    // semantics; confirm it matches the writeback behavior the callers expect.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
924
925
// VMOV.F32 sd, #imm: attempts to materialize `s_imm` with the VFP 8-bit
// immediate form. Returns true and emits the instruction if the float can be
// represented (sign, specific exponent range, 4-bit mantissa, rest zero);
// returns false without emitting anything otherwise.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  // Low 19 mantissa bits must be zero, and the 6 bits at [25..30] must be
  // all-ones-with-leading-zero or all-zeros-with-leading-one (exponent range).
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign, one exponent bit, and the top mantissa bits into 8 bits.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
939
940
// VMOV.F64 dd, #imm: double-precision analogue of vmovs(imm) above. Returns
// true and emits the instruction only when `d_imm` fits the VFP 8-bit
// immediate form; returns false without emitting anything otherwise.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  // Low 48 mantissa bits must be zero, and the 9 bits at [54..62] must be
  // all-ones-with-leading-zero or all-zeros-with-leading-one (exponent range).
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign, one exponent bit, and the top mantissa bits into 8 bits.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
954
955
// VMOV.F32: sd = sm (single-precision register move).
void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
}
959
960
// VMOV.F64: dd = dm (double-precision register move).
void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
}
964
965
// VADD.F32: sd = sn + sm.
void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20, sd, sn, sm);
}
970
971
// VADD.F64: dd = dn + dm.
void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20, dd, dn, dm);
}
976
977
// VSUB.F32: sd = sn - sm.
void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
}
982
983
// VSUB.F64: dd = dn - dm.
void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
}
988
989
// VMUL.F32: sd = sn * sm.
void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B21, sd, sn, sm);
}
994
995
// VMUL.F64: dd = dn * dm.
void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B21, dd, dn, dm);
}
1000
1001
// VMLA.F32 (multiply accumulate): sd += sn * sm.
void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, 0, sd, sn, sm);
}
1006
1007
// VMLA.F64 (multiply accumulate): dd += dn * dm.
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
1012
1013
// VMLS.F32 (multiply subtract): sd -= sn * sm.
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
1018
1019
// VMLS.F64 (multiply subtract): dd -= dn * dm.
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
1024
1025
// VDIV.F32: sd = sn / sm.
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
1030
1031
// VDIV.F64: dd = dn / dm.
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
1036
1037
// VABS.F32: sd = |sm|.
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
1041
1042
// VABS.F64: dd = |dm|.
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
1046
1047
// VNEG.F32: sd = -sm.
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
1051
1052
// VNEG.F64: dd = -dm.
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
1056
1057
// VSQRT.F32: sd = sqrt(sm).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
1061
// VSQRT.F64: dd = sqrt(dm).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
1065
1066
// VCVT.F32.F64: sd (single) = convert(dm (double)).
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
1070
1071
// VCVT.F64.F32: dd (double) = convert(sm (single)).
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
1075
1076
// VCVT.S32.F32: sd (signed int) = convert(sm (single)).
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
1080
1081
// VCVT.S32.F64: sd (signed int) = convert(dm (double)).
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
1085
1086
// VCVT.F32.S32: sd (single) = convert(sm (signed int)).
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
1090
1091
// VCVT.F64.S32: dd (double) = convert(sm (signed int)).
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
1095
1096
// VCVT.U32.F32: sd (unsigned int) = convert(sm (single)).
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
1100
1101
// VCVT.U32.F64: sd (unsigned int) = convert(dm (double)).
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
1105
1106
// VCVT.F32.U32: sd (single) = convert(sm (unsigned int)).
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
1110
1111
// VCVT.F64.U32: dd (double) = convert(sm (unsigned int)).
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
1115
1116
// VCMP.F32: compares sd with sm, setting the FPSCR flags.
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
1120
1121
// VCMP.F64: compares dd with dm, setting the FPSCR flags.
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
1125
1126
// VCMP.F32 with zero: compares sd against +0.0, setting the FPSCR flags.
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
1130
1131
// VCMP.F64 with zero: compares dd against +0.0, setting the FPSCR flags.
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
1135
// B: (conditional) branch to `label`. Must not be used inside an IT block —
// the condition is encoded in the branch itself.
void Thumb2Assembler::b(Label* label, Condition cond) {
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
1140
1141
// BL: branch with link (call) to `label`.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
1146
1147
// BLX: branch with link and exchange to `label`; always unconditional.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
1151
1152
// Marks `label` as an exception handler: emits a TST pc, #0 (presumably a
// recognizable marker pattern — verify against the consumer of these markers),
// then an unconditional branch to `label` that is itself jumped over, so no
// extra code executes on the fall-through path.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, kCcSet, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);  // Skip over the handler branch below.
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1160
1161
1162void Thumb2Assembler::Emit32(int32_t value) {
1163 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1164 buffer_.Emit<int16_t>(value >> 16);
1165 buffer_.Emit<int16_t>(value & 0xffff);
1166}
1167
1168
// Appends a single 16-bit Thumb instruction to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1173
1174
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001175bool Thumb2Assembler::Is32BitDataProcessing(Condition cond,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001176 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001177 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001178 Register rn,
1179 Register rd,
1180 const ShifterOperand& so) {
1181 if (force_32bit_) {
1182 return true;
1183 }
1184
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001185 // Check special case for SP relative ADD and SUB immediate.
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001186 if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate() && set_cc != kCcSet) {
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001187 // If the immediate is in range, use 16 bit.
1188 if (rd == SP) {
1189 if (so.GetImmediate() < (1 << 9)) { // 9 bit immediate.
1190 return false;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001191 }
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001192 } else if (!IsHighRegister(rd) && opcode == ADD) {
1193 if (so.GetImmediate() < (1 << 10)) { // 10 bit immediate.
1194 return false;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001195 }
1196 }
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001197 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001198
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001199 bool can_contain_high_register =
1200 (opcode == CMP) ||
1201 (opcode == MOV && set_cc != kCcSet) ||
1202 ((opcode == ADD) && (rn == rd) && set_cc != kCcSet);
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001203
1204 if (IsHighRegister(rd) || IsHighRegister(rn)) {
1205 if (!can_contain_high_register) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001206 return true;
1207 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001208
Vladimir Marko5bc561c2014-12-16 17:41:59 +00001209 // There are high register instructions available for this opcode.
1210 // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
1211 if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
1212 return true;
1213 }
1214
1215 // The ADD and MOV instructions that work with high registers don't have 16-bit
1216 // immediate variants.
1217 if (so.IsImmediate()) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001218 return true;
1219 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001220 }
1221
1222 if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
1223 return true;
1224 }
1225
Dave Allison65fcc2c2014-04-28 13:45:27 -07001226 bool rn_is_valid = true;
1227
1228 // Check for single operand instructions and ADD/SUB.
1229 switch (opcode) {
1230 case CMP:
1231 case MOV:
1232 case TST:
1233 case MVN:
1234 rn_is_valid = false; // There is no Rn for these instructions.
1235 break;
1236 case TEQ:
Vladimir Markod2b4ca22015-09-14 15:13:26 +01001237 case ORN:
Dave Allison65fcc2c2014-04-28 13:45:27 -07001238 return true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001239 case ADD:
1240 case SUB:
1241 break;
1242 default:
1243 if (so.IsRegister() && rd != rn) {
1244 return true;
1245 }
1246 }
1247
1248 if (so.IsImmediate()) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001249 if (opcode == RSB) {
1250 DCHECK(rn_is_valid);
1251 if (so.GetImmediate() != 0u) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001252 return true;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001253 }
1254 } else if (rn_is_valid && rn != rd) {
1255 // The only thumb1 instructions with a register and an immediate are ADD and SUB
1256 // with a 3-bit immediate, and RSB with zero immediate.
1257 if (opcode == ADD || opcode == SUB) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00001258 if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
1259 return true; // Cannot match "setflags".
1260 }
1261 if (!IsUint<3>(so.GetImmediate()) && !IsUint<3>(-so.GetImmediate())) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001262 return true;
1263 }
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001264 } else {
1265 return true;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001266 }
1267 } else {
1268 // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
1269 if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
1270 return true;
Vladimir Markof5c09c32015-12-17 12:08:08 +00001271 } else if (opcode != CMP && ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
1272 return true; // Cannot match "setflags" for ADD, SUB or MOV.
Dave Allison65fcc2c2014-04-28 13:45:27 -07001273 } else {
Vladimir Markof5c09c32015-12-17 12:08:08 +00001274 // For ADD and SUB allow also negative 8-bit immediate as we will emit the oposite opcode.
1275 if (!IsUint<8>(so.GetImmediate()) &&
1276 (opcode == MOV || opcode == CMP || !IsUint<8>(-so.GetImmediate()))) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001277 return true;
1278 }
1279 }
1280 }
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001281 } else {
1282 DCHECK(so.IsRegister());
1283 if (so.IsShift()) {
1284 // Shift operand - check if it is a MOV convertible to a 16-bit shift instruction.
1285 if (opcode != MOV) {
Zheng Xuc6667102015-05-15 16:08:45 +08001286 return true;
1287 }
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001288 // Check for MOV with an ROR/RRX. There is no 16-bit ROR immediate and no 16-bit RRX.
1289 if (so.GetShift() == ROR || so.GetShift() == RRX) {
1290 return true;
1291 }
1292 // 16-bit shifts set condition codes if and only if outside IT block,
1293 // i.e. if and only if cond == AL.
1294 if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
1295 return true;
1296 }
1297 } else {
1298 // Register operand without shift.
1299 switch (opcode) {
1300 case ADD:
1301 // The 16-bit ADD that cannot contain high registers can set condition codes
1302 // if and only if outside IT block, i.e. if and only if cond == AL.
1303 if (!can_contain_high_register &&
1304 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
1305 return true;
1306 }
1307 break;
1308 case AND:
1309 case BIC:
1310 case EOR:
1311 case ORR:
1312 case MVN:
1313 case ADC:
1314 case SUB:
1315 case SBC:
1316 // These 16-bit opcodes set condition codes if and only if outside IT block,
1317 // i.e. if and only if cond == AL.
1318 if ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet) {
1319 return true;
1320 }
1321 break;
1322 case RSB:
1323 case RSC:
1324 // No 16-bit RSB/RSC Rd, Rm, Rn. It would be equivalent to SUB/SBC Rd, Rn, Rm.
1325 return true;
1326 case CMP:
1327 default:
1328 break;
1329 }
Zheng Xuc6667102015-05-15 16:08:45 +08001330 }
1331 }
1332
Dave Allison65fcc2c2014-04-28 13:45:27 -07001333 // The instruction can be encoded in 16 bits.
1334 return false;
1335}
1336
1337
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001338void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001339 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001340 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001341 Register rn,
1342 Register rd,
1343 const ShifterOperand& so) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001344 uint8_t thumb_opcode = 255U /* 0b11111111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001345 switch (opcode) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001346 case AND: thumb_opcode = 0U /* 0b0000 */; break;
1347 case EOR: thumb_opcode = 4U /* 0b0100 */; break;
1348 case SUB: thumb_opcode = 13U /* 0b1101 */; break;
1349 case RSB: thumb_opcode = 14U /* 0b1110 */; break;
1350 case ADD: thumb_opcode = 8U /* 0b1000 */; break;
Andreas Gampe35c68e32014-09-30 08:39:37 -07001351 case ADC: thumb_opcode = 10U /* 0b1010 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001352 case SBC: thumb_opcode = 11U /* 0b1011 */; break;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001353 case RSC: break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001354 case TST: thumb_opcode = 0U /* 0b0000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1355 case TEQ: thumb_opcode = 4U /* 0b0100 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1356 case CMP: thumb_opcode = 13U /* 0b1101 */; DCHECK(set_cc == kCcSet); rd = PC; break;
1357 case CMN: thumb_opcode = 8U /* 0b1000 */; DCHECK(set_cc == kCcSet); rd = PC; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001358 case ORR: thumb_opcode = 2U /* 0b0010 */; break;
1359 case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
1360 case BIC: thumb_opcode = 1U /* 0b0001 */; break;
1361 case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
Vladimir Markod2b4ca22015-09-14 15:13:26 +01001362 case ORN: thumb_opcode = 3U /* 0b0011 */; break;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001363 default:
1364 break;
1365 }
1366
Andreas Gampec8ccf682014-09-29 20:07:43 -07001367 if (thumb_opcode == 255U /* 0b11111111 */) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001368 LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001369 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001370 }
1371
1372 int32_t encoding = 0;
1373 if (so.IsImmediate()) {
1374 // Check special cases.
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00001375 if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12)) &&
1376 /* Prefer T3 encoding to T4. */ !ShifterOperandCanAlwaysHold(so.GetImmediate())) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001377 if (set_cc != kCcSet) {
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001378 if (opcode == SUB) {
1379 thumb_opcode = 5U;
1380 } else if (opcode == ADD) {
1381 thumb_opcode = 0U;
1382 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001383 }
1384 uint32_t imm = so.GetImmediate();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001385
1386 uint32_t i = (imm >> 11) & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001387 uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001388 uint32_t imm8 = imm & 0xff;
1389
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001390 encoding = B31 | B30 | B29 | B28 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001391 (set_cc == kCcSet ? B20 : B25) |
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001392 thumb_opcode << 21 |
1393 rn << 16 |
1394 rd << 8 |
1395 i << 26 |
1396 imm3 << 12 |
1397 imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001398 } else {
1399 // Modified immediate.
Dave Allison45fdb932014-06-25 12:37:10 -07001400 uint32_t imm = ModifiedImmediate(so.encodingThumb());
Dave Allison65fcc2c2014-04-28 13:45:27 -07001401 if (imm == kInvalidModifiedImmediate) {
1402 LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001403 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001404 }
1405 encoding = B31 | B30 | B29 | B28 |
1406 thumb_opcode << 21 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001407 (set_cc == kCcSet ? B20 : 0) |
Dave Allison65fcc2c2014-04-28 13:45:27 -07001408 rn << 16 |
1409 rd << 8 |
1410 imm;
1411 }
1412 } else if (so.IsRegister()) {
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001413 // Register (possibly shifted)
1414 encoding = B31 | B30 | B29 | B27 | B25 |
1415 thumb_opcode << 21 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001416 (set_cc == kCcSet ? B20 : 0) |
Guillaume "Vermeille" Sanchezdc62c482015-03-11 14:30:31 +00001417 rn << 16 |
1418 rd << 8 |
1419 so.encodingThumb();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001420 }
1421 Emit32(encoding);
1422}
1423
1424
1425void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
1426 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001427 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001428 Register rn,
1429 Register rd,
1430 const ShifterOperand& so) {
1431 if (opcode == ADD || opcode == SUB) {
1432 Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
1433 return;
1434 }
Andreas Gampec8ccf682014-09-29 20:07:43 -07001435 uint8_t thumb_opcode = 255U /* 0b11111111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001436 // Thumb1.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001437 uint8_t dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001438 uint8_t opcode_shift = 6;
1439 uint8_t rd_shift = 0;
1440 uint8_t rn_shift = 3;
1441 uint8_t immediate_shift = 0;
1442 bool use_immediate = false;
1443 uint8_t immediate = 0;
1444
1445 if (opcode == MOV && so.IsRegister() && so.IsShift()) {
1446 // Convert shifted mov operand2 into 16 bit opcodes.
1447 dp_opcode = 0;
1448 opcode_shift = 11;
1449
1450 use_immediate = true;
1451 immediate = so.GetImmediate();
1452 immediate_shift = 6;
1453
1454 rn = so.GetRegister();
1455
1456 switch (so.GetShift()) {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001457 case LSL:
1458 DCHECK_LE(immediate, 31u);
1459 thumb_opcode = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001460 break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001461 case LSR:
1462 DCHECK(1 <= immediate && immediate <= 32);
1463 immediate &= 31; // 32 is encoded as 0.
1464 thumb_opcode = 1U /* 0b01 */;
1465 break;
1466 case ASR:
1467 DCHECK(1 <= immediate && immediate <= 32);
1468 immediate &= 31; // 32 is encoded as 0.
1469 thumb_opcode = 2U /* 0b10 */;
1470 break;
1471 case ROR: // No 16-bit ROR immediate.
1472 case RRX: // No 16-bit RRX.
Dave Allison65fcc2c2014-04-28 13:45:27 -07001473 default:
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001474 LOG(FATAL) << "Unexpected shift: " << so.GetShift();
1475 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001476 }
1477 } else {
1478 if (so.IsImmediate()) {
1479 use_immediate = true;
1480 immediate = so.GetImmediate();
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001481 } else {
Guillaume "Vermeille" Sanchezab4a2f52015-03-11 14:00:30 +00001482 CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
1483 << "No register-shifted register instruction available in thumb";
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001484 // Adjust rn and rd: only two registers will be emitted.
1485 switch (opcode) {
1486 case AND:
1487 case ORR:
1488 case EOR:
1489 case RSB:
1490 case ADC:
1491 case SBC:
1492 case BIC: {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001493 // Sets condition codes if and only if outside IT block,
1494 // check that it complies with set_cc.
1495 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001496 if (rn == rd) {
1497 rn = so.GetRegister();
1498 } else {
1499 CHECK_EQ(rd, so.GetRegister());
1500 }
1501 break;
1502 }
1503 case CMP:
1504 case CMN: {
1505 CHECK_EQ(rd, 0);
1506 rd = rn;
1507 rn = so.GetRegister();
1508 break;
1509 }
1510 case MVN: {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001511 // Sets condition codes if and only if outside IT block,
1512 // check that it complies with set_cc.
1513 DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
1514 CHECK_EQ(rn, 0);
1515 rn = so.GetRegister();
1516 break;
1517 }
1518 case TST:
1519 case TEQ: {
1520 DCHECK(set_cc == kCcSet);
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001521 CHECK_EQ(rn, 0);
1522 rn = so.GetRegister();
1523 break;
1524 }
1525 default:
1526 break;
1527 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001528 }
1529
1530 switch (opcode) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001531 case AND: thumb_opcode = 0U /* 0b0000 */; break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001532 case ORR: thumb_opcode = 12U /* 0b1100 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001533 case EOR: thumb_opcode = 1U /* 0b0001 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001534 case RSB: thumb_opcode = 9U /* 0b1001 */; break;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001535 case ADC: thumb_opcode = 5U /* 0b0101 */; break;
1536 case SBC: thumb_opcode = 6U /* 0b0110 */; break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001537 case BIC: thumb_opcode = 14U /* 0b1110 */; break;
1538 case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
1539 case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
1540 case CMP: {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001541 DCHECK(set_cc == kCcSet);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001542 if (use_immediate) {
1543 // T2 encoding.
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001544 dp_opcode = 0;
1545 opcode_shift = 11;
1546 thumb_opcode = 5U /* 0b101 */;
1547 rd_shift = 8;
1548 rn_shift = 8;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001549 } else if (IsHighRegister(rd) || IsHighRegister(rn)) {
1550 // Special cmp for high registers.
1551 dp_opcode = 1U /* 0b01 */;
1552 opcode_shift = 7;
1553 // Put the top bit of rd into the bottom bit of the opcode.
1554 thumb_opcode = 10U /* 0b0001010 */ | static_cast<uint32_t>(rd) >> 3;
1555 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001556 } else {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001557 thumb_opcode = 10U /* 0b1010 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001558 }
1559
1560 break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001561 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001562 case CMN: {
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001563 CHECK(!use_immediate);
Andreas Gampec8ccf682014-09-29 20:07:43 -07001564 thumb_opcode = 11U /* 0b1011 */;
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001565 break;
1566 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07001567 case MOV:
1568 dp_opcode = 0;
1569 if (use_immediate) {
1570 // T2 encoding.
1571 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001572 thumb_opcode = 4U /* 0b100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001573 rd_shift = 8;
1574 rn_shift = 8;
1575 } else {
1576 rn = so.GetRegister();
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001577 if (set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001578 // Special mov for high registers.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001579 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001580 opcode_shift = 7;
1581 // Put the top bit of rd into the bottom bit of the opcode.
Andreas Gampec8ccf682014-09-29 20:07:43 -07001582 thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
1583 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001584 } else {
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001585 DCHECK(!IsHighRegister(rn));
1586 DCHECK(!IsHighRegister(rd));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001587 thumb_opcode = 0;
1588 }
1589 }
1590 break;
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001591
1592 case TEQ:
1593 case RSC:
Dave Allison65fcc2c2014-04-28 13:45:27 -07001594 default:
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001595 LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001596 break;
1597 }
1598 }
1599
Andreas Gampec8ccf682014-09-29 20:07:43 -07001600 if (thumb_opcode == 255U /* 0b11111111 */) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001601 LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001602 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001603 }
1604
1605 int16_t encoding = dp_opcode << 14 |
1606 (thumb_opcode << opcode_shift) |
1607 rd << rd_shift |
1608 rn << rn_shift |
1609 (use_immediate ? (immediate << immediate_shift) : 0);
1610
1611 Emit16(encoding);
1612}
1613
1614
// ADD and SUB are complex enough to warrant their own emitter.
// Emits a 16-bit Thumb encoding for ADD/SUB, selecting among several forms:
// register T1/T2, SP-relative immediates, and 3-bit/8-bit immediate forms.
// Immediates that do not fit as ADD are flipped to SUB (and vice versa);
// the chosen encoding's range and alignment constraints are CHECKed below.
void Thumb2Assembler::Emit16BitAddSub(Condition cond,
                                      Opcode opcode,
                                      SetCc set_cc,
                                      Register rn,
                                      Register rd,
                                      const ShifterOperand& so) {
  // Defaults correspond to the common 16-bit dataprocessing layout; the
  // cases below override shifts/fields as required by each encoding.
  uint8_t dp_opcode = 0;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint32_t immediate = 0;  // Should be at most 10 bits but keep the full immediate for CHECKs.
  uint8_t thumb_opcode;

  if (so.IsImmediate()) {
    use_immediate = true;
    immediate = so.GetImmediate();
    if (!IsUint<10>(immediate)) {
      // Flip ADD/SUB.
      // Negating the immediate and swapping the opcode preserves semantics
      // while bringing the immediate into encodable range.
      opcode = (opcode == ADD) ? SUB : ADD;
      immediate = -immediate;
      DCHECK(IsUint<10>(immediate));  // More stringent checks below.
    }
  }

  switch (opcode) {
    case ADD:
      if (so.IsRegister()) {
        Register rm = so.GetRegister();
        if (rn == rd && set_cc != kCcSet) {
          // Can use T2 encoding (allows 4 bit registers)
          dp_opcode = 1U /* 0b01 */;
          opcode_shift = 10;
          thumb_opcode = 1U /* 0b0001 */;
          // Make Rn also contain the top bit of rd.
          rn = static_cast<Register>(static_cast<uint32_t>(rm) |
                                     (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
          rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
        } else {
          // T1.
          // T1 only takes low registers (3-bit register fields).
          DCHECK(!IsHighRegister(rd));
          DCHECK(!IsHighRegister(rn));
          DCHECK(!IsHighRegister(rm));
          // Sets condition codes if and only if outside IT block,
          // check that it complies with set_cc.
          DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
          opcode_shift = 9;
          thumb_opcode = 12U /* 0b01100 */;
          // The third register rm travels in the "immediate" field of T1.
          immediate = static_cast<uint32_t>(so.GetRegister());
          use_immediate = true;
          immediate_shift = 6;
        }
      } else {
        // Immediate.
        if (rd == SP && rn == SP) {
          // ADD sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 3U /* 0b11 */;
          opcode_shift = 12;
          // SP-relative form encodes imm as a word offset: 9-bit, 4-aligned.
          CHECK(IsUint<9>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;
        } else if (rd != SP && rn == SP) {
          // ADD rd, SP, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 5U /* 0b101 */;
          opcode_shift = 11;
          // 10-bit, 4-aligned word offset from SP.
          CHECK(IsUint<10>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rn from instruction.
          rn = R0;
          rn_shift = 0;
          rd_shift = 8;
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1.
          // Three-operand immediate form: only a 3-bit immediate fits.
          CHECK(IsUint<3>(immediate));
          opcode_shift = 9;
          thumb_opcode = 14U /* 0b01110 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          // Two-operand (rd == rn) form: 8-bit immediate.
          CHECK(IsUint<8>(immediate));
          opcode_shift = 11;
          thumb_opcode = 6U /* 0b110 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;

    case SUB:
      if (so.IsRegister()) {
        // T1.
        Register rm = so.GetRegister();
        DCHECK(!IsHighRegister(rd));
        DCHECK(!IsHighRegister(rn));
        DCHECK(!IsHighRegister(rm));
        // Sets condition codes if and only if outside IT block,
        // check that it complies with set_cc.
        DCHECK((cond == AL) ? set_cc != kCcKeep : set_cc != kCcSet);
        opcode_shift = 9;
        thumb_opcode = 13U /* 0b01101 */;
        // The third register rm travels in the "immediate" field of T1.
        immediate = static_cast<uint32_t>(rm);
        use_immediate = true;
        immediate_shift = 6;
      } else {
        if (rd == SP && rn == SP) {
          // SUB sp, sp, #imm
          dp_opcode = 2U /* 0b10 */;
          thumb_opcode = 0x61 /* 0b1100001 */;
          opcode_shift = 7;
          // SP-relative form encodes imm as a word offset: 9-bit, 4-aligned.
          CHECK(IsUint<9>(immediate));
          CHECK_ALIGNED(immediate, 4);

          // Remove rd and rn from instruction by orring it with immed and clearing bits.
          rn = R0;
          rd = R0;
          rd_shift = 0;
          rn_shift = 0;
          immediate >>= 2;
        } else if (rn != rd) {
          // Must use T1.
          CHECK(IsUint<3>(immediate));
          opcode_shift = 9;
          thumb_opcode = 15U /* 0b01111 */;
          immediate_shift = 6;
        } else {
          // T2 encoding.
          CHECK(IsUint<8>(immediate));
          opcode_shift = 11;
          thumb_opcode = 7U /* 0b111 */;
          rd_shift = 8;
          rn_shift = 8;
        }
      }
      break;
    default:
      LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
      UNREACHABLE();
  }

  // Assemble the final halfword from the fields selected above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1774
1775
1776void Thumb2Assembler::EmitDataProcessing(Condition cond,
1777 Opcode opcode,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001778 SetCc set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001779 Register rn,
1780 Register rd,
1781 const ShifterOperand& so) {
1782 CHECK_NE(rd, kNoRegister);
1783 CheckCondition(cond);
1784
1785 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1786 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1787 } else {
1788 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1789 }
1790}
1791
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001792void Thumb2Assembler::EmitShift(Register rd,
1793 Register rm,
1794 Shift shift,
1795 uint8_t amount,
1796 Condition cond,
1797 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001798 CHECK_LT(amount, (1 << 5));
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001799 if ((IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) ||
1800 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001801 uint16_t opcode = 0;
1802 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001803 case LSL: opcode = 0U /* 0b00 */; break;
1804 case LSR: opcode = 1U /* 0b01 */; break;
1805 case ASR: opcode = 2U /* 0b10 */; break;
1806 case ROR: opcode = 3U /* 0b11 */; break;
1807 case RRX: opcode = 3U /* 0b11 */; amount = 0; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001808 default:
1809 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001810 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001811 }
1812 // 32 bit.
1813 int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001814 0xf << 16 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001815 uint32_t imm3 = amount >> 2;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001816 uint32_t imm2 = amount & 3U /* 0b11 */;
Dave Allison45fdb932014-06-25 12:37:10 -07001817 encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
1818 static_cast<int16_t>(rd) << 8 | opcode << 4;
1819 Emit32(encoding);
1820 } else {
1821 // 16 bit shift
1822 uint16_t opcode = 0;
1823 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001824 case LSL: opcode = 0U /* 0b00 */; break;
1825 case LSR: opcode = 1U /* 0b01 */; break;
1826 case ASR: opcode = 2U /* 0b10 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001827 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001828 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1829 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001830 }
1831 int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
1832 static_cast<int16_t>(rd);
1833 Emit16(encoding);
1834 }
1835}
1836
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001837void Thumb2Assembler::EmitShift(Register rd,
1838 Register rn,
1839 Shift shift,
1840 Register rm,
1841 Condition cond,
1842 SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07001843 CHECK_NE(shift, RRX);
1844 bool must_be_32bit = false;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001845 if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn ||
1846 ((cond == AL) ? set_cc == kCcKeep : set_cc == kCcSet)) {
Dave Allison45fdb932014-06-25 12:37:10 -07001847 must_be_32bit = true;
1848 }
1849
1850 if (must_be_32bit) {
1851 uint16_t opcode = 0;
1852 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001853 case LSL: opcode = 0U /* 0b00 */; break;
1854 case LSR: opcode = 1U /* 0b01 */; break;
1855 case ASR: opcode = 2U /* 0b10 */; break;
1856 case ROR: opcode = 3U /* 0b11 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001857 default:
1858 LOG(FATAL) << "Unsupported thumb2 shift opcode";
Vladimir Markoe8469c12014-11-26 18:09:30 +00001859 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001860 }
1861 // 32 bit.
1862 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001863 0xf << 12 | (set_cc == kCcSet ? B20 : 0);
Dave Allison45fdb932014-06-25 12:37:10 -07001864 encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
1865 static_cast<int16_t>(rd) << 8 | opcode << 21;
1866 Emit32(encoding);
1867 } else {
1868 uint16_t opcode = 0;
1869 switch (shift) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07001870 case LSL: opcode = 2U /* 0b0010 */; break;
1871 case LSR: opcode = 3U /* 0b0011 */; break;
1872 case ASR: opcode = 4U /* 0b0100 */; break;
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01001873 case ROR: opcode = 7U /* 0b0111 */; break;
Dave Allison45fdb932014-06-25 12:37:10 -07001874 default:
Vladimir Markoe8469c12014-11-26 18:09:30 +00001875 LOG(FATAL) << "Unsupported thumb2 shift opcode";
1876 UNREACHABLE();
Dave Allison45fdb932014-06-25 12:37:10 -07001877 }
1878 int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
1879 static_cast<int16_t>(rd);
1880 Emit16(encoding);
1881 }
1882}
1883
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001884inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1885 switch (size) {
1886 case kBranch16Bit:
1887 return 2u;
1888 case kBranch32Bit:
1889 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001890
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001891 case kCbxz16Bit:
1892 return 2u;
1893 case kCbxz32Bit:
1894 return 4u;
1895 case kCbxz48Bit:
1896 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001897
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001898 case kLiteral1KiB:
1899 return 2u;
1900 case kLiteral4KiB:
1901 return 4u;
1902 case kLiteral64KiB:
1903 return 8u;
1904 case kLiteral1MiB:
1905 return 10u;
1906 case kLiteralFar:
1907 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001908
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07001909 case kLiteralAddr1KiB:
1910 return 2u;
1911 case kLiteralAddr4KiB:
1912 return 4u;
1913 case kLiteralAddr64KiB:
1914 return 6u;
1915 case kLiteralAddrFar:
1916 return 10u;
1917
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001918 case kLongOrFPLiteral1KiB:
1919 return 4u;
Vladimir Markoebdbf4b2016-07-07 15:37:02 +01001920 case kLongOrFPLiteral64KiB:
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001921 return 10u;
1922 case kLongOrFPLiteralFar:
1923 return 14u;
1924 }
1925 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1926 UNREACHABLE();
1927}
1928
1929inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
1930 return SizeInBytes(original_size_);
1931}
1932
1933inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
1934 return SizeInBytes(size_);
1935}
1936
1937inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1938 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001939 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001940 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1941 return current_code_size & 2;
1942}
1943
// Computes the offset to encode for this fixup given the current total
// code size: the raw location->target distance, corrected by
// `adjustment_` (extra bytes accounted for between this fixup and its
// target; sign depends on branch direction), minus the Thumb2 PC bias of
// 4, then compensated per-encoding for extra leading instructions,
// literal pool padding and PC rounding of literal loads.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  // All positions must fit in int32_t so the subtraction below cannot overflow.
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  if (target_ > location_) {
    // Forward reference: expansion between location_ and target_ widens the gap.
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    // Backward reference: the adjustment moves the target further behind us.
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
    case kLiteralAddr1KiB:
    case kLiteralAddr4KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral64KiB:
    case kLiteralAddr64KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
    case kLiteralAddrFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
2008
2009inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
2010 DCHECK_NE(target_, kUnresolved);
2011 Size old_size = size_;
2012 size_ = new_size;
2013 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
2014 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
2015 if (target_ > location_) {
2016 adjustment_ += adjustment;
2017 }
2018 return adjustment;
2019}
2020
Vladimir Marko167bc0b2016-09-13 15:11:50 +01002021bool Thumb2Assembler::Fixup::IsCandidateForEmitEarly() const {
2022 DCHECK(size_ == original_size_);
2023 if (target_ == kUnresolved) {
2024 return false;
2025 }
2026 // GetOffset() does not depend on current_code_size for branches, only for literals.
2027 constexpr uint32_t current_code_size = 0u;
2028 switch (GetSize()) {
2029 case kBranch16Bit:
2030 return IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size));
2031 case kBranch32Bit:
2032 // We don't support conditional branches beyond +-1MiB
2033 // or unconditional branches beyond +-16MiB.
2034 return true;
2035
2036 case kCbxz16Bit:
2037 return IsUint<7>(GetOffset(current_code_size));
2038 case kCbxz32Bit:
2039 return IsInt<9>(GetOffset(current_code_size));
2040 case kCbxz48Bit:
2041 // We don't support conditional branches beyond +-1MiB.
2042 return true;
2043
2044 case kLiteral1KiB:
2045 case kLiteral4KiB:
2046 case kLiteral64KiB:
2047 case kLiteral1MiB:
2048 case kLiteralFar:
2049 case kLiteralAddr1KiB:
2050 case kLiteralAddr4KiB:
2051 case kLiteralAddr64KiB:
2052 case kLiteralAddrFar:
2053 case kLongOrFPLiteral1KiB:
2054 case kLongOrFPLiteral64KiB:
2055 case kLongOrFPLiteralFar:
2056 return false;
2057 }
2058}
2059
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002060uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
2061 uint32_t old_code_size = current_code_size;
2062 switch (GetSize()) {
2063 case kBranch16Bit:
2064 if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
2065 break;
Vladimir Markof38caa62015-05-29 15:50:18 +01002066 }
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002067 current_code_size += IncreaseSize(kBranch32Bit);
2068 FALLTHROUGH_INTENDED;
2069 case kBranch32Bit:
2070 // We don't support conditional branches beyond +-1MiB
2071 // or unconditional branches beyond +-16MiB.
2072 break;
2073
2074 case kCbxz16Bit:
2075 if (IsUint<7>(GetOffset(current_code_size))) {
2076 break;
2077 }
2078 current_code_size += IncreaseSize(kCbxz32Bit);
2079 FALLTHROUGH_INTENDED;
2080 case kCbxz32Bit:
2081 if (IsInt<9>(GetOffset(current_code_size))) {
2082 break;
2083 }
2084 current_code_size += IncreaseSize(kCbxz48Bit);
2085 FALLTHROUGH_INTENDED;
2086 case kCbxz48Bit:
2087 // We don't support conditional branches beyond +-1MiB.
2088 break;
2089
2090 case kLiteral1KiB:
2091 DCHECK(!IsHighRegister(rn_));
2092 if (IsUint<10>(GetOffset(current_code_size))) {
2093 break;
2094 }
2095 current_code_size += IncreaseSize(kLiteral4KiB);
2096 FALLTHROUGH_INTENDED;
2097 case kLiteral4KiB:
2098 if (IsUint<12>(GetOffset(current_code_size))) {
2099 break;
2100 }
2101 current_code_size += IncreaseSize(kLiteral64KiB);
2102 FALLTHROUGH_INTENDED;
2103 case kLiteral64KiB:
2104 // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
2105 if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
2106 break;
2107 }
2108 current_code_size += IncreaseSize(kLiteral1MiB);
2109 FALLTHROUGH_INTENDED;
2110 case kLiteral1MiB:
2111 if (IsUint<20>(GetOffset(current_code_size))) {
2112 break;
2113 }
2114 current_code_size += IncreaseSize(kLiteralFar);
2115 FALLTHROUGH_INTENDED;
2116 case kLiteralFar:
2117 // This encoding can reach any target.
2118 break;
2119
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07002120 case kLiteralAddr1KiB:
2121 DCHECK(!IsHighRegister(rn_));
2122 if (IsUint<10>(GetOffset(current_code_size))) {
2123 break;
2124 }
2125 current_code_size += IncreaseSize(kLiteralAddr4KiB);
2126 FALLTHROUGH_INTENDED;
2127 case kLiteralAddr4KiB:
2128 if (IsUint<12>(GetOffset(current_code_size))) {
2129 break;
2130 }
2131 current_code_size += IncreaseSize(kLiteralAddr64KiB);
2132 FALLTHROUGH_INTENDED;
2133 case kLiteralAddr64KiB:
2134 if (IsUint<16>(GetOffset(current_code_size))) {
2135 break;
2136 }
2137 current_code_size += IncreaseSize(kLiteralAddrFar);
2138 FALLTHROUGH_INTENDED;
2139 case kLiteralAddrFar:
2140 // This encoding can reach any target.
2141 break;
2142
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002143 case kLongOrFPLiteral1KiB:
2144 if (IsUint<10>(GetOffset(current_code_size))) {
2145 break;
2146 }
Vladimir Markoebdbf4b2016-07-07 15:37:02 +01002147 current_code_size += IncreaseSize(kLongOrFPLiteral64KiB);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002148 FALLTHROUGH_INTENDED;
Vladimir Markoebdbf4b2016-07-07 15:37:02 +01002149 case kLongOrFPLiteral64KiB:
2150 if (IsUint<16>(GetOffset(current_code_size))) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002151 break;
2152 }
2153 current_code_size += IncreaseSize(kLongOrFPLiteralFar);
2154 FALLTHROUGH_INTENDED;
2155 case kLongOrFPLiteralFar:
2156 // This encoding can reach any target.
2157 break;
2158 }
2159 return current_code_size - old_code_size;
2160}
2161
// Writes the final instruction bytes for this fixup into `buffer` at
// location_, using the encoding chosen by the size-adjustment pass
// (GetSize()) and the offset computed for the final `code_size`.
// Multi-instruction sequences are stored as consecutive 16-bit halfwords.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        // Turn B into BL by setting the link bit.
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;
      } else if (type_ == kUnconditionalLinkX) {
        // Turn B into BLX.
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // CBZ/CBNZ out of range: emit CMP rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // CMP rn, #0 followed by a 32-bit conditional branch.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // MOVW rn, #offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // MOVW+MOVT the full offset; ADD rn, PC; LDR rn, [rn, #0].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLiteralAddr1KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int16_t encoding = AdrEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteralAddr4KiB: {
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t encoding = AdrEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteralAddr64KiB: {
      // MOVW rn, #offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      break;
    }
    case kLiteralAddrFar: {
      // MOVW+MOVT the full offset; ADD rn, PC.
      DCHECK(type_ == kLoadLiteralAddr);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral64KiB: {
      // MOVW IP, #offset; ADD IP, PC; wide/FP load through IP.
      int32_t mov_encoding = MovwEncoding32(IP, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0u);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // MOVW+MOVT the full offset into IP; ADD IP, PC; wide/FP load through IP.
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
2339
Dave Allison65fcc2c2014-04-28 13:45:27 -07002340uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00002341 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002342 uint32_t location = buffer_.Size();
2343
2344 // This is always unresolved as it must be a forward branch.
2345 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00002346 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002347}
2348
2349
// Emits a load/store (LDR/STR, LDRB/STRB, LDRH/STRH and the signed load
// variants selected by the flags) for `rd` at address `ad`, picking a 16-bit
// Thumb encoding when one exists and falling back to 32-bit Thumb2 otherwise.
// NOTE(review): an older comment here claimed only immediate offsets are
// supported, but the code below also handles register offsets; shifted
// register offsets are only available in the 32-bit encodings.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  // Collect all the reasons the 16-bit encodings are unavailable.
  bool must_be_32bit = force_32bit_;
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // High base registers force 32-bit, except the word-sized SP/PC-relative forms.
  if (IsHighRegister(rn) && (byte || half || (rn != SP && rn != PC))) {
    must_be_32bit = true;
  }

  // Signed loads, negative offsets and pre/post-indexed modes have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // Check whether the offset fits the (scaled) immediate field of the
    // 16-bit encoding for this access size.
    if (byte) {
      // 5 bit offset, no shift.
      if ((offset & ~0x1f) != 0) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 5 bit offset, shifted by 1.
      if ((offset & ~(0x1f << 1)) != 0) {
        must_be_32bit = true;
      }
    } else if (rn == SP || rn == PC) {
      // The 16 bit SP/PC relative instruction can only have an (imm8 << 2) offset.
      if ((offset & ~(0xff << 2)) != 0) {
        must_be_32bit = true;
      }
    } else {
      // 5 bit offset, shifted by 2.
      if ((offset & ~(0x1f << 2)) != 0) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      // 32-bit immediate-offset encoding.
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_or_pc_relative = false;

      // Major opcode selects the access size / addressing form.
      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_or_pc_relative = true;
        } else if (rn == PC) {
          opA = 4U;
          sp_or_pc_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_or_pc_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    CHECK_NE(ad.GetRegister(), PC);
    if (ad.GetShiftCount() != 0) {
      // If there is a shift count this must be 32 bit.
      must_be_32bit = true;
    } else if (IsHighRegister(ad.GetRegisterOffset())) {
      must_be_32bit = true;
    }

    if (must_be_32bit) {
      // 32-bit register-offset encoding.
      int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true);
      if (half) {
        encoding |= B21;
      } else if (!byte) {
        encoding |= B22;
      }
      if (load && is_signed && (byte || half)) {
        encoding |= B24;
      }
      Emit32(encoding);
    } else {
      // 16 bit register offset.
      int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
          ad.encodingThumb(false);
      if (byte) {
        encoding |= B10;
      } else if (half) {
        encoding |= B9;
      }
      Emit16(encoding);
    }
  }
}
2497
2498
// Emits a multi-register memory operation (LDM/STM) with block address mode
// `bam`, using a 16-bit PUSH/POP or 16-bit LDM/STM encoding when possible.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // SP with the canonical push/pop addressing mode and only low registers
  // (plus optionally PC on load / LR on store) fits the 16-bit PUSH/POP.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // Any high register in the list forces the 32-bit encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    // Map the addressing mode onto the 2-bit mode field; DA/IB have no
    // Thumb2 encoding.
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
                    (op << 23) |
                    (load ? B20 : 0) |
                    base << 16 |
                    regs |
                    (w_bit << 21);
    Emit32(encoding);
  } else {
    // 16-bit LDM/STM (writeback implied).
    int16_t encoding = B15 | B14 |
                       (load ? B11 : 0) |
                       base << 8 |
                       regs;
    Emit16(encoding);
  }
}
2567
// Emits a branch (B/B<cond>/BL/BLX) to `label`, recording a Fixup so the
// final offset (and possibly a larger encoding) can be patched in later.
// Unbound labels are linked into a chain threaded through the emitted code.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  // If branches cannot be relocated later, conservatively start with 32-bit.
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      // BL/BLX only exist as 32-bit encodings.
      use32bit = true;
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BX.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
      // The T2 encoding offset is `SignExtend(imm11:'0', 32)` and there is a PC adjustment of 4.
      static constexpr size_t kMaxT2BackwardDistance = (1u << 11) - 4u;
      if (!use32bit && label->IsBound() && pc - label->Position() > kMaxT2BackwardDistance) {
        use32bit = true;
      }
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
    // The T1 encoding offset is `SignExtend(imm8:'0', 32)` and there is a PC adjustment of 4.
    static constexpr size_t kMaxT1BackwardDistance = (1u << 8) - 4u;
    if (!use32bit && label->IsBound() && pc - label->Position() > kMaxT1BackwardDistance) {
      use32bit = true;
    }
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    GetFixup(branch_id)->Resolve(label->Position());
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  // 32-bit branches occupy a second halfword.
  if (use32bit) {
    Emit16(0);
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2616
2617
Artem Serovc257da72016-02-02 13:49:43 +00002618void Thumb2Assembler::Emit32Miscellaneous(uint8_t op1,
2619 uint8_t op2,
2620 uint32_t rest_encoding) {
2621 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B23 |
2622 op1 << 20 |
2623 0xf << 12 |
2624 B7 |
2625 op2 << 4 |
2626 rest_encoding;
2627 Emit32(encoding);
2628}
2629
2630
2631void Thumb2Assembler::Emit16Miscellaneous(uint32_t rest_encoding) {
2632 int16_t encoding = B15 | B13 | B12 |
2633 rest_encoding;
2634 Emit16(encoding);
2635}
2636
Dave Allison65fcc2c2014-04-28 13:45:27 -07002637void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2638 CHECK_NE(rd, kNoRegister);
2639 CHECK_NE(rm, kNoRegister);
2640 CheckCondition(cond);
2641 CHECK_NE(rd, PC);
2642 CHECK_NE(rm, PC);
Artem Serovc257da72016-02-02 13:49:43 +00002643 int32_t encoding =
Dave Allison65fcc2c2014-04-28 13:45:27 -07002644 static_cast<uint32_t>(rm) << 16 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002645 static_cast<uint32_t>(rd) << 8 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002646 static_cast<uint32_t>(rm);
Artem Serovc257da72016-02-02 13:49:43 +00002647 Emit32Miscellaneous(0b11, 0b00, encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002648}
2649
2650
2651void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2652 CheckCondition(cond);
Vladimir Markob4536b72015-11-24 13:45:23 +00002653 // Always 32 bits, encoding T3. (Other encondings are called MOV, not MOVW.)
2654 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2655 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2656 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
2657 uint32_t imm8 = imm16 & 0xff;
2658 int32_t encoding = B31 | B30 | B29 | B28 |
2659 B25 | B22 |
2660 static_cast<uint32_t>(rd) << 8 |
2661 i << 26 |
2662 imm4 << 16 |
2663 imm3 << 12 |
2664 imm8;
2665 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002666}
2667
2668
2669void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2670 CheckCondition(cond);
2671 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002672 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2673 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2674 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002675 uint32_t imm8 = imm16 & 0xff;
2676 int32_t encoding = B31 | B30 | B29 | B28 |
2677 B25 | B23 | B22 |
2678 static_cast<uint32_t>(rd) << 8 |
2679 i << 26 |
2680 imm4 << 16 |
2681 imm3 << 12 |
2682 imm8;
2683 Emit32(encoding);
2684}
2685
2686
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002687void Thumb2Assembler::rbit(Register rd, Register rm, Condition cond) {
2688 CHECK_NE(rd, kNoRegister);
2689 CHECK_NE(rm, kNoRegister);
2690 CheckCondition(cond);
2691 CHECK_NE(rd, PC);
2692 CHECK_NE(rm, PC);
2693 CHECK_NE(rd, SP);
2694 CHECK_NE(rm, SP);
Artem Serovc257da72016-02-02 13:49:43 +00002695 int32_t encoding =
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002696 static_cast<uint32_t>(rm) << 16 |
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002697 static_cast<uint32_t>(rd) << 8 |
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002698 static_cast<uint32_t>(rm);
Artem Serovc257da72016-02-02 13:49:43 +00002699
2700 Emit32Miscellaneous(0b01, 0b10, encoding);
2701}
2702
2703
2704void Thumb2Assembler::EmitReverseBytes(Register rd, Register rm,
2705 uint32_t op) {
2706 CHECK_NE(rd, kNoRegister);
2707 CHECK_NE(rm, kNoRegister);
2708 CHECK_NE(rd, PC);
2709 CHECK_NE(rm, PC);
2710 CHECK_NE(rd, SP);
2711 CHECK_NE(rm, SP);
2712
2713 if (!IsHighRegister(rd) && !IsHighRegister(rm) && !force_32bit_) {
2714 uint16_t t1_op = B11 | B9 | (op << 6);
2715 int16_t encoding = t1_op |
2716 static_cast<uint16_t>(rm) << 3 |
2717 static_cast<uint16_t>(rd);
2718 Emit16Miscellaneous(encoding);
2719 } else {
2720 int32_t encoding =
2721 static_cast<uint32_t>(rm) << 16 |
2722 static_cast<uint32_t>(rd) << 8 |
2723 static_cast<uint32_t>(rm);
2724 Emit32Miscellaneous(0b01, op, encoding);
2725 }
2726}
2727
2728
2729void Thumb2Assembler::rev(Register rd, Register rm, Condition cond) {
2730 CheckCondition(cond);
2731 EmitReverseBytes(rd, rm, 0b00);
2732}
2733
2734
2735void Thumb2Assembler::rev16(Register rd, Register rm, Condition cond) {
2736 CheckCondition(cond);
2737 EmitReverseBytes(rd, rm, 0b01);
2738}
2739
2740
2741void Thumb2Assembler::revsh(Register rd, Register rm, Condition cond) {
2742 CheckCondition(cond);
2743 EmitReverseBytes(rd, rm, 0b11);
Scott Wakeling9ee23f42015-07-23 10:44:35 +01002744}
2745
2746
Dave Allison65fcc2c2014-04-28 13:45:27 -07002747void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2748 CHECK_NE(rn, kNoRegister);
2749 CHECK_NE(rt, kNoRegister);
2750 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002751 CHECK_LT(imm, (1u << 10));
2752
2753 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2754 static_cast<uint32_t>(rn) << 16 |
2755 static_cast<uint32_t>(rt) << 12 |
2756 0xf << 8 |
2757 imm >> 2;
2758 Emit32(encoding);
2759}
2760
2761
2762void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
2763 ldrex(rt, rn, 0, cond);
2764}
2765
2766
2767void Thumb2Assembler::strex(Register rd,
2768 Register rt,
2769 Register rn,
2770 uint16_t imm,
2771 Condition cond) {
2772 CHECK_NE(rn, kNoRegister);
2773 CHECK_NE(rd, kNoRegister);
2774 CHECK_NE(rt, kNoRegister);
2775 CheckCondition(cond);
2776 CHECK_LT(imm, (1u << 10));
2777
2778 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2779 static_cast<uint32_t>(rn) << 16 |
2780 static_cast<uint32_t>(rt) << 12 |
2781 static_cast<uint32_t>(rd) << 8 |
2782 imm >> 2;
2783 Emit32(encoding);
2784}
2785
2786
// LDREXD rt, rt2, [rn]: exclusive load of a doubleword into the pair rt/rt2.
void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);  // The two destination registers must be distinct.
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 | B3 | B2 | B1 | B0;  // Fixed low bits of the encoding.
  Emit32(encoding);
}
2801
2802
Dave Allison65fcc2c2014-04-28 13:45:27 -07002803void Thumb2Assembler::strex(Register rd,
2804 Register rt,
2805 Register rn,
2806 Condition cond) {
2807 strex(rd, rt, rn, 0, cond);
2808}
2809
2810
// STREXD rd, rt, rt2, [rn]: exclusive store of the doubleword pair rt/rt2;
// rd receives the success status and must not overlap the sources.
void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
  CHECK_NE(rd, kNoRegister);
  CHECK_NE(rn, kNoRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt, rt2);  // Source registers must be distinct.
  CHECK_NE(rd, rt);   // Status register must not alias either source.
  CHECK_NE(rd, rt2);
  CheckCondition(cond);

  int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rt) << 12 |
      static_cast<uint32_t>(rt2) << 8 |
      B6 | B5 | B4 |
      static_cast<uint32_t>(rd);
  Emit32(encoding);
}
2829
2830
Dave Allison65fcc2c2014-04-28 13:45:27 -07002831void Thumb2Assembler::clrex(Condition cond) {
2832 CheckCondition(cond);
Roland Levillain188edb32016-10-24 16:31:16 +01002833 int32_t encoding = B31 | B30 | B29 | B28 | B25 | B24 | B23 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002834 B21 | B20 |
2835 0xf << 16 |
2836 B15 |
2837 0xf << 8 |
2838 B5 |
2839 0xf;
2840 Emit32(encoding);
2841}
2842
2843
2844void Thumb2Assembler::nop(Condition cond) {
2845 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002846 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002847 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002848 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002849}
2850
2851
// VMOV Sn, Rt: transfer core register `rt` into single-precision register `sn`.
void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);  // SP and PC cannot be transferred to/from VFP registers.
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // The S register number is split into a 4-bit field (bits 19:16) and a
  // single extra bit (bit 7).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2865
2866
// VMOV Rt, Sn: transfer single-precision register `sn` into core register `rt`.
void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);  // SP and PC cannot be transferred to/from VFP registers.
  CHECK_NE(rt, PC);
  CheckCondition(cond);
  // Same layout as vmovsr but with B20 set, selecting the to-core direction.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B20 |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sn) & 1)*B7) | B4;
  Emit32(encoding);
}
2880
2881
// VMOV Sm, Sm+1, Rt, Rt2: transfer two core registers into a consecutive
// S register pair starting at `sm` (hence `sm` must not be S31).
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // The pair sm, sm+1 must fit in S0-S31.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2901
2902
// VMOV Rt, Rt2, Sm, Sm+1: transfer a consecutive S register pair starting at
// `sm` into two distinct core registers.
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // The pair sm, sm+1 must fit in S0-S31.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must be distinct.
  CheckCondition(cond);
  // Same layout as vmovsrr but with B20 set, selecting the to-core direction.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 |
      ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2923
2924
// VMOV Dm, Rt, Rt2: transfer two core registers into double-precision
// register `dm` (rt -> low half, per the operand order of the instruction).
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  // The D register number is split into a 4-bit field and a top bit (bit 5).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2943
2944
// VMOV Rt, Rt2, Dm: transfer double-precision register `dm` into two
// distinct core registers.
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // Destinations must be distinct.
  CheckCondition(cond);
  // Same layout as vmovdrr but with B20 set, selecting the to-core direction.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B22 | B20 |
      (static_cast<int32_t>(rt2)*B16) |
      (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
      ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2964
2965
2966void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2967 const Address& addr = static_cast<const Address&>(ad);
2968 CHECK_NE(sd, kNoSRegister);
2969 CheckCondition(cond);
2970 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2971 B27 | B26 | B24 | B20 |
2972 ((static_cast<int32_t>(sd) & 1)*B22) |
2973 ((static_cast<int32_t>(sd) >> 1)*B12) |
2974 B11 | B9 | addr.vencoding();
2975 Emit32(encoding);
2976}
2977
2978
2979void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2980 const Address& addr = static_cast<const Address&>(ad);
2981 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2982 CHECK_NE(sd, kNoSRegister);
2983 CheckCondition(cond);
2984 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2985 B27 | B26 | B24 |
2986 ((static_cast<int32_t>(sd) & 1)*B22) |
2987 ((static_cast<int32_t>(sd) >> 1)*B12) |
2988 B11 | B9 | addr.vencoding();
2989 Emit32(encoding);
2990}
2991
2992
2993void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2994 const Address& addr = static_cast<const Address&>(ad);
2995 CHECK_NE(dd, kNoDRegister);
2996 CheckCondition(cond);
2997 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2998 B27 | B26 | B24 | B20 |
2999 ((static_cast<int32_t>(dd) >> 4)*B22) |
3000 ((static_cast<int32_t>(dd) & 0xf)*B12) |
3001 B11 | B9 | B8 | addr.vencoding();
3002 Emit32(encoding);
3003}
3004
3005
3006void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
3007 const Address& addr = static_cast<const Address&>(ad);
3008 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
3009 CHECK_NE(dd, kNoDRegister);
3010 CheckCondition(cond);
3011 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
3012 B27 | B26 | B24 |
3013 ((static_cast<int32_t>(dd) >> 4)*B22) |
3014 ((static_cast<int32_t>(dd) & 0xf)*B12) |
3015 B11 | B9 | B8 | addr.vencoding();
3016 Emit32(encoding);
3017}
3018
3019
3020void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
3021 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
3022}
3023
3024
3025void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
3026 EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
3027}
3028
3029
3030void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
3031 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
3032}
3033
3034
3035void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
3036 EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
3037}
3038
3039
Artem Serovcb3cf4a2016-07-15 15:01:13 +01003040void Thumb2Assembler::vldmiad(Register base_reg, DRegister reg, int nregs, Condition cond) {
3041 int32_t rest = B23;
3042 EmitVLdmOrStm(rest,
3043 static_cast<uint32_t>(reg),
3044 nregs,
3045 base_reg,
3046 /*is_load*/ true,
3047 /*dbl*/ true,
3048 cond);
3049}
3050
3051
3052void Thumb2Assembler::vstmiad(Register base_reg, DRegister reg, int nregs, Condition cond) {
3053 int32_t rest = B23;
3054 EmitVLdmOrStm(rest,
3055 static_cast<uint32_t>(reg),
3056 nregs,
3057 base_reg,
3058 /*is_load*/ false,
3059 /*dbl*/ true,
3060 cond);
3061}
3062
3063
Dave Allison65fcc2c2014-04-28 13:45:27 -07003064void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
Artem Serovcb3cf4a2016-07-15 15:01:13 +01003065 int32_t rest = B21 | (push ? B24 : B23);
3066 EmitVLdmOrStm(rest, reg, nregs, SP, /*is_load*/ !push, dbl, cond);
3067}
3068
3069
3070void Thumb2Assembler::EmitVLdmOrStm(int32_t rest,
3071 uint32_t reg,
3072 int nregs,
3073 Register rn,
3074 bool is_load,
3075 bool dbl,
3076 Condition cond) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003077 CheckCondition(cond);
3078
Artem Serovcb3cf4a2016-07-15 15:01:13 +01003079 DCHECK_GT(nregs, 0);
3080 DCHECK_LE(reg + nregs, 32u);
3081 DCHECK(!dbl || (nregs <= 16));
3082
Dave Allison65fcc2c2014-04-28 13:45:27 -07003083 uint32_t D;
3084 uint32_t Vd;
3085 if (dbl) {
3086 // Encoded as D:Vd.
3087 D = (reg >> 4) & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07003088 Vd = reg & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003089 } else {
3090 // Encoded as Vd:D.
3091 D = reg & 1;
Andreas Gampec8ccf682014-09-29 20:07:43 -07003092 Vd = (reg >> 1) & 15U /* 0b1111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003093 }
Artem Serovcb3cf4a2016-07-15 15:01:13 +01003094
3095 int32_t encoding = rest |
3096 14U /* 0b1110 */ << 28 |
3097 B27 | B26 | B11 | B9 |
3098 (is_load ? B20 : 0) |
3099 static_cast<int16_t>(rn) << 16 |
3100 D << 22 |
3101 Vd << 12 |
3102 (dbl ? B8 : 0) |
3103 nregs << (dbl ? 1 : 0);
3104
Dave Allison65fcc2c2014-04-28 13:45:27 -07003105 Emit32(encoding);
3106}
3107
3108
// Shared emitter for three-operand single-precision VFP instructions:
// sd = sn <op> sm, where `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  // Each S register number is split into a 4-bit field plus a single extra
  // bit (placed at B22/B7/B5 for sd/sn/sm respectively).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sn) >> 1)*B16) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(sn) & 1)*B7) |
      ((static_cast<int32_t>(sm) & 1)*B5) |
      (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
3125
3126
// Shared emitter for three-operand double-precision VFP instructions:
// dd = dn <op> dm, where `opcode` supplies the operation-specific bits.
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  // Each D register number is split into a 4-bit field plus a single top
  // bit (placed at B22/B7/B5 for dd/dn/dm respectively).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | B8 | opcode |
      ((static_cast<int32_t>(dd) >> 4)*B22) |
      ((static_cast<int32_t>(dn) & 0xf)*B16) |
      ((static_cast<int32_t>(dd) & 0xf)*B12) |
      ((static_cast<int32_t>(dn) >> 4)*B7) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3143
3144
// Shared emitter for VFP instructions with a single-precision destination
// and a double-precision source (e.g. conversions): sd = <op> dm.
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
      B27 | B26 | B25 | B11 | B9 | opcode |
      ((static_cast<int32_t>(sd) & 1)*B22) |
      ((static_cast<int32_t>(sd) >> 1)*B12) |
      ((static_cast<int32_t>(dm) >> 4)*B5) |
      (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
3158
3159
3160void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
3161 DRegister dd, SRegister sm) {
3162 CHECK_NE(dd, kNoDRegister);
3163 CHECK_NE(sm, kNoSRegister);
3164 CheckCondition(cond);
3165 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
3166 B27 | B26 | B25 | B11 | B9 | opcode |
3167 ((static_cast<int32_t>(dd) >> 4)*B22) |
3168 ((static_cast<int32_t>(dd) & 0xf)*B12) |
3169 ((static_cast<int32_t>(sm) & 1)*B5) |
3170 (static_cast<int32_t>(sm) >> 1);
3171 Emit32(encoding);
3172}
3173
3174
3175void Thumb2Assembler::vmstat(Condition cond) { // VMRS APSR_nzcv, FPSCR.
Calin Juravleddb7df22014-11-25 20:56:51 +00003176 CHECK_NE(cond, kNoCondition);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003177 CheckCondition(cond);
Calin Juravleddb7df22014-11-25 20:56:51 +00003178 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
3179 B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
3180 (static_cast<int32_t>(PC)*B12) |
3181 B11 | B9 | B4;
3182 Emit32(encoding);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003183}
3184
xueliang.zhonge652c122016-06-13 14:42:27 +01003185void Thumb2Assembler::vcntd(DRegister dd, DRegister dm) {
3186 uint32_t encoding = (B31 | B30 | B29 | B28 | B27 | B26 | B25 | B24 | B23 | B21 | B20) |
3187 ((static_cast<int32_t>(dd) >> 4) * B22) |
3188 ((static_cast<uint32_t>(dd) & 0xf) * B12) |
3189 (B10 | B8) |
3190 ((static_cast<int32_t>(dm) >> 4) * B5) |
3191 (static_cast<uint32_t>(dm) & 0xf);
3192
3193 Emit32(encoding);
3194}
3195
3196void Thumb2Assembler::vpaddld(DRegister dd, DRegister dm, int32_t size, bool is_unsigned) {
3197 CHECK(size == 8 || size == 16 || size == 32) << size;
3198 uint32_t encoding = (B31 | B30 | B29 | B28 | B27 | B26 | B25 | B24 | B23 | B21 | B20) |
3199 ((static_cast<uint32_t>(size >> 4) & 0x3) * B18) |
3200 ((static_cast<int32_t>(dd) >> 4) * B22) |
3201 ((static_cast<uint32_t>(dd) & 0xf) * B12) |
3202 (B9) |
3203 (is_unsigned ? B7 : 0) |
3204 ((static_cast<int32_t>(dm) >> 4) * B5) |
3205 (static_cast<uint32_t>(dm) & 0xf);
3206
3207 Emit32(encoding);
3208}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003209
3210void Thumb2Assembler::svc(uint32_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003211 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003212 int16_t encoding = B15 | B14 | B12 |
3213 B11 | B10 | B9 | B8 |
3214 imm8;
3215 Emit16(encoding);
3216}
3217
3218
3219void Thumb2Assembler::bkpt(uint16_t imm8) {
Andreas Gampeab1eb0d2015-02-13 19:23:55 -08003220 CHECK(IsUint<8>(imm8)) << imm8;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003221 int16_t encoding = B15 | B13 | B12 |
3222 B11 | B10 | B9 |
3223 imm8;
3224 Emit16(encoding);
3225}
3226
3227// Convert the given IT state to a mask bit given bit 0 of the first
3228// condition and a shift position.
3229static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
3230 switch (s) {
3231 case kItOmitted: return 1 << shift;
3232 case kItThen: return firstcond0 << shift;
3233 case kItElse: return !firstcond0 << shift;
3234 }
3235 return 0;
3236}
3237
3238
3239// Set the IT condition in the given position for the given state. This is used
3240// to check that conditional instructions match the preceding IT statement.
3241void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
3242 switch (s) {
3243 case kItOmitted: it_conditions_[index] = AL; break;
3244 case kItThen: it_conditions_[index] = cond; break;
3245 case kItElse:
3246 it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
3247 break;
3248 }
3249}
3250
3251
3252void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
3253 CheckCondition(AL); // Not allowed in IT block.
3254 uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;
3255
3256 // All conditions to AL.
3257 for (uint8_t i = 0; i < 4; ++i) {
3258 it_conditions_[i] = AL;
3259 }
3260
3261 SetItCondition(kItThen, firstcond, 0);
3262 uint8_t mask = ToItMask(i1, firstcond0, 3);
3263 SetItCondition(i1, firstcond, 1);
3264
3265 if (i1 != kItOmitted) {
3266 mask |= ToItMask(i2, firstcond0, 2);
3267 SetItCondition(i2, firstcond, 2);
3268 if (i2 != kItOmitted) {
3269 mask |= ToItMask(i3, firstcond0, 1);
3270 SetItCondition(i3, firstcond, 3);
3271 if (i3 != kItOmitted) {
Andreas Gampec8ccf682014-09-29 20:07:43 -07003272 mask |= 1U /* 0b0001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003273 }
3274 }
3275 }
3276
3277 // Start at first condition.
3278 it_cond_index_ = 0;
3279 next_condition_ = it_conditions_[0];
3280 uint16_t encoding = B15 | B13 | B12 |
3281 B11 | B10 | B9 | B8 |
3282 firstcond << 4 |
3283 mask;
3284 Emit16(encoding);
3285}
3286
3287
3288void Thumb2Assembler::cbz(Register rn, Label* label) {
3289 CheckCondition(AL);
3290 if (label->IsBound()) {
3291 LOG(FATAL) << "cbz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00003292 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003293 } else if (IsHighRegister(rn)) {
3294 LOG(FATAL) << "cbz can only be used with low registers";
3295 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003296 } else {
3297 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
3298 label->LinkTo(branchid);
3299 }
3300}
3301
3302
3303void Thumb2Assembler::cbnz(Register rn, Label* label) {
3304 CheckCondition(AL);
3305 if (label->IsBound()) {
3306 LOG(FATAL) << "cbnz can only be used to branch forwards";
Vladimir Markoe8469c12014-11-26 18:09:30 +00003307 UNREACHABLE();
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003308 } else if (IsHighRegister(rn)) {
3309 LOG(FATAL) << "cbnz can only be used with low registers";
3310 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003311 } else {
3312 uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
3313 label->LinkTo(branchid);
3314 }
3315}
3316
3317
3318void Thumb2Assembler::blx(Register rm, Condition cond) {
3319 CHECK_NE(rm, kNoRegister);
3320 CheckCondition(cond);
3321 int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
3322 Emit16(encoding);
3323}
3324
3325
3326void Thumb2Assembler::bx(Register rm, Condition cond) {
3327 CHECK_NE(rm, kNoRegister);
3328 CheckCondition(cond);
3329 int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
3330 Emit16(encoding);
3331}
3332
3333
3334void Thumb2Assembler::Push(Register rd, Condition cond) {
3335 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
3336}
3337
3338
3339void Thumb2Assembler::Pop(Register rd, Condition cond) {
3340 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
3341}
3342
3343
3344void Thumb2Assembler::PushList(RegList regs, Condition cond) {
3345 stm(DB_W, SP, regs, cond);
3346}
3347
3348
3349void Thumb2Assembler::PopList(RegList regs, Condition cond) {
3350 ldm(IA_W, SP, regs, cond);
3351}
3352
Artem Serovf4d6aee2016-07-11 10:41:45 +01003353void Thumb2Assembler::StoreList(RegList regs, size_t stack_offset) {
3354 DCHECK_NE(regs, 0u);
3355 DCHECK_EQ(regs & (1u << IP), 0u);
3356 if (IsPowerOfTwo(regs)) {
3357 Register reg = static_cast<Register>(CTZ(static_cast<uint32_t>(regs)));
3358 str(reg, Address(SP, stack_offset));
3359 } else {
3360 add(IP, SP, ShifterOperand(stack_offset));
3361 stm(IA, IP, regs);
3362 }
3363}
3364
3365void Thumb2Assembler::LoadList(RegList regs, size_t stack_offset) {
3366 DCHECK_NE(regs, 0u);
3367 DCHECK_EQ(regs & (1u << IP), 0u);
3368 if (IsPowerOfTwo(regs)) {
3369 Register reg = static_cast<Register>(CTZ(static_cast<uint32_t>(regs)));
3370 ldr(reg, Address(SP, stack_offset));
3371 } else {
3372 Register lowest_reg = static_cast<Register>(CTZ(static_cast<uint32_t>(regs)));
3373 add(lowest_reg, SP, ShifterOperand(stack_offset));
3374 ldm(IA, lowest_reg, regs);
3375 }
3376}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003377
3378void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
3379 if (cond != AL || rd != rm) {
3380 mov(rd, ShifterOperand(rm), cond);
3381 }
3382}
3383
3384
Dave Allison65fcc2c2014-04-28 13:45:27 -07003385void Thumb2Assembler::Bind(Label* label) {
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003386 BindLabel(label, buffer_.Size());
Vladimir Marko167bc0b2016-09-13 15:11:50 +01003387
3388 // Try to emit some Fixups now to reduce the memory needed during the branch fixup later.
3389 while (!fixups_.empty() && fixups_.back().IsCandidateForEmitEarly()) {
3390 const Fixup& last_fixup = fixups_.back();
3391 // Fixups are ordered by location, so the candidate can surely be emitted if it is
3392 // a forward branch. If it's a backward branch, it may go over any number of other
3393 // fixups. We could check for any number of emit early candidates but we want this
3394 // heuristics to be quick, so check just one.
3395 uint32_t target = last_fixup.GetTarget();
3396 if (target < last_fixup.GetLocation() &&
3397 fixups_.size() >= 2u &&
3398 fixups_[fixups_.size() - 2u].GetLocation() >= target) {
3399 const Fixup& prev_fixup = fixups_[fixups_.size() - 2u];
3400 if (!prev_fixup.IsCandidateForEmitEarly()) {
3401 break;
3402 }
3403 uint32_t min_target = std::min(target, prev_fixup.GetTarget());
3404 if (fixups_.size() >= 3u && fixups_[fixups_.size() - 3u].GetLocation() >= min_target) {
3405 break;
3406 }
3407 }
3408 last_fixup.Emit(&buffer_, buffer_.Size());
3409 fixups_.pop_back();
3410 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003411}
3412
3413
3414void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003415 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003416 CHECK_LE(shift_imm, 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003417 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003418 EmitShift(rd, rm, LSL, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003419}
3420
3421
3422void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003423 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003424 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003425 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003426 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003427 EmitShift(rd, rm, LSR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003428}
3429
3430
3431void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003432 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003433 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003434 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07003435 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003436 EmitShift(rd, rm, ASR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003437}
3438
3439
3440void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003441 Condition cond, SetCc set_cc) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00003442 CHECK(1u <= shift_imm && shift_imm <= 31u);
Dave Allison45fdb932014-06-25 12:37:10 -07003443 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003444 EmitShift(rd, rm, ROR, shift_imm, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003445}
3446
3447
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003448void Thumb2Assembler::Rrx(Register rd, Register rm, Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003449 CheckCondition(cond);
Vladimir Markof9d741e2015-11-20 15:08:11 +00003450 EmitShift(rd, rm, RRX, 0, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003451}
3452
3453
3454void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003455 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003456 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003457 EmitShift(rd, rm, LSL, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003458}
3459
3460
3461void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003462 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003463 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003464 EmitShift(rd, rm, LSR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003465}
3466
3467
3468void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003469 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003470 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003471 EmitShift(rd, rm, ASR, rn, cond, set_cc);
Dave Allison45fdb932014-06-25 12:37:10 -07003472}
3473
3474
3475void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003476 Condition cond, SetCc set_cc) {
Dave Allison45fdb932014-06-25 12:37:10 -07003477 CheckCondition(cond);
Vladimir Marko73cf0fb2015-07-30 15:07:22 +01003478 EmitShift(rd, rm, ROR, rn, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003479}
3480
3481
3482int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
3483 // The offset is off by 4 due to the way the ARM CPUs read PC.
3484 offset -= 4;
3485 offset >>= 1;
3486
3487 uint32_t value = 0;
3488 // There are two different encodings depending on the value of bit 12. In one case
3489 // intermediate values are calculated using the sign bit.
3490 if ((inst & B12) == B12) {
3491 // 25 bits of offset.
3492 uint32_t signbit = (offset >> 31) & 0x1;
3493 uint32_t i1 = (offset >> 22) & 0x1;
3494 uint32_t i2 = (offset >> 21) & 0x1;
3495 uint32_t imm10 = (offset >> 11) & 0x03ff;
3496 uint32_t imm11 = offset & 0x07ff;
3497 uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
3498 uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
3499 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
3500 imm11;
3501 // Remove the offset from the current encoding.
3502 inst &= ~(0x3ff << 16 | 0x7ff);
3503 } else {
3504 uint32_t signbit = (offset >> 31) & 0x1;
3505 uint32_t imm6 = (offset >> 11) & 0x03f;
3506 uint32_t imm11 = offset & 0x07ff;
3507 uint32_t j1 = (offset >> 19) & 1;
3508 uint32_t j2 = (offset >> 17) & 1;
3509 value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
3510 imm11;
3511 // Remove the offset from the current encoding.
3512 inst &= ~(0x3f << 16 | 0x7ff);
3513 }
3514 // Mask out offset bits in current instruction.
3515 inst &= ~(B26 | B13 | B11);
3516 inst |= value;
3517 return inst;
3518}
3519
3520
3521int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
3522 int32_t imm32;
3523 if ((instr & B12) == B12) {
3524 uint32_t S = (instr >> 26) & 1;
3525 uint32_t J2 = (instr >> 11) & 1;
3526 uint32_t J1 = (instr >> 13) & 1;
3527 uint32_t imm10 = (instr >> 16) & 0x3FF;
3528 uint32_t imm11 = instr & 0x7FF;
3529
3530 uint32_t I1 = ~(J1 ^ S) & 1;
3531 uint32_t I2 = ~(J2 ^ S) & 1;
3532 imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
3533 imm32 = (imm32 << 8) >> 8; // sign extend 24 bit immediate.
3534 } else {
3535 uint32_t S = (instr >> 26) & 1;
3536 uint32_t J2 = (instr >> 11) & 1;
3537 uint32_t J1 = (instr >> 13) & 1;
3538 uint32_t imm6 = (instr >> 16) & 0x3F;
3539 uint32_t imm11 = instr & 0x7FF;
3540
3541 imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
3542 imm32 = (imm32 << 11) >> 11; // sign extend 21 bit immediate.
3543 }
3544 imm32 += 4;
3545 return imm32;
3546}
3547
Vladimir Markocf93a5c2015-06-16 11:33:24 +00003548uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
3549 // We can reconstruct the adjustment by going through all the fixups from the beginning
3550 // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
3551 // with increasing old_position, we can use the data from last AdjustedPosition() to
3552 // continue where we left off and the whole loop should be O(m+n) where m is the number
3553 // of positions to adjust and n is the number of fixups.
3554 if (old_position < last_old_position_) {
3555 last_position_adjustment_ = 0u;
3556 last_old_position_ = 0u;
3557 last_fixup_id_ = 0u;
3558 }
3559 while (last_fixup_id_ != fixups_.size()) {
3560 Fixup* fixup = GetFixup(last_fixup_id_);
3561 if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
3562 break;
3563 }
3564 if (fixup->GetSize() != fixup->GetOriginalSize()) {
3565 last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
3566 }
3567 ++last_fixup_id_;
3568 }
3569 last_old_position_ = old_position;
3570 return old_position + last_position_adjustment_;
3571}
3572
3573Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
3574 DCHECK(size == 4u || size == 8u) << size;
3575 literals_.emplace_back(size, data);
3576 return &literals_.back();
3577}
3578
3579void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
3580 DCHECK_EQ(literal->GetSize(), 4u);
3581 DCHECK(!literal->GetLabel()->IsBound());
3582 bool use32bit = IsForced32Bit() || IsHighRegister(rt);
3583 uint32_t location = buffer_.Size();
3584 Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
3585 FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
3586 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3587 literal->GetLabel()->LinkTo(fixup_id);
3588 if (use32bit) {
3589 Emit16(0);
3590 }
3591 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3592}
3593
3594void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
3595 DCHECK_EQ(literal->GetSize(), 8u);
3596 DCHECK(!literal->GetLabel()->IsBound());
3597 uint32_t location = buffer_.Size();
3598 FixupId fixup_id =
3599 AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
3600 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3601 literal->GetLabel()->LinkTo(fixup_id);
3602 Emit16(0);
3603 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3604}
3605
3606void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
3607 DCHECK_EQ(literal->GetSize(), 4u);
3608 DCHECK(!literal->GetLabel()->IsBound());
3609 uint32_t location = buffer_.Size();
3610 FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
3611 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3612 literal->GetLabel()->LinkTo(fixup_id);
3613 Emit16(0);
3614 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3615}
3616
3617void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
3618 DCHECK_EQ(literal->GetSize(), 8u);
3619 DCHECK(!literal->GetLabel()->IsBound());
3620 uint32_t location = buffer_.Size();
3621 FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
3622 Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
3623 literal->GetLabel()->LinkTo(fixup_id);
3624 Emit16(0);
3625 DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
3626}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003627
Dave Allison65fcc2c2014-04-28 13:45:27 -07003628
3629void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
Vladimir Marko449b1092015-09-08 12:16:45 +01003630 Condition cond, SetCc set_cc) {
3631 if (value == 0 && set_cc != kCcSet) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003632 if (rd != rn) {
3633 mov(rd, ShifterOperand(rn), cond);
3634 }
3635 return;
3636 }
3637 // We prefer to select the shorter code sequence rather than selecting add for
3638 // positive values and sub for negatives ones, which would slightly improve
3639 // the readability of generated code for some constants.
3640 ShifterOperand shifter_op;
Vladimir Markof5c09c32015-12-17 12:08:08 +00003641 if (ShifterOperandCanHold(rd, rn, ADD, value, set_cc, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003642 add(rd, rn, shifter_op, cond, set_cc);
Vladimir Markof5c09c32015-12-17 12:08:08 +00003643 } else if (ShifterOperandCanHold(rd, rn, SUB, -value, set_cc, &shifter_op)) {
Vladimir Marko449b1092015-09-08 12:16:45 +01003644 sub(rd, rn, shifter_op, cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003645 } else {
3646 CHECK(rn != IP);
Vladimir Markof5c09c32015-12-17 12:08:08 +00003647 // If rd != rn, use rd as temp. This alows 16-bit ADD/SUB in more situations than using IP.
3648 Register temp = (rd != rn) ? rd : IP;
Vladimir Markoac6ac102015-12-17 12:14:00 +00003649 if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003650 mvn(temp, shifter_op, cond, kCcKeep);
3651 add(rd, rn, ShifterOperand(temp), cond, set_cc);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003652 } else if (ShifterOperandCanHold(temp, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003653 mvn(temp, shifter_op, cond, kCcKeep);
3654 sub(rd, rn, ShifterOperand(temp), cond, set_cc);
3655 } else if (High16Bits(-value) == 0) {
3656 movw(temp, Low16Bits(-value), cond);
3657 sub(rd, rn, ShifterOperand(temp), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003658 } else {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003659 movw(temp, Low16Bits(value), cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003660 uint16_t value_high = High16Bits(value);
3661 if (value_high != 0) {
Vladimir Markof5c09c32015-12-17 12:08:08 +00003662 movt(temp, value_high, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003663 }
Vladimir Markof5c09c32015-12-17 12:08:08 +00003664 add(rd, rn, ShifterOperand(temp), cond, set_cc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003665 }
3666 }
3667}
3668
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003669void Thumb2Assembler::CmpConstant(Register rn, int32_t value, Condition cond) {
Vladimir Markoac6ac102015-12-17 12:14:00 +00003670 // We prefer to select the shorter code sequence rather than using plain cmp and cmn
3671 // which would slightly improve the readability of generated code for some constants.
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003672 ShifterOperand shifter_op;
Vladimir Markof5c09c32015-12-17 12:08:08 +00003673 if (ShifterOperandCanHold(kNoRegister, rn, CMP, value, kCcSet, &shifter_op)) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003674 cmp(rn, shifter_op, cond);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003675 } else if (ShifterOperandCanHold(kNoRegister, rn, CMN, -value, kCcSet, &shifter_op)) {
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003676 cmn(rn, shifter_op, cond);
3677 } else {
3678 CHECK(rn != IP);
Vladimir Markoac6ac102015-12-17 12:14:00 +00003679 if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~value, kCcKeep, &shifter_op)) {
3680 mvn(IP, shifter_op, cond, kCcKeep);
3681 cmp(rn, ShifterOperand(IP), cond);
3682 } else if (ShifterOperandCanHold(IP, kNoRegister, MVN, ~(-value), kCcKeep, &shifter_op)) {
3683 mvn(IP, shifter_op, cond, kCcKeep);
3684 cmn(rn, ShifterOperand(IP), cond);
3685 } else if (High16Bits(-value) == 0) {
3686 movw(IP, Low16Bits(-value), cond);
3687 cmn(rn, ShifterOperand(IP), cond);
3688 } else {
3689 movw(IP, Low16Bits(value), cond);
3690 uint16_t value_high = High16Bits(value);
3691 if (value_high != 0) {
3692 movt(IP, value_high, cond);
3693 }
3694 cmp(rn, ShifterOperand(IP), cond);
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003695 }
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003696 }
3697}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003698
Dave Allison65fcc2c2014-04-28 13:45:27 -07003699void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
3700 ShifterOperand shifter_op;
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003701 if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003702 mov(rd, shifter_op, cond);
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003703 } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07003704 mvn(rd, shifter_op, cond);
3705 } else {
3706 movw(rd, Low16Bits(value), cond);
3707 uint16_t value_high = High16Bits(value);
3708 if (value_high != 0) {
3709 movt(rd, value_high, cond);
3710 }
3711 }
3712}
3713
Vladimir Markoebdbf4b2016-07-07 15:37:02 +01003714void Thumb2Assembler::LoadDImmediate(DRegister dd, double value, Condition cond) {
3715 if (!vmovd(dd, value, cond)) {
3716 uint64_t int_value = bit_cast<uint64_t, double>(value);
3717 if (int_value == bit_cast<uint64_t, double>(0.0)) {
3718 // 0.0 is quite common, so we special case it by loading
3719 // 2.0 in `dd` and then subtracting it.
3720 bool success = vmovd(dd, 2.0, cond);
3721 CHECK(success);
3722 vsubd(dd, dd, dd, cond);
3723 } else {
3724 Literal* literal = literal64_dedupe_map_.GetOrCreate(
3725 int_value,
3726 [this, int_value]() { return NewLiteral<uint64_t>(int_value); });
3727 LoadLiteral(dd, literal);
3728 }
3729 }
3730}
3731
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003732int32_t Thumb2Assembler::GetAllowedLoadOffsetBits(LoadOperandType type) {
3733 switch (type) {
3734 case kLoadSignedByte:
3735 case kLoadSignedHalfword:
3736 case kLoadUnsignedHalfword:
3737 case kLoadUnsignedByte:
3738 case kLoadWord:
3739 // We can encode imm12 offset.
3740 return 0xfffu;
3741 case kLoadSWord:
3742 case kLoadDWord:
3743 case kLoadWordPair:
3744 // We can encode imm8:'00' offset.
3745 return 0xff << 2;
3746 default:
3747 LOG(FATAL) << "UNREACHABLE";
3748 UNREACHABLE();
3749 }
3750}
3751
3752int32_t Thumb2Assembler::GetAllowedStoreOffsetBits(StoreOperandType type) {
3753 switch (type) {
3754 case kStoreHalfword:
3755 case kStoreByte:
3756 case kStoreWord:
3757 // We can encode imm12 offset.
3758 return 0xfff;
3759 case kStoreSWord:
3760 case kStoreDWord:
3761 case kStoreWordPair:
3762 // We can encode imm8:'00' offset.
3763 return 0xff << 2;
3764 default:
3765 LOG(FATAL) << "UNREACHABLE";
3766 UNREACHABLE();
3767 }
3768}
3769
3770bool Thumb2Assembler::CanSplitLoadStoreOffset(int32_t allowed_offset_bits,
3771 int32_t offset,
3772 /*out*/ int32_t* add_to_base,
3773 /*out*/ int32_t* offset_for_load_store) {
3774 int32_t other_bits = offset & ~allowed_offset_bits;
3775 if (ShifterOperandCanAlwaysHold(other_bits) || ShifterOperandCanAlwaysHold(-other_bits)) {
3776 *add_to_base = offset & ~allowed_offset_bits;
3777 *offset_for_load_store = offset & allowed_offset_bits;
3778 return true;
3779 }
3780 return false;
3781}
3782
3783int32_t Thumb2Assembler::AdjustLoadStoreOffset(int32_t allowed_offset_bits,
3784 Register temp,
3785 Register base,
3786 int32_t offset,
3787 Condition cond) {
3788 DCHECK_NE(offset & ~allowed_offset_bits, 0);
3789 int32_t add_to_base, offset_for_load;
3790 if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
3791 AddConstant(temp, base, add_to_base, cond, kCcKeep);
3792 return offset_for_load;
3793 } else {
3794 LoadImmediate(temp, offset, cond);
3795 add(temp, temp, ShifterOperand(base), cond, kCcKeep);
3796 return 0;
3797 }
3798}
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003799
Dave Allison65fcc2c2014-04-28 13:45:27 -07003800// Implementation note: this method must emit at most one instruction when
3801// Address::CanHoldLoadOffsetThumb.
3802void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
3803 Register reg,
3804 Register base,
3805 int32_t offset,
3806 Condition cond) {
3807 if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
Roland Levillain775ef492014-11-04 17:43:11 +00003808 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003809 // Inlined AdjustLoadStoreOffset() allows us to pull a few more tricks.
3810 int32_t allowed_offset_bits = GetAllowedLoadOffsetBits(type);
3811 DCHECK_NE(offset & ~allowed_offset_bits, 0);
3812 int32_t add_to_base, offset_for_load;
3813 if (CanSplitLoadStoreOffset(allowed_offset_bits, offset, &add_to_base, &offset_for_load)) {
3814 // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
3815 AddConstant(reg, base, add_to_base, cond, kCcKeep);
3816 base = reg;
3817 offset = offset_for_load;
3818 } else {
3819 Register temp = (reg == base) ? IP : reg;
3820 LoadImmediate(temp, offset, cond);
3821 // TODO: Implement indexed load (not available for LDRD) and use it here to avoid the ADD.
3822 // Use reg for the adjusted base. If it's low reg, we may end up using 16-bit load.
3823 add(reg, reg, ShifterOperand((reg == base) ? IP : base), cond, kCcKeep);
3824 base = reg;
3825 offset = 0;
3826 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003827 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003828 DCHECK(Address::CanHoldLoadOffsetThumb(type, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003829 switch (type) {
3830 case kLoadSignedByte:
3831 ldrsb(reg, Address(base, offset), cond);
3832 break;
3833 case kLoadUnsignedByte:
3834 ldrb(reg, Address(base, offset), cond);
3835 break;
3836 case kLoadSignedHalfword:
3837 ldrsh(reg, Address(base, offset), cond);
3838 break;
3839 case kLoadUnsignedHalfword:
3840 ldrh(reg, Address(base, offset), cond);
3841 break;
3842 case kLoadWord:
3843 ldr(reg, Address(base, offset), cond);
3844 break;
3845 case kLoadWordPair:
3846 ldrd(reg, Address(base, offset), cond);
3847 break;
3848 default:
3849 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07003850 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07003851 }
3852}
3853
Dave Allison65fcc2c2014-04-28 13:45:27 -07003854// Implementation note: this method must emit at most one instruction when
3855// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3856void Thumb2Assembler::LoadSFromOffset(SRegister reg,
3857 Register base,
3858 int32_t offset,
3859 Condition cond) {
3860 if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
3861 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003862 offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadSWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003863 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003864 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003865 DCHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003866 vldrs(reg, Address(base, offset), cond);
3867}
3868
3869
3870// Implementation note: this method must emit at most one instruction when
3871// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
3872void Thumb2Assembler::LoadDFromOffset(DRegister reg,
3873 Register base,
3874 int32_t offset,
3875 Condition cond) {
3876 if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
3877 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003878 offset = AdjustLoadStoreOffset(GetAllowedLoadOffsetBits(kLoadDWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003879 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003880 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003881 DCHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003882 vldrd(reg, Address(base, offset), cond);
3883}
3884
3885
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
//
// Stores `reg` (or the pair reg/reg+1 for kStoreWordPair) to [base, #offset].
// When the offset does not fit the store instruction's immediate field, part
// of it is materialized into a temporary register first. IP is preferred as
// the temporary; when IP would collide with `reg` (or `reg` + 1 for a pair),
// R5 (or R6) is spilled to the stack and used instead.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;  // Remains kNoRegister on the single-instruction path.
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    // IP can serve as the temporary only if it is not one of the stored registers.
    if ((reg != IP) &&
        ((type != kStoreWordPair) || (reg + 1 != IP))) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store) and `base`) to build the
      // Address object used by the store instruction(s) below.
      // Instead, save R5 on the stack (or R6 if R5 is already used by
      // `base`), use it as secondary temporary register, and restore
      // it after the store instruction has been emitted.
      tmp_reg = (base != R5) ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push above moved SP down by one register slot; compensate so the
        // SP-relative offset still addresses the intended location.
        offset += kRegisterSize;
      }
    }
    // TODO: Implement indexed store (not available for STRD), inline AdjustLoadStoreOffset()
    // and in the "unsplittable" path get rid of the "add" by using the store indexed instead.
    offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(type), tmp_reg, base, offset, cond);
    base = tmp_reg;
  }
  DCHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  // If a callee-visible register (R5/R6) was borrowed as the temporary,
  // restore it now; IP needs no restore.
  if ((tmp_reg != kNoRegister) && (tmp_reg != IP)) {
    CHECK((tmp_reg == R5) || (tmp_reg == R6));
    Pop(tmp_reg);
  }
}
3940
3941
3942// Implementation note: this method must emit at most one instruction when
3943// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
3944void Thumb2Assembler::StoreSToOffset(SRegister reg,
3945 Register base,
3946 int32_t offset,
3947 Condition cond) {
3948 if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
3949 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003950 offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreSWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003951 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003952 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003953 DCHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003954 vstrs(reg, Address(base, offset), cond);
3955}
3956
3957
3958// Implementation note: this method must emit at most one instruction when
3959// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
3960void Thumb2Assembler::StoreDToOffset(DRegister reg,
3961 Register base,
3962 int32_t offset,
3963 Condition cond) {
3964 if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
3965 CHECK_NE(base, IP);
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003966 offset = AdjustLoadStoreOffset(GetAllowedStoreOffsetBits(kStoreDWord), IP, base, offset, cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003967 base = IP;
Dave Allison65fcc2c2014-04-28 13:45:27 -07003968 }
Vladimir Marko6fd0ffe2015-11-19 21:13:52 +00003969 DCHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
Dave Allison65fcc2c2014-04-28 13:45:27 -07003970 vstrd(reg, Address(base, offset), cond);
3971}
3972
3973
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003974void Thumb2Assembler::dmb(DmbOptions flavor) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01003975 int32_t encoding = 0xf3bf8f50; // dmb in T1 encoding.
3976 Emit32(encoding | flavor);
Dave Allison65fcc2c2014-04-28 13:45:27 -07003977}
3978
3979
3980void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003981 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003982 cbz(r, label);
3983 } else {
3984 cmp(r, ShifterOperand(0));
3985 b(label, EQ);
3986 }
3987}
3988
3989
Dave Allison65fcc2c2014-04-28 13:45:27 -07003990void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003991 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003992 cbnz(r, label);
3993 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003994 cmp(r, ShifterOperand(0));
3995 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003996 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003997}
Andreas Gampe7cffc3b2015-10-19 21:31:53 -07003998
// Creates a jump table for the given case labels and emits the code that loads
// the table's address into `base_reg` (via a literal-address fixup). Returns a
// pointer to the table, which remains owned by `jump_tables_`.
JumpTable* Thumb2Assembler::CreateJumpTable(std::vector<Label*>&& labels, Register base_reg) {
  jump_tables_.emplace_back(std::move(labels));
  JumpTable* table = &jump_tables_.back();
  // The table's own label must not be bound yet; it is linked into the fixup
  // chain below and resolved later.
  DCHECK(!table->GetLabel()->IsBound());

  // High registers (and forced 32-bit mode) need the wider literal-address
  // fixup; low registers can use the short 1 KiB-range form.
  bool use32bit = IsForced32Bit() || IsHighRegister(base_reg);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteralAddr4KiB : Fixup::kLiteralAddr1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadLiteralAddress(location, base_reg, size));
  // Emit the label's current link position as placeholder data and thread this
  // fixup onto the label's chain; the final address is patched in later.
  Emit16(static_cast<uint16_t>(table->GetLabel()->position_));
  table->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    // Second placeholder half-word for the 32-bit fixup form.
    Emit16(0);
  }
  // Sanity check: the bytes emitted match the fixup's declared size.
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());

  return table;
}
4017
// Emits the dispatch sequence that jumps through `jump_table` using the
// byte displacement already computed in `displacement_reg`: the anchor label
// is bound at the current position and the branch is a PC-relative ADD.
void Thumb2Assembler::EmitJumpTableDispatch(JumpTable* jump_table, Register displacement_reg) {
  CHECK(!IsForced32Bit()) << "Forced 32-bit dispatch not implemented yet";
  // 32-bit ADD doesn't support PC as an input, so we need a two-instruction sequence:
  //   SUB ip, ip, #0
  //   ADD pc, ip, reg
  // TODO: Implement.

  // The anchor's position needs to be fixed up before we can compute offsets - so make it a tracked
  // label.
  BindTrackedLabel(jump_table->GetAnchorLabel());

  // 16-bit ADD with PC as both destination and first operand; control
  // transfers to PC + displacement_reg.
  add(PC, PC, ShifterOperand(displacement_reg));
}
4031
Dave Allison65fcc2c2014-04-28 13:45:27 -07004032} // namespace arm
4033} // namespace art