blob: c31f46b8fe702ad723f4d8686fa3655fcf8fc348 [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstrom7940e442013-07-12 13:46:57 -070017#include "codegen_arm.h"
Ian Rogersd582fa42014-11-05 23:46:43 -080018
19#include "arch/arm/instruction_set_features_arm.h"
20#include "arm_lir.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080021#include "base/logging.h"
Vladimir Markocc234812015-04-07 09:36:09 +010022#include "dex/mir_graph.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023#include "dex/quick/mir_to_lir-inl.h"
buzbeeb5860fb2014-06-21 15:31:01 -070024#include "dex/reg_storage_eq.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080025#include "driver/compiler_driver.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070026
27namespace art {
28
29/* This file contains codegen for the Thumb ISA. */
30
/*
 * Determine whether the raw bits of a 32-bit float can be represented as a
 * Thumb2 VFP 8-bit immediate (sign : b : smeared-exponent : 6-bit slice).
 * Returns the 8-bit encoded value, or -1 if the constant is not encodable.
 */
static int32_t EncodeImmSingle(int32_t value) {
  // Decompose the IEEE-754 single into the fields the encoding inspects.
  const int32_t sign = (value & 0x80000000) >> 31;         // imm8<7> (bit a).
  const int32_t exp_not_b = (value & 0x40000000) >> 30;    // NOT(b).
  const int32_t exp_b = (value & 0x20000000) >> 29;        // imm8<6> (bit b).
  const int32_t exp_smear = (value & 0x3e000000) >> 25;    // Must replicate b.
  const int32_t slice = (value & 0x01f80000) >> 19;        // imm8<5:0>.
  const int32_t low_mantissa = value & 0x0007ffff;         // Must be zero.
  if (low_mantissa != 0) {
    return -1;  // Trailing mantissa bits cannot be expressed.
  }
  if (exp_b != 0) {
    // Exponent must look like 0_01111_xx: NOT(b) clear, smear all ones.
    if ((exp_not_b != 0) || (exp_smear != 0x1f)) {
      return -1;
    }
  } else {
    // Exponent must look like 0_10000_xx: NOT(b) set, smear all zeroes.
    if ((exp_not_b != 1) || (exp_smear != 0x0)) {
      return -1;
    }
  }
  return (sign << 7) | (exp_b << 6) | slice;
}
51
52/*
53 * Determine whether value can be encoded as a Thumb2 floating point
54 * immediate. If not, return -1. If so return encoded 8-bit value.
55 */
static int32_t EncodeImmDouble(int64_t value) {
  int32_t res;
  // Extract the sign with unsigned arithmetic.  The previous form,
  // (value & INT64_C(0x8000000000000000)) >> 63, arithmetic-shifted a negative
  // int64 so bit_a became -1: (bit_a << 7) then left-shifted a negative value
  // (undefined behavior) and every negative double produced a negative result,
  // so encodable negative immediates such as -1.0 were always rejected.
  int32_t bit_a = static_cast<int32_t>((static_cast<uint64_t>(value) >> 63) & 1);
  int32_t not_bit_b = (value & INT64_C(0x4000000000000000)) >> 62;
  int32_t bit_b = (value & INT64_C(0x2000000000000000)) >> 61;
  int32_t b_smear = (value & INT64_C(0x3fc0000000000000)) >> 54;
  int32_t slice = (value & INT64_C(0x003f000000000000)) >> 48;
  // The low 48 mantissa bits must all be zero for an 8-bit immediate.
  uint64_t zeroes = (value & INT64_C(0x0000ffffffffffff));
  if (zeroes != 0ull)
    return -1;
  if (bit_b) {
    // Exponent form 0_1111111111_xx: NOT(b) clear, smear all ones.
    if ((not_bit_b != 0) || (b_smear != 0xff))
      return -1;
  } else {
    // Exponent form 0_0000000000_xx inverted: NOT(b) set, smear all zeroes.
    if ((not_bit_b != 1) || (b_smear != 0x0))
      return -1;
  }
  // a:b:cdefgh — sign, top exponent bit, 6-bit mantissa/exponent slice.
  res = (bit_a << 7) | (bit_b << 6) | slice;
  return res;
}
76
// Load the 32-bit constant |value| (raw float bits) into the single-precision
// VFP register |r_dest|.  Tries a two-instruction zero idiom, then the VFP
// 8-bit immediate form, and finally falls back to a PC-relative load from the
// literal pool.
LIR* ArmMir2Lir::LoadFPConstantValue(int r_dest, int value) {
  DCHECK(RegStorage::IsSingle(r_dest));
  if (value == 0) {
    // +0.0 itself has no VFP immediate encoding (EncodeImmSingle(0) == -1),
    // so synthesize it from an encodable constant instead.
    // TODO: we need better info about the target CPU.  a vector exclusive or
    // would probably be better here if we could rely on its existance.
    // Load an immediate +2.0 (which encodes to 0)
    NewLIR2(kThumb2Vmovs_IMM8, r_dest, 0);
    // +0.0 = +2.0 - +2.0
    return NewLIR3(kThumb2Vsubs, r_dest, r_dest, r_dest);
  } else {
    // Single vmov.f32 if the bits fit the 8-bit float immediate form.
    int encoded_imm = EncodeImmSingle(value);
    if (encoded_imm >= 0) {
      return NewLIR2(kThumb2Vmovs_IMM8, r_dest, encoded_imm);
    }
  }
  // Fall back to the literal pool, reusing an existing entry when possible.
  LIR* data_target = ScanLiteralPool(literal_list_, value, 0);
  if (data_target == nullptr) {
    data_target = AddWordData(&literal_list_, value);
  }
  // Mark the load as a literal reference for dependency tracking.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
  LIR* load_pc_rel = RawLIR(current_dalvik_offset_, kThumb2Vldrs,
                            r_dest, rs_r15pc.GetReg(), 0, 0, 0, data_target);
  AppendLIR(load_pc_rel);
  return load_pc_rel;
}
102
Brian Carlstrom7940e442013-07-12 13:46:57 -0700103/*
104 * Determine whether value can be encoded as a Thumb2 modified
105 * immediate. If not, return -1. If so, return i:imm3:a:bcdefgh form.
106 */
int ArmMir2Lir::ModifiedImmediate(uint32_t value) {
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  // Plain 8-bit value, unrotated.
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh
  // Byte replicated into halfword positions: 0x00XY00XY.
  if (value == ((b0 << 16) | b0))
    return (0x1 << 8) | b0; /* 0:001:a:bcdefgh */
  // Byte replicated into all four positions: 0xXYXYXYXY.
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 8) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  // Byte replicated into the odd positions: 0xXY00XY00.
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 8) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  int z_leading = CLZ(value);
  int z_trailing = CTZ(value);
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return -1; /* No - bail */
  /* left-justify the constant, discarding msb (known to be 1) */
  value <<= z_leading + 1;
  /* Create bcdefgh */
  value >>= 25;
  /* Put it all together */
  // Rotation field is z_leading + 8 (the implicit leading 1 becomes bit 7).
  return value | ((0x8 + z_leading) << 7); /* [01000..11111]:bcdefgh */
}
133
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700134bool ArmMir2Lir::InexpensiveConstantInt(int32_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700135 return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
136}
137
// Opcode-aware variant: decides whether |value| can be materialized cheaply
// when used as an operand of |opcode|, taking advantage of the immediate
// forms that the specific instruction offers.
bool ArmMir2Lir::InexpensiveConstantInt(int32_t value, Instruction::Code opcode) {
  switch (opcode) {
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
      if ((value >> 12) == (value >> 31)) {  // Signed 12-bit, RRI12 versions of ADD/SUB.
        return true;
      }
      // Otherwise fall back to the modified-immediate check shared with compares.
      FALLTHROUGH_INTENDED;
    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE:
      // CMP can become CMN with the negated value, hence -value here.
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(-value) >= 0);
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      // Shift amounts always fit an immediate field.
      return true;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      if ((value >> 16) == 0) {
        return true;  // movw, 16-bit unsigned.
      }
      // Otherwise try the MOV/MVN modified-immediate forms below.
      FALLTHROUGH_INTENDED;
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::AND_INT_LIT16:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::OR_INT_LIT16:
    case Instruction::OR_INT_LIT8:
      // AND can become BIC and ORR can become ORN with ~value.
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
    case Instruction::XOR_INT_LIT16:
    case Instruction::XOR_INT_LIT8:
      // EOR has no complemented twin; only the direct immediate helps.
      return (ModifiedImmediate(value) >= 0);
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::MUL_INT_LIT8:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::DIV_INT_LIT8:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::REM_INT_LIT8:
    case Instruction::REM_INT_LIT16: {
      // Cheap if the multiply (or division-by-constant strength reduction)
      // can be done with at most two shift/add/sub ops.
      EasyMultiplyOp ops[2];
      return GetEasyMultiplyTwoOps(value, ops);
    }
    default:
      return false;
  }
}
202
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700203bool ArmMir2Lir::InexpensiveConstantFloat(int32_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700204 return EncodeImmSingle(value) >= 0;
205}
206
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700207bool ArmMir2Lir::InexpensiveConstantLong(int64_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700208 return InexpensiveConstantInt(High32Bits(value)) && InexpensiveConstantInt(Low32Bits(value));
209}
210
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700211bool ArmMir2Lir::InexpensiveConstantDouble(int64_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700212 return EncodeImmDouble(value) >= 0;
213}
214
215/*
216 * Load a immediate using a shortcut if possible; otherwise
217 * grab from the per-translation literal pool.
218 *
219 * No additional register clobbering operation performed. Use this version when
220 * 1) r_dest is freshly returned from AllocTemp or
221 * 2) The codegen is under fixed register usage
222 */
LIR* ArmMir2Lir::LoadConstantNoClobber(RegStorage r_dest, int value) {
  LIR* res;
  int mod_imm;

  // Float registers take the VFP/literal-pool path.
  if (r_dest.IsFloat()) {
    return LoadFPConstantValue(r_dest.GetReg(), value);
  }

  /* See if the value can be constructed cheaply */
  // 16-bit Thumb MOV for small unsigned values into low registers.
  if (r_dest.Low8() && (value >= 0) && (value <= 255)) {
    return NewLIR2(kThumbMovImm, r_dest.GetReg(), value);
  }
  /* Check Modified immediate special cases */
  mod_imm = ModifiedImmediate(value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MovI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  // MVN of the complement covers constants whose inverse is encodable.
  mod_imm = ModifiedImmediate(~value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MvnI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  /* 16-bit immediate? */
  if ((value & 0xffff) == value) {
    res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), value);
    return res;
  }
  /* Do a low/high pair */
  // movw then movt; the first LIR is returned as the value-defining one.
  res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), Low16Bits(value));
  NewLIR2(kThumb2MovImm16H, r_dest.GetReg(), High16Bits(value));
  return res;
}
256
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700257LIR* ArmMir2Lir::OpUnconditionalBranch(LIR* target) {
buzbee091cc402014-03-31 10:14:40 -0700258 LIR* res = NewLIR1(kThumbBUncond, 0 /* offset to be patched during assembly */);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700259 res->target = target;
260 return res;
261}
262
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700263LIR* ArmMir2Lir::OpCondBranch(ConditionCode cc, LIR* target) {
Vladimir Marko174636d2014-11-26 12:33:45 +0000264 LIR* branch = NewLIR2(kThumbBCond, 0 /* offset to be patched */,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700265 ArmConditionEncoding(cc));
266 branch->target = target;
267 return branch;
268}
269
buzbee2700f7e2014-03-07 09:46:20 -0800270LIR* ArmMir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700271 ArmOpcode opcode = kThumbBkpt;
272 switch (op) {
273 case kOpBlx:
274 opcode = kThumbBlxR;
275 break;
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700276 case kOpBx:
277 opcode = kThumbBx;
278 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700279 default:
280 LOG(FATAL) << "Bad opcode " << op;
281 }
buzbee2700f7e2014-03-07 09:46:20 -0800282 return NewLIR1(opcode, r_dest_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700283}
284
// Two-register operation where r_dest_src1 is both destination and first
// source, with an optional barrel-shift applied to r_src2.  Selects the
// narrowest available Thumb/Thumb2 encoding.
LIR* ArmMir2Lir::OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2,
                               int shift) {
  // The 16-bit Thumb forms require no shift and both registers in r0-r7.
  bool thumb_form =
      ((shift == 0) && r_dest_src1.Low8() && r_src2.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdc:
      opcode = (thumb_form) ? kThumbAdcRR : kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = (thumb_form) ? kThumbAndRR : kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = (thumb_form) ? kThumbBicRR : kThumb2BicRRR;
      break;
    case kOpCmn:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbCmnRR : kThumb2CmnRR;
      break;
    case kOpCmp:
      // CMP has distinct 16-bit encodings for low/high register combinations;
      // a non-zero shift forces the 32-bit form.
      if (thumb_form)
        opcode = kThumbCmpRR;
      else if ((shift == 0) && !r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbCmpHH;
      else if ((shift == 0) && r_dest_src1.Low8())
        opcode = kThumbCmpLH;
      else if (shift == 0)
        opcode = kThumbCmpHL;
      else
        opcode = kThumb2CmpRR;
      break;
    case kOpXor:
      opcode = (thumb_form) ? kThumbEorRR : kThumb2EorRRR;
      break;
    case kOpMov:
      DCHECK_EQ(shift, 0);
      // MOV also has dedicated low/high 16-bit encodings.
      if (r_dest_src1.Low8() && r_src2.Low8())
        opcode = kThumbMovRR;
      else if (!r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbMovRR_H2H;
      else if (r_dest_src1.Low8())
        opcode = kThumbMovRR_H2L;
      else
        opcode = kThumbMovRR_L2H;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbMul : kThumb2MulRRR;
      break;
    case kOpMvn:
      opcode = (thumb_form) ? kThumbMvn : kThumb2MnvRR;
      break;
    case kOpNeg:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbNeg : kThumb2NegRR;
      break;
    case kOpOr:
      opcode = (thumb_form) ? kThumbOrr : kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = (thumb_form) ? kThumbSbc : kThumb2SbcRRR;
      break;
    case kOpTst:
      opcode = (thumb_form) ? kThumbTst : kThumb2TstRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLslRR : kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLsrRR : kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbAsrRR : kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbRorRR : kThumb2RorRRR;
      break;
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRev:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRev;
      break;
    case kOpRevsh:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevshRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRevsh;
      break;
    case kOp2Byte:
      DCHECK_EQ(shift, 0);
      // Sign-extend the low byte via SBFX.
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 8);
    case kOp2Short:
      DCHECK_EQ(shift, 0);
      // Sign-extend the low halfword via SBFX.
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    case kOp2Char:
      DCHECK_EQ(shift, 0);
      // Zero-extend the low halfword via UBFX.
      return NewLIR4(kThumb2Ubfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  // Emit with the operand count the chosen encoding expects.
  if (EncodingMap[opcode].flags & IS_BINARY_OP) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), r_src2.GetReg());
  } else if (EncodingMap[opcode].flags & IS_TERTIARY_OP) {
    if (EncodingMap[opcode].field_loc[2].kind == kFmtShift) {
      // Third operand is the shift descriptor itself.
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_src2.GetReg(), shift);
    } else {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg());
    }
  } else if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    LOG(FATAL) << "Unexpected encoding operand count";
    return nullptr;
  }
}
417
buzbee2700f7e2014-03-07 09:46:20 -0800418LIR* ArmMir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
Ian Rogerse2143c02014-03-28 08:47:16 -0700419 return OpRegRegShift(op, r_dest_src1, r_src2, 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700420}
421
// Not used by the ARM backend; aborts at runtime if ever reached.
LIR* ArmMir2Lir::OpMovRegMem(RegStorage r_dest ATTRIBUTE_UNUSED,
                             RegStorage r_base ATTRIBUTE_UNUSED,
                             int offset ATTRIBUTE_UNUSED,
                             MoveType move_type ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}
429
// Not used by the ARM backend; aborts at runtime if ever reached.
LIR* ArmMir2Lir::OpMovMemReg(RegStorage r_base ATTRIBUTE_UNUSED,
                             int offset ATTRIBUTE_UNUSED,
                             RegStorage r_src ATTRIBUTE_UNUSED,
                             MoveType move_type ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}
437
// Conditional register-register ops are not supported on this backend;
// callers must not reach here.
LIR* ArmMir2Lir::OpCondRegReg(OpKind op ATTRIBUTE_UNUSED,
                              ConditionCode cc ATTRIBUTE_UNUSED,
                              RegStorage r_dest ATTRIBUTE_UNUSED,
                              RegStorage r_src ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unexpected use of OpCondRegReg for Arm";
  UNREACHABLE();
}
445
// Three-register operation with an optional barrel-shift on r_src2.
// Chooses a 16-bit Thumb encoding where one exists, else the Thumb2 form.
LIR* ArmMir2Lir::OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1,
                                  RegStorage r_src2, int shift) {
  ArmOpcode opcode = kThumbBkpt;
  // 16-bit encodings exist only for shift-free ops with all regs in r0-r7.
  bool thumb_form = (shift == 0) && r_dest.Low8() && r_src1.Low8() && r_src2.Low8();
  switch (op) {
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    // The remaining ops only exist as 32-bit Thumb2 three-register forms.
    case kOpRsub:
      opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = kThumb2BicRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2MulRRR;
      break;
    case kOpDiv:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2SdivRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2RorRRR;
      break;
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  // Quad-op encodings carry the shift descriptor as a fourth operand.
  if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    DCHECK(EncodingMap[opcode].flags & IS_TERTIARY_OP);
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg());
  }
}
514
buzbee2700f7e2014-03-07 09:46:20 -0800515LIR* ArmMir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2) {
Ian Rogerse2143c02014-03-28 08:47:16 -0700516 return OpRegRegRegShift(op, r_dest, r_src1, r_src2, 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700517}
518
// r_dest = r_src1 <op> value.  Tries, in order: dedicated immediate-shift
// encodings, short Thumb forms, the Thumb2 modified immediate (possibly of
// the negated/complemented value), the 12-bit immediate forms, and finally a
// register-register op on a temp loaded with the constant.
LIR* ArmMir2Lir::OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  ArmOpcode opcode = kThumbBkpt;
  // Register-register fallback used when no immediate encoding fits.
  ArmOpcode alt_opcode = kThumbBkpt;
  bool all_low_regs = r_dest.Low8() && r_src1.Low8();
  int32_t mod_imm = ModifiedImmediate(value);

  switch (op) {
    // Shifts always have a 5-bit immediate encoding.
    case kOpLsl:
      if (all_low_regs)
        return NewLIR3(kThumbLslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpLsr:
      if (all_low_regs)
        return NewLIR3(kThumbLsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAsr:
      if (all_low_regs)
        return NewLIR3(kThumbAsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2AsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpRor:
      return NewLIR3(kThumb2RorRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAdd:
      // SP- and PC-relative adds have special word-scaled 16-bit forms.
      if (r_dest.Low8() && (r_src1 == rs_r13sp) && (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddSpRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      } else if (r_dest.Low8() && (r_src1 == rs_r15pc) &&
                 (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddPcRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      }
      FALLTHROUGH_INTENDED;
    case kOpSub:
      // 3-bit immediate Thumb ADD/SUB; negation swaps the opcode.
      if (all_low_regs && ((abs_value & 0x7) == abs_value)) {
        if (op == kOpAdd)
          opcode = (neg) ? kThumbSubRRI3 : kThumbAddRRI3;
        else
          opcode = (neg) ? kThumbAddRRI3 : kThumbSubRRI3;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      // If value itself has no modified-immediate form, try -value with the
      // opposite operation.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          op = (op == kOpAdd) ? kOpSub : kOpAdd;
        }
      }
      if (mod_imm < 0 && (abs_value >> 12) == 0) {
        // This is deliberately used only if modified immediate encoding is inadequate since
        // we sometimes actually use the flags for small values but not necessarily low regs.
        if (op == kOpAdd)
          opcode = (neg) ? kThumb2SubRRI12 : kThumb2AddRRI12;
        else
          opcode = (neg) ? kThumb2AddRRI12 : kThumb2SubRRI12;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (op == kOpSub) {
        opcode = kThumb2SubRRI8M;
        alt_opcode = kThumb2SubRRR;
      } else {
        opcode = kThumb2AddRRI8M;
        alt_opcode = kThumb2AddRRR;
      }
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRI8M;
      alt_opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRI8M;
      alt_opcode = kThumb2AdcRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRI8M;
      alt_opcode = kThumb2SbcRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRI8M;
      alt_opcode = kThumb2OrrRRR;
      // ORN with ~value covers constants whose complement is encodable.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          opcode = kThumb2OrnRRI8M;
        }
      }
      break;
    case kOpAnd:
      // BIC with ~value covers constants whose complement is encodable.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          return NewLIR3(kThumb2BicRRI8M, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
        }
      }
      opcode = kThumb2AndRRI8M;
      alt_opcode = kThumb2AndRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRI8M;
      alt_opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      // TUNING: power of 2, shift & add
      // Force the register-register path: multiplication has no immediate form.
      mod_imm = -1;
      alt_opcode = kThumb2MulRRR;
      break;
    case kOpCmp: {
      LIR* res;
      if (mod_imm >= 0) {
        res = NewLIR2(kThumb2CmpRI8M, r_src1.GetReg(), mod_imm);
      } else {
        // CMN with -value, else compare against a temp holding the constant.
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          res = NewLIR2(kThumb2CmnRI8M, r_src1.GetReg(), mod_imm);
        } else {
          RegStorage r_tmp = AllocTemp();
          res = LoadConstant(r_tmp, value);
          OpRegReg(kOpCmp, r_src1, r_tmp);
          FreeTemp(r_tmp);
        }
      }
      return res;
    }
    default:
      LOG(FATAL) << "Bad opcode: " << op;
  }

  if (mod_imm >= 0) {
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
  } else {
    // No immediate form fit: materialize the constant and use the
    // register-register variant.
    RegStorage r_scratch = AllocTemp();
    LoadConstant(r_scratch, value);
    LIR* res;
    if (EncodingMap[alt_opcode].flags & IS_QUAD_OP)
      res = NewLIR4(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg(), 0);
    else
      res = NewLIR3(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg());
    FreeTemp(r_scratch);
    return res;
  }
}
660
661/* Handle Thumb-only variants here - otherwise punt to OpRegRegImm */
/* Handle Thumb-only variants here - otherwise punt to OpRegRegImm */
// r_dest_src1 = r_dest_src1 <op> value, using the short 16-bit Thumb
// immediate encodings when they apply.
LIR* ArmMir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  // 8-bit immediate forms need a low register and a magnitude <= 255.
  bool short_form = (((abs_value & 0xff) == abs_value) && r_dest_src1.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdd:
      // Dedicated SP-adjust encoding: 7-bit word-scaled immediate.
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbAddSpI7, value >> 2);
      } else if (short_form) {
        // A negative value flips ADD to SUB of the magnitude.
        opcode = (neg) ? kThumbSubRI8 : kThumbAddRI8;
      }
      break;
    case kOpSub:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbSubSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbAddRI8 : kThumbSubRI8;
      }
      break;
    case kOpCmp:
      // CMP's 8-bit immediate is unsigned, so negatives can't use it.
      if (!neg && short_form) {
        opcode = kThumbCmpRI8;
      } else {
        short_form = false;
      }
      break;
    default:
      /* Punt to OpRegRegImm - if bad case catch it there */
      short_form = false;
      break;
  }
  if (short_form) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), abs_value);
  } else {
    return OpRegRegImm(op, r_dest_src1, r_dest_src1, value);
  }
}
702
buzbee2700f7e2014-03-07 09:46:20 -0800703LIR* ArmMir2Lir::LoadConstantWide(RegStorage r_dest, int64_t value) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700704 LIR* res = nullptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700705 int32_t val_lo = Low32Bits(value);
706 int32_t val_hi = High32Bits(value);
buzbee091cc402014-03-31 10:14:40 -0700707 if (r_dest.IsFloat()) {
708 DCHECK(!r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700709 if ((val_lo == 0) && (val_hi == 0)) {
710 // TODO: we need better info about the target CPU. a vector exclusive or
711 // would probably be better here if we could rely on its existance.
712 // Load an immediate +2.0 (which encodes to 0)
buzbee091cc402014-03-31 10:14:40 -0700713 NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700714 // +0.0 = +2.0 - +2.0
buzbee091cc402014-03-31 10:14:40 -0700715 res = NewLIR3(kThumb2Vsubd, r_dest.GetReg(), r_dest.GetReg(), r_dest.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700716 } else {
717 int encoded_imm = EncodeImmDouble(value);
718 if (encoded_imm >= 0) {
buzbee091cc402014-03-31 10:14:40 -0700719 res = NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), encoded_imm);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700720 }
721 }
722 } else {
buzbee091cc402014-03-31 10:14:40 -0700723 // NOTE: Arm32 assumption here.
724 DCHECK(r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700725 if ((InexpensiveConstantInt(val_lo) && (InexpensiveConstantInt(val_hi)))) {
buzbee2700f7e2014-03-07 09:46:20 -0800726 res = LoadConstantNoClobber(r_dest.GetLow(), val_lo);
727 LoadConstantNoClobber(r_dest.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700728 }
729 }
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700730 if (res == nullptr) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700731 // No short form - load from the literal pool.
732 LIR* data_target = ScanLiteralPoolWide(literal_list_, val_lo, val_hi);
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700733 if (data_target == nullptr) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700734 data_target = AddWideData(&literal_list_, val_lo, val_hi);
735 }
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100736 ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
buzbee091cc402014-03-31 10:14:40 -0700737 if (r_dest.IsFloat()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700738 res = RawLIR(current_dalvik_offset_, kThumb2Vldrd,
buzbee091cc402014-03-31 10:14:40 -0700739 r_dest.GetReg(), rs_r15pc.GetReg(), 0, 0, 0, data_target);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700740 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800741 DCHECK(r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700742 res = RawLIR(current_dalvik_offset_, kThumb2LdrdPcRel8,
buzbee091cc402014-03-31 10:14:40 -0700743 r_dest.GetLowReg(), r_dest.GetHighReg(), rs_r15pc.GetReg(), 0, 0, data_target);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700744 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700745 AppendLIR(res);
746 }
747 return res;
748}
749
750int ArmMir2Lir::EncodeShift(int code, int amount) {
751 return ((amount & 0x1f) << 2) | code;
752}
753
// Load from [r_base + (r_index << scale)] into r_dest. FP destinations are
// handled by computing the address into a temp first (VLDR has no register
// offset form); core loads pick a 16-bit Thumb or 32-bit Thumb2 opcode
// depending on register classes and scale. Returns the load LIR.
LIR* ArmMir2Lir::LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest,
                                 int scale, OpSize size) {
  bool all_low_regs = r_base.Low8() && r_index.Low8() && r_dest.Low8();
  LIR* load;
  ArmOpcode opcode = kThumbBkpt;
  // 16-bit Thumb register-offset loads require all low registers and no shift.
  bool thumb_form = (all_low_regs && (scale == 0));
  RegStorage reg_ptr;

  if (r_dest.IsFloat()) {
    if (r_dest.IsSingle()) {
      DCHECK((size == k32) || (size == kSingle) || (size == kReference));
      opcode = kThumb2Vldrs;
      size = kSingle;
    } else {
      DCHECK(r_dest.IsDouble());
      DCHECK((size == k64) || (size == kDouble));
      opcode = kThumb2Vldrd;
      size = kDouble;
    }
  } else {
    if (size == kSingle)
      size = k32;
  }

  switch (size) {
    case kDouble:  // fall-through
      // Intentional fall-through.
    case kSingle:
      // VLDR only takes base + immediate; form the address in a temp first.
      reg_ptr = AllocTemp();
      if (scale) {
        NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
                EncodeShift(kArmLsl, scale));
      } else {
        OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
      }
      load = NewLIR3(opcode, r_dest.GetReg(), reg_ptr.GetReg(), 0);
      FreeTemp(reg_ptr);
      return load;
    case k32:
      // Intentional fall-through.
    case kReference:
      opcode = (thumb_form) ? kThumbLdrRRR : kThumb2LdrRRR;
      break;
    case kUnsignedHalf:
      opcode = (thumb_form) ? kThumbLdrhRRR : kThumb2LdrhRRR;
      break;
    case kSignedHalf:
      opcode = (thumb_form) ? kThumbLdrshRRR : kThumb2LdrshRRR;
      break;
    case kUnsignedByte:
      opcode = (thumb_form) ? kThumbLdrbRRR : kThumb2LdrbRRR;
      break;
    case kSignedByte:
      opcode = (thumb_form) ? kThumbLdrsbRRR : kThumb2LdrsbRRR;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (thumb_form)
    load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg());
  else
    load = NewLIR4(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);

  return load;
}
819
// Store r_src to [r_base + (r_index << scale)]. Mirrors LoadBaseIndexed():
// FP sources go through a temp address register (VSTR has no register offset
// form); core stores pick a 16-bit Thumb or 32-bit Thumb2 opcode. Returns the
// store LIR.
LIR* ArmMir2Lir::StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src,
                                  int scale, OpSize size) {
  bool all_low_regs = r_base.Low8() && r_index.Low8() && r_src.Low8();
  LIR* store = nullptr;
  ArmOpcode opcode = kThumbBkpt;
  // 16-bit Thumb register-offset stores require all low registers and no shift.
  bool thumb_form = (all_low_regs && (scale == 0));
  RegStorage reg_ptr;

  if (r_src.IsFloat()) {
    if (r_src.IsSingle()) {
      DCHECK((size == k32) || (size == kSingle) || (size == kReference));
      opcode = kThumb2Vstrs;
      size = kSingle;
    } else {
      DCHECK(r_src.IsDouble());
      DCHECK((size == k64) || (size == kDouble));
      // Double-precision registers must be even-numbered.
      DCHECK_EQ((r_src.GetReg() & 0x1), 0);
      opcode = kThumb2Vstrd;
      size = kDouble;
    }
  } else {
    if (size == kSingle)
      size = k32;
  }

  switch (size) {
    case kDouble:  // fall-through
      // Intentional fall-through.
    case kSingle:
      // VSTR only takes base + immediate; form the address in a temp first.
      reg_ptr = AllocTemp();
      if (scale) {
        NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
                EncodeShift(kArmLsl, scale));
      } else {
        OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
      }
      store = NewLIR3(opcode, r_src.GetReg(), reg_ptr.GetReg(), 0);
      FreeTemp(reg_ptr);
      return store;
    case k32:
      // Intentional fall-through.
    case kReference:
      opcode = (thumb_form) ? kThumbStrRRR : kThumb2StrRRR;
      break;
    case kUnsignedHalf:
      // Intentional fall-through.
    case kSignedHalf:
      opcode = (thumb_form) ? kThumbStrhRRR : kThumb2StrhRRR;
      break;
    case kUnsignedByte:
      // Intentional fall-through.
    case kSignedByte:
      opcode = (thumb_form) ? kThumbStrbRRR : kThumb2StrbRRR;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (thumb_form)
    store = NewLIR3(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg());
  else
    store = NewLIR4(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);

  return store;
}
884
Vladimir Markodb9d5232014-06-10 18:15:57 +0100885// Helper function for LoadBaseDispBody()/StoreBaseDispBody().
Vladimir Marko37573972014-06-16 10:32:25 +0100886LIR* ArmMir2Lir::LoadStoreUsingInsnWithOffsetImm8Shl2(ArmOpcode opcode, RegStorage r_base,
887 int displacement, RegStorage r_src_dest,
888 RegStorage r_work) {
Roland Levillain14d90572015-07-16 10:52:26 +0100889 DCHECK_ALIGNED(displacement, 4);
Vladimir Marko37573972014-06-16 10:32:25 +0100890 constexpr int kOffsetMask = 0xff << 2;
891 int encoded_disp = (displacement & kOffsetMask) >> 2; // Within range of the instruction.
Vladimir Markodb9d5232014-06-10 18:15:57 +0100892 RegStorage r_ptr = r_base;
Vladimir Marko37573972014-06-16 10:32:25 +0100893 if ((displacement & ~kOffsetMask) != 0) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100894 r_ptr = r_work.Valid() ? r_work : AllocTemp();
Vladimir Marko37573972014-06-16 10:32:25 +0100895 // Add displacement & ~kOffsetMask to base, it's a single instruction for up to +-256KiB.
896 OpRegRegImm(kOpAdd, r_ptr, r_base, displacement & ~kOffsetMask);
Vladimir Markodb9d5232014-06-10 18:15:57 +0100897 }
898 LIR* lir = nullptr;
899 if (!r_src_dest.IsPair()) {
900 lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp);
901 } else {
902 lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(),
903 encoded_disp);
904 }
Vladimir Marko37573972014-06-16 10:32:25 +0100905 if ((displacement & ~kOffsetMask) != 0 && !r_work.Valid()) {
Vladimir Markodb9d5232014-06-10 18:15:57 +0100906 FreeTemp(r_ptr);
907 }
908 return lir;
909}
910
/*
 * Load value from base + displacement. Optionally perform null check
 * on base (which must have an associated s_reg and MIR). If not
 * performing null check, incoming MIR can be null.
 */
LIR* ArmMir2Lir::LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest,
                                  OpSize size) {
  LIR* load = nullptr;
  ArmOpcode opcode16 = kThumbBkpt;  // 16-bit Thumb opcode.
  ArmOpcode opcode32 = kThumbBkpt;  // 32-bit Thumb2 opcode.
  bool short_form = false;
  bool all_low = r_dest.Is32Bit() && r_base.Low8() && r_dest.Low8();
  int scale = 0;  // Used for opcode16 and some indexed loads.
  bool already_generated = false;
  // First, select opcodes (or emit directly for 64-bit/FP sizes).
  switch (size) {
    case kDouble:
      // Intentional fall-through.
    case k64:
      if (r_dest.IsFloat()) {
        DCHECK(!r_dest.IsPair());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrd, r_base, displacement, r_dest);
      } else {
        DCHECK(r_dest.IsPair());
        // Use the r_dest.GetLow() for the temporary pointer if needed.
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2LdrdI8, r_base, displacement, r_dest,
                                                    r_dest.GetLow());
      }
      already_generated = true;
      break;
    case kSingle:
      // Intentional fall-through.
    case k32:
      // Intentional fall-through.
    case kReference:
      if (r_dest.IsFloat()) {
        DCHECK(r_dest.IsSingle());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrs, r_base, displacement, r_dest);
        already_generated = true;
        break;
      }
      DCHECK_ALIGNED(displacement, 4);
      scale = 2;
      // PC- and SP-relative 16-bit forms take a 10-bit (word-scaled) offset.
      if (r_dest.Low8() && (r_base == rs_rARM_PC) && (displacement <= 1020) &&
          (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrPcRel;
      } else if (r_dest.Low8() && (r_base == rs_rARM_SP) && (displacement <= 1020) &&
                 (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrSpRel;
      } else {
        // Generic 16-bit form takes a 5-bit scaled immediate.
        short_form = all_low && (displacement >> (5 + scale)) == 0;
        opcode16 = kThumbLdrRRI5;
        opcode32 = kThumb2LdrRRI12;
      }
      break;
    case kUnsignedHalf:
      DCHECK_ALIGNED(displacement, 2);
      scale = 1;
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrhRRI5;
      opcode32 = kThumb2LdrhRRI12;
      break;
    case kSignedHalf:
      DCHECK_ALIGNED(displacement, 2);
      scale = 1;
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrshRRI12;
      break;
    case kUnsignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrbRRI5;
      opcode32 = kThumb2LdrbRRI12;
      break;
    case kSignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrsbRRI12;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }

  // Then emit, preferring (in order): 16-bit immediate form, 32-bit 12-bit
  // immediate form, add+short-immediate, or a full indexed load.
  if (!already_generated) {
    if (short_form) {
      load = NewLIR3(opcode16, r_dest.GetReg(), r_base.GetReg(), displacement >> scale);
    } else if ((displacement >> 12) == 0) {  // Thumb2 form.
      load = NewLIR3(opcode32, r_dest.GetReg(), r_base.GetReg(), displacement);
    } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
        InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
      // In this case, using LoadIndexed would emit 3 insns (movw+movt+ldr) but we can
      // actually do it in two because we know that the kOpAdd is a single insn. On the
      // other hand, we introduce an extra dependency, so this is not necessarily faster.
      if (opcode16 != kThumbBkpt && r_dest.Low8() &&
          InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
        // We can use the 16-bit Thumb opcode for the load.
        // Note: r_dest doubles as the address temp; safe since it is a core reg.
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~(0x1f << scale));
        load = NewLIR3(opcode16, r_dest.GetReg(), r_dest.GetReg(), (displacement >> scale) & 0x1f);
      } else {
        DCHECK_NE(opcode32, kThumbBkpt);
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~0x00000fff);
        load = NewLIR3(opcode32, r_dest.GetReg(), r_dest.GetReg(), displacement & 0x00000fff);
      }
    } else {
      if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
          (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
        scale = 0;  // Prefer unscaled indexing if the same number of insns.
      }
      RegStorage reg_offset = AllocTemp();
      LoadConstant(reg_offset, displacement >> scale);
      DCHECK(!r_dest.IsFloat());
      load = LoadBaseIndexed(r_base, reg_offset, r_dest, scale, size);
      FreeTemp(reg_offset);
    }
  }

  // TODO: in future may need to differentiate Dalvik accesses w/ spills
  if (mem_ref_type_ == ResourceMask::kDalvikReg) {
    DCHECK_EQ(r_base, rs_rARM_SP);
    AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit());
  }
  return load;
}
1035
// Load from base + displacement, honoring volatile semantics. Volatile 64-bit
// loads on CPUs without atomic LDRD use LDREXD instead; all volatile loads are
// followed by a LoadAny barrier.
LIR* ArmMir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                              OpSize size, VolatileKind is_volatile) {
  // TODO: base this on target.
  if (size == kWord) {
    size = k32;
  }
  LIR* load;
  if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
      !cu_->compiler_driver->GetInstructionSetFeatures()->
          AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
    // Only 64-bit load needs special handling.
    // If the cpu supports LPAE, aligned LDRD is atomic - fall through to LoadBaseDisp().
    DCHECK(!r_dest.IsFloat());  // See RegClassForFieldLoadSave().
    // Use LDREXD for the atomic load. (Expect displacement > 0, don't optimize for == 0.)
    RegStorage r_ptr = AllocTemp();
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    load = NewLIR3(kThumb2Ldrexd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_ptr.GetReg());
    FreeTemp(r_ptr);
  } else {
    load = LoadBaseDispBody(r_base, displacement, r_dest, size);
  }

  if (UNLIKELY(is_volatile == kVolatile)) {
    // Prevent reordering of the load with subsequent memory accesses.
    GenMemBarrier(kLoadAny);
  }

  return load;
}
1064
Brian Carlstrom7940e442013-07-12 13:46:57 -07001065
// Store r_src to base + displacement. Mirrors LoadBaseDispBody(): selects the
// smallest encoding (16-bit Thumb immediate, 32-bit Thumb2 immediate,
// add+short-immediate, or full indexed store) and annotates Dalvik register
// accesses for spill tracking.
LIR* ArmMir2Lir::StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src,
                                   OpSize size) {
  LIR* store = nullptr;
  ArmOpcode opcode16 = kThumbBkpt;  // 16-bit Thumb opcode.
  ArmOpcode opcode32 = kThumbBkpt;  // 32-bit Thumb2 opcode.
  bool short_form = false;
  bool all_low = r_src.Is32Bit() && r_base.Low8() && r_src.Low8();
  int scale = 0;  // Used for opcode16 and some indexed loads.
  bool already_generated = false;
  // First, select opcodes (or emit directly for 64-bit/FP sizes).
  switch (size) {
    case kDouble:
      // Intentional fall-through.
    case k64:
      if (r_src.IsFloat()) {
        // Note: If the register is retrieved by register allocator, it should never be a pair.
        // But some functions in mir2lir assume 64-bit registers are 32-bit register pairs.
        // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
        if (r_src.IsPair()) {
          r_src = As64BitFloatReg(r_src);
        }
        DCHECK(!r_src.IsPair());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrd, r_base, displacement, r_src);
      } else {
        DCHECK(r_src.IsPair());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2StrdI8, r_base, displacement, r_src);
      }
      already_generated = true;
      break;
    case kSingle:
      // Intentional fall-through.
    case k32:
      // Intentional fall-through.
    case kReference:
      if (r_src.IsFloat()) {
        DCHECK(r_src.IsSingle());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrs, r_base, displacement, r_src);
        already_generated = true;
        break;
      }
      DCHECK_ALIGNED(displacement, 4);
      scale = 2;
      // SP-relative 16-bit form takes a 10-bit (word-scaled) offset.
      if (r_src.Low8() && (r_base == rs_r13sp) && (displacement <= 1020) && (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbStrSpRel;
      } else {
        // Generic 16-bit form takes a 5-bit scaled immediate.
        short_form = all_low && (displacement >> (5 + scale)) == 0;
        opcode16 = kThumbStrRRI5;
        opcode32 = kThumb2StrRRI12;
      }
      break;
    case kUnsignedHalf:
    case kSignedHalf:
      DCHECK_ALIGNED(displacement, 2);
      scale = 1;
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbStrhRRI5;
      opcode32 = kThumb2StrhRRI12;
      break;
    case kUnsignedByte:
    case kSignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbStrbRRI5;
      opcode32 = kThumb2StrbRRI12;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (!already_generated) {
    if (short_form) {
      store = NewLIR3(opcode16, r_src.GetReg(), r_base.GetReg(), displacement >> scale);
    } else if ((displacement >> 12) == 0) {
      store = NewLIR3(opcode32, r_src.GetReg(), r_base.GetReg(), displacement);
    } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
        InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
      // In this case, using StoreIndexed would emit 3 insns (movw+movt+str) but we can
      // actually do it in two because we know that the kOpAdd is a single insn. On the
      // other hand, we introduce an extra dependency, so this is not necessarily faster.
      RegStorage r_scratch = AllocTemp();
      if (opcode16 != kThumbBkpt && r_src.Low8() && r_scratch.Low8() &&
          InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
        // We can use the 16-bit Thumb opcode for the store.
        OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~(0x1f << scale));
        store = NewLIR3(opcode16, r_src.GetReg(), r_scratch.GetReg(),
                        (displacement >> scale) & 0x1f);
      } else {
        DCHECK_NE(opcode32, kThumbBkpt);
        OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~0x00000fff);
        store = NewLIR3(opcode32, r_src.GetReg(), r_scratch.GetReg(), displacement & 0x00000fff);
      }
      FreeTemp(r_scratch);
    } else {
      if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
          (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
        scale = 0;  // Prefer unscaled indexing if the same number of insns.
      }
      RegStorage r_scratch = AllocTemp();
      LoadConstant(r_scratch, displacement >> scale);
      DCHECK(!r_src.IsFloat());
      store = StoreBaseIndexed(r_base, r_scratch, r_src, scale, size);
      FreeTemp(r_scratch);
    }
  }

  // TODO: In future, may need to differentiate Dalvik & spill accesses
  if (mem_ref_type_ == ResourceMask::kDalvikReg) {
    DCHECK_EQ(r_base, rs_rARM_SP);
    AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit());
  }
  return store;
}
1177
// Store r_src to base + displacement, honoring volatile semantics. Volatile
// 64-bit stores on CPUs without atomic STRD use an LDREXD/STREXD retry loop;
// volatile stores are bracketed by AnyStore/AnyAny barriers. Returns the first
// instruction that dereferences the base (usable for implicit null checks).
LIR* ArmMir2Lir::StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
                               OpSize size, VolatileKind is_volatile) {
  if (UNLIKELY(is_volatile == kVolatile)) {
    // Ensure that prior accesses become visible to other threads first.
    GenMemBarrier(kAnyStore);
  }

  LIR* null_ck_insn;
  if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
      !cu_->compiler_driver->GetInstructionSetFeatures()->
          AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
    // Only 64-bit store needs special handling.
    // If the cpu supports LPAE, aligned STRD is atomic - fall through to StoreBaseDisp().
    // Use STREXD for the atomic store. (Expect displacement > 0, don't optimize for == 0.)
    DCHECK(!r_src.IsFloat());  // See RegClassForFieldLoadSave().
    RegStorage r_ptr = AllocTemp();
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    LIR* fail_target = NewLIR0(kPseudoTargetLabel);
    // We have only 5 temporary registers available and if r_base, r_src and r_ptr already
    // take 4, we can't directly allocate 2 more for LDREXD temps. In that case clobber r_ptr
    // in LDREXD and recalculate it from r_base.
    RegStorage r_temp = AllocTemp();
    RegStorage r_temp_high = AllocTemp(false);  // We may not have another temp.
    if (r_temp_high.Valid()) {
      null_ck_insn = NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_temp_high.GetReg(), r_ptr.GetReg());
      FreeTemp(r_temp_high);
      FreeTemp(r_temp);
    } else {
      // If we don't have another temp, clobber r_ptr in LDREXD and reload it.
      null_ck_insn = NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_ptr.GetReg(), r_ptr.GetReg());
      FreeTemp(r_temp);  // May need the temp for kOpAdd.
      OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    }
    // STREXD writes 0 to r_temp on success; loop back to the LDREXD on failure.
    NewLIR4(kThumb2Strexd, r_temp.GetReg(), r_src.GetLowReg(), r_src.GetHighReg(), r_ptr.GetReg());
    OpCmpImmBranch(kCondNe, r_temp, 0, fail_target);
    FreeTemp(r_ptr);
  } else {
    // TODO: base this on target.
    if (size == kWord) {
      size = k32;
    }

    null_ck_insn = StoreBaseDispBody(r_base, displacement, r_src, size);
  }

  if (UNLIKELY(is_volatile == kVolatile)) {
    // Preserve order with respect to any subsequent volatile loads.
    // We need StoreLoad, but that generally requires the most expensive barrier.
    GenMemBarrier(kAnyAny);
  }

  return null_ck_insn;
}
1231
buzbee2700f7e2014-03-07 09:46:20 -08001232LIR* ArmMir2Lir::OpFpRegCopy(RegStorage r_dest, RegStorage r_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001233 int opcode;
buzbee091cc402014-03-31 10:14:40 -07001234 DCHECK_EQ(r_dest.IsDouble(), r_src.IsDouble());
1235 if (r_dest.IsDouble()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001236 opcode = kThumb2Vmovd;
1237 } else {
buzbee091cc402014-03-31 10:14:40 -07001238 if (r_dest.IsSingle()) {
1239 opcode = r_src.IsSingle() ? kThumb2Vmovs : kThumb2Fmsr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001240 } else {
buzbee091cc402014-03-31 10:14:40 -07001241 DCHECK(r_src.IsSingle());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001242 opcode = kThumb2Fmrs;
1243 }
1244 }
buzbee2700f7e2014-03-07 09:46:20 -08001245 LIR* res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001246 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
1247 res->flags.is_nop = true;
1248 }
1249 return res;
1250}
1251
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001252LIR* ArmMir2Lir::OpMem(OpKind op ATTRIBUTE_UNUSED,
1253 RegStorage r_base ATTRIBUTE_UNUSED,
1254 int disp ATTRIBUTE_UNUSED) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001255 LOG(FATAL) << "Unexpected use of OpMem for Arm";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001256 UNREACHABLE();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001257}
1258
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001259LIR* ArmMir2Lir::InvokeTrampoline(OpKind op,
1260 RegStorage r_tgt,
1261 // The address of the trampoline is already loaded into r_tgt.
1262 QuickEntrypointEnum trampoline ATTRIBUTE_UNUSED) {
Andreas Gampe98430592014-07-27 19:44:50 -07001263 return OpReg(op, r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001264}
1265
Serban Constantinescu63999682014-07-15 17:44:21 +01001266size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) {
1267 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
1268 DCHECK((check_flags & IS_LOAD) || (check_flags & IS_STORE));
1269 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0;
1270
1271 if (check_flags & SCALED_OFFSET_X2) {
1272 offset = offset * 2;
1273 } else if (check_flags & SCALED_OFFSET_X4) {
1274 offset = offset * 4;
1275 }
1276 return offset;
1277}
1278
Vladimir Markocc234812015-04-07 09:36:09 +01001279void ArmMir2Lir::CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs) {
1280 // Start with the default counts.
1281 Mir2Lir::CountRefs(core_counts, fp_counts, num_regs);
1282
1283 if (pc_rel_temp_ != nullptr) {
1284 // Now, if the dex cache array base temp is used only once outside any loops (weight = 1),
Vladimir Marko1961b602015-04-08 20:51:48 +01001285 // avoid the promotion, otherwise boost the weight by factor 3 because the full PC-relative
1286 // load sequence is 4 instructions long and by promoting the PC base we save up to 3
1287 // instructions per use.
Vladimir Markocc234812015-04-07 09:36:09 +01001288 int p_map_idx = SRegToPMap(pc_rel_temp_->s_reg_low);
1289 if (core_counts[p_map_idx].count == 1) {
1290 core_counts[p_map_idx].count = 0;
1291 } else {
Vladimir Marko1961b602015-04-08 20:51:48 +01001292 core_counts[p_map_idx].count *= 3;
Vladimir Markocc234812015-04-07 09:36:09 +01001293 }
1294 }
1295}
1296
1297void ArmMir2Lir::DoPromotion() {
1298 if (CanUseOpPcRelDexCacheArrayLoad()) {
1299 pc_rel_temp_ = mir_graph_->GetNewCompilerTemp(kCompilerTempBackend, false);
1300 }
1301
1302 Mir2Lir::DoPromotion();
1303
1304 if (pc_rel_temp_ != nullptr) {
1305 // Now, if the dex cache array base temp is promoted, remember the register but
1306 // always remove the temp's stack location to avoid unnecessarily bloating the stack.
1307 dex_cache_arrays_base_reg_ = mir_graph_->reg_location_[pc_rel_temp_->s_reg_low].reg;
1308 DCHECK(!dex_cache_arrays_base_reg_.Valid() || !dex_cache_arrays_base_reg_.IsFloat());
1309 mir_graph_->RemoveLastCompilerTemp(kCompilerTempBackend, false, pc_rel_temp_);
1310 pc_rel_temp_ = nullptr;
1311 }
1312}
1313
Brian Carlstrom7940e442013-07-12 13:46:57 -07001314} // namespace art