blob: c3371cf3293a502e837150103487416bf2a5c6ca [file] [log] [blame]
Brian Carlstrom7940e442013-07-12 13:46:57 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstrom7940e442013-07-12 13:46:57 -070017#include "codegen_arm.h"
Ian Rogersd582fa42014-11-05 23:46:43 -080018
19#include "arch/arm/instruction_set_features_arm.h"
20#include "arm_lir.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080021#include "base/logging.h"
Vladimir Markocc234812015-04-07 09:36:09 +010022#include "dex/mir_graph.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070023#include "dex/quick/mir_to_lir-inl.h"
buzbeeb5860fb2014-06-21 15:31:01 -070024#include "dex/reg_storage_eq.h"
Andreas Gampe0b9203e2015-01-22 20:39:27 -080025#include "driver/compiler_driver.h"
Brian Carlstrom7940e442013-07-12 13:46:57 -070026
27namespace art {
28
29/* This file contains codegen for the Thumb ISA. */
30
/*
 * Determine whether the 32-bit bit pattern `value` can be encoded as a VFP
 * 8-bit float immediate (abcdefgh form). Such a float has the shape
 * aBbbbbbc defgh000 00000000 00000000 with B == NOT(b). Returns the encoded
 * 8-bit value, or -1 if the pattern is not representable.
 */
static int32_t EncodeImmSingle(int32_t value) {
  const int32_t sign = (value >> 31) & 0x1;           // bit 'a'
  const int32_t exp_top = (value >> 30) & 0x1;        // must equal NOT(b)
  const int32_t rep_bit = (value >> 29) & 0x1;        // bit 'b'
  const int32_t rep_run = (value >> 25) & 0x1f;       // five copies of 'b'
  const int32_t frac_top = (value >> 19) & 0x3f;      // 'cdefgh' slice
  const int32_t frac_rest = value & 0x0007ffff;       // must be all zero
  if (frac_rest != 0) {
    return -1;
  }
  if (rep_bit != 0) {
    // b == 1: exponent top bit must be 0 and the run all ones.
    if ((exp_top != 0) || (rep_run != 0x1f)) {
      return -1;
    }
  } else {
    // b == 0: exponent top bit must be 1 and the run all zeroes.
    if ((exp_top != 1) || (rep_run != 0x0)) {
      return -1;
    }
  }
  return (sign << 7) | (rep_bit << 6) | frac_top;
}
51
/*
 * Determine whether value can be encoded as a Thumb2 floating point
 * immediate. If not, return -1. If so return encoded 8-bit value.
 *
 * An encodable double has the shape aBbbbbbb bbcdefgh followed by 48 zero
 * mantissa bits, where B == NOT(b).
 */
static int32_t EncodeImmDouble(int64_t value) {
  const int32_t sign = static_cast<int32_t>((static_cast<uint64_t>(value) >> 63) & 0x1);
  const int32_t exp_top = static_cast<int32_t>((value >> 62) & 0x1);   // must equal NOT(b)
  const int32_t rep_bit = static_cast<int32_t>((value >> 61) & 0x1);   // bit 'b'
  const int32_t rep_run = static_cast<int32_t>((value >> 54) & 0xff);  // eight copies of 'b'
  const int32_t frac_top = static_cast<int32_t>((value >> 48) & 0x3f); // 'cdefgh' slice
  const uint64_t frac_rest = static_cast<uint64_t>(value) & UINT64_C(0x0000ffffffffffff);
  if (frac_rest != 0ull) {
    return -1;
  }
  if (rep_bit != 0) {
    // b == 1: exponent top bit must be 0 and the run all ones.
    if ((exp_top != 0) || (rep_run != 0xff)) {
      return -1;
    }
  } else {
    // b == 0: exponent top bit must be 1 and the run all zeroes.
    if ((exp_top != 1) || (rep_run != 0x0)) {
      return -1;
    }
  }
  return (sign << 7) | (rep_bit << 6) | frac_top;
}
76
// Materialize a 32-bit constant into the single-precision FP register r_dest.
// Tries (in order): synthesizing +0.0 via vsub, a VFP 8-bit immediate move,
// and finally a PC-relative load from the literal pool.
// Returns the last LIR emitted (the one producing the final value).
LIR* ArmMir2Lir::LoadFPConstantValue(int r_dest, int value) {
  DCHECK(RegStorage::IsSingle(r_dest));
  if (value == 0) {
    // TODO: we need better info about the target CPU. a vector exclusive or
    // would probably be better here if we could rely on its existence.
    // Load an immediate +2.0 (which encodes to 0)
    NewLIR2(kThumb2Vmovs_IMM8, r_dest, 0);
    // +0.0 = +2.0 - +2.0
    return NewLIR3(kThumb2Vsubs, r_dest, r_dest, r_dest);
  } else {
    // Single instruction if the bit pattern fits a VFP 8-bit immediate.
    int encoded_imm = EncodeImmSingle(value);
    if (encoded_imm >= 0) {
      return NewLIR2(kThumb2Vmovs_IMM8, r_dest, encoded_imm);
    }
  }
  // Fall back to the literal pool; reuse an existing entry if present.
  LIR* data_target = ScanLiteralPool(literal_list_, value, 0);
  if (data_target == NULL) {
    data_target = AddWordData(&literal_list_, value);
  }
  // Mark the load as a literal access for resource tracking.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
  LIR* load_pc_rel = RawLIR(current_dalvik_offset_, kThumb2Vldrs,
                            r_dest, rs_r15pc.GetReg(), 0, 0, 0, data_target);
  AppendLIR(load_pc_rel);
  return load_pc_rel;
}
102
/*
 * Determine whether value can be encoded as a Thumb2 modified
 * immediate. If not, return -1. If so, return i:imm3:a:bcdefgh form.
 */
int ArmMir2Lir::ModifiedImmediate(uint32_t value) {
  uint32_t b0 = value & 0xff;

  /* Note: case of value==0 must use 0:000:0:0000000 encoding */
  if (value <= 0xFF)
    return b0;  // 0:000:a:bcdefgh
  // Byte replicated into both halfwords: 0x00XY00XY.
  if (value == ((b0 << 16) | b0))
    return (0x1 << 8) | b0; /* 0:001:a:bcdefgh */
  // Byte replicated into all four bytes: 0xXYXYXYXY.
  if (value == ((b0 << 24) | (b0 << 16) | (b0 << 8) | b0))
    return (0x3 << 8) | b0; /* 0:011:a:bcdefgh */
  b0 = (value >> 8) & 0xff;
  // Byte replicated into the odd bytes: 0xXY00XY00.
  if (value == ((b0 << 24) | (b0 << 8)))
    return (0x2 << 8) | b0; /* 0:010:a:bcdefgh */
  /* Can we do it with rotation? */
  int z_leading = CLZ(value);
  int z_trailing = CTZ(value);
  /* A run of eight or fewer active bits? */
  if ((z_leading + z_trailing) < 24)
    return -1;  /* No - bail */
  /* left-justify the constant, discarding msb (known to be 1) */
  value <<= z_leading + 1;
  /* Create bcdefgh */
  value >>= 25;
  /* Put it all together */
  return value | ((0x8 + z_leading) << 7); /* [01000..11111]:bcdefgh */
}
133
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700134bool ArmMir2Lir::InexpensiveConstantInt(int32_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700135 return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
136}
137
// Opcode-aware variant: decides whether `value` can be folded directly into
// the instruction selected for the given Dalvik opcode (as some immediate
// form), so no separate constant load is needed.
bool ArmMir2Lir::InexpensiveConstantInt(int32_t value, Instruction::Code opcode) {
  switch (opcode) {
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
      if ((value >> 12) == (value >> 31)) {  // Signed 12-bit, RRI12 versions of ADD/SUB.
        return true;
      }
      FALLTHROUGH_INTENDED;
    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE:
      // Compares can use CMP (value) or CMN (-value) with a modified immediate.
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(-value) >= 0);
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      // Shift amounts always fit in the immediate field.
      return true;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      if ((value >> 16) == 0) {
        return true;  // movw, 16-bit unsigned.
      }
      FALLTHROUGH_INTENDED;
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::AND_INT_LIT16:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::OR_INT_LIT16:
    case Instruction::OR_INT_LIT8:
      // AND/OR have inverted-immediate forms (BIC/ORN).
      return (ModifiedImmediate(value) >= 0) || (ModifiedImmediate(~value) >= 0);
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
    case Instruction::XOR_INT_LIT16:
    case Instruction::XOR_INT_LIT8:
      // EOR has no inverted-immediate form.
      return (ModifiedImmediate(value) >= 0);
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::MUL_INT_LIT8:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::DIV_INT_LIT8:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::REM_INT_LIT8:
    case Instruction::REM_INT_LIT16: {
      // Cheap if the multiply (or the divisor's reciprocal path) can be done
      // with at most two shift/add-style ops.
      EasyMultiplyOp ops[2];
      return GetEasyMultiplyTwoOps(value, ops);
    }
    default:
      return false;
  }
}
202
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700203bool ArmMir2Lir::InexpensiveConstantFloat(int32_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700204 return EncodeImmSingle(value) >= 0;
205}
206
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700207bool ArmMir2Lir::InexpensiveConstantLong(int64_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700208 return InexpensiveConstantInt(High32Bits(value)) && InexpensiveConstantInt(Low32Bits(value));
209}
210
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700211bool ArmMir2Lir::InexpensiveConstantDouble(int64_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700212 return EncodeImmDouble(value) >= 0;
213}
214
/*
 * Load an immediate using a shortcut if possible; otherwise
 * grab from the per-translation literal pool.
 *
 * No additional register clobbering operation performed. Use this version when
 * 1) r_dest is freshly returned from AllocTemp or
 * 2) The codegen is under fixed register usage
 */
LIR* ArmMir2Lir::LoadConstantNoClobber(RegStorage r_dest, int value) {
  LIR* res;
  int mod_imm;

  // FP destinations have their own encodings and literal-pool path.
  if (r_dest.IsFloat()) {
    return LoadFPConstantValue(r_dest.GetReg(), value);
  }

  /* See if the value can be constructed cheaply */
  // 16-bit Thumb MOV: 8-bit unsigned immediate, low registers only.
  if (r_dest.Low8() && (value >= 0) && (value <= 255)) {
    return NewLIR2(kThumbMovImm, r_dest.GetReg(), value);
  }
  /* Check Modified immediate special cases */
  mod_imm = ModifiedImmediate(value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MovI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  // Try the bitwise-inverted value with MVN.
  mod_imm = ModifiedImmediate(~value);
  if (mod_imm >= 0) {
    res = NewLIR2(kThumb2MvnI8M, r_dest.GetReg(), mod_imm);
    return res;
  }
  /* 16-bit immediate? */
  if ((value & 0xffff) == value) {
    res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), value);
    return res;
  }
  /* Do a low/high pair */
  // movw low half, then movt high half; return the first LIR of the pair.
  res = NewLIR2(kThumb2MovImm16, r_dest.GetReg(), Low16Bits(value));
  NewLIR2(kThumb2MovImm16H, r_dest.GetReg(), High16Bits(value));
  return res;
}
256
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700257LIR* ArmMir2Lir::OpUnconditionalBranch(LIR* target) {
buzbee091cc402014-03-31 10:14:40 -0700258 LIR* res = NewLIR1(kThumbBUncond, 0 /* offset to be patched during assembly */);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700259 res->target = target;
260 return res;
261}
262
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700263LIR* ArmMir2Lir::OpCondBranch(ConditionCode cc, LIR* target) {
Vladimir Marko174636d2014-11-26 12:33:45 +0000264 LIR* branch = NewLIR2(kThumbBCond, 0 /* offset to be patched */,
Brian Carlstrom7940e442013-07-12 13:46:57 -0700265 ArmConditionEncoding(cc));
266 branch->target = target;
267 return branch;
268}
269
buzbee2700f7e2014-03-07 09:46:20 -0800270LIR* ArmMir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700271 ArmOpcode opcode = kThumbBkpt;
272 switch (op) {
273 case kOpBlx:
274 opcode = kThumbBlxR;
275 break;
Brian Carlstrom60d7a652014-03-13 18:10:08 -0700276 case kOpBx:
277 opcode = kThumbBx;
278 break;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700279 default:
280 LOG(FATAL) << "Bad opcode " << op;
281 }
buzbee2700f7e2014-03-07 09:46:20 -0800282 return NewLIR1(opcode, r_dest_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700283}
284
// Two-register operation with an optional shift applied to r_src2.
// Selects a 16-bit Thumb encoding when the shift is zero and both registers
// are low (r0-r7); otherwise falls back to the 32-bit Thumb2 form.
// Ops asserting DCHECK_EQ(shift, 0) have no shifted-operand encoding here.
LIR* ArmMir2Lir::OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2,
                               int shift) {
  bool thumb_form =
      ((shift == 0) && r_dest_src1.Low8() && r_src2.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdc:
      opcode = (thumb_form) ? kThumbAdcRR : kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = (thumb_form) ? kThumbAndRR : kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = (thumb_form) ? kThumbBicRR : kThumb2BicRRR;
      break;
    case kOpCmn:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbCmnRR : kThumb2CmnRR;
      break;
    case kOpCmp:
      // 16-bit CMP has distinct encodings per low/high register combination.
      if (thumb_form)
        opcode = kThumbCmpRR;
      else if ((shift == 0) && !r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbCmpHH;
      else if ((shift == 0) && r_dest_src1.Low8())
        opcode = kThumbCmpLH;
      else if (shift == 0)
        opcode = kThumbCmpHL;
      else
        opcode = kThumb2CmpRR;
      break;
    case kOpXor:
      opcode = (thumb_form) ? kThumbEorRR : kThumb2EorRRR;
      break;
    case kOpMov:
      DCHECK_EQ(shift, 0);
      // 16-bit MOV has distinct encodings per low/high register combination.
      if (r_dest_src1.Low8() && r_src2.Low8())
        opcode = kThumbMovRR;
      else if (!r_dest_src1.Low8() && !r_src2.Low8())
        opcode = kThumbMovRR_H2H;
      else if (r_dest_src1.Low8())
        opcode = kThumbMovRR_H2L;
      else
        opcode = kThumbMovRR_L2H;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbMul : kThumb2MulRRR;
      break;
    case kOpMvn:
      opcode = (thumb_form) ? kThumbMvn : kThumb2MnvRR;
      break;
    case kOpNeg:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbNeg : kThumb2NegRR;
      break;
    case kOpOr:
      opcode = (thumb_form) ? kThumbOrr : kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = (thumb_form) ? kThumbSbc : kThumb2SbcRRR;
      break;
    case kOpTst:
      opcode = (thumb_form) ? kThumbTst : kThumb2TstRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLslRR : kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbLsrRR : kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbAsrRR : kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = (thumb_form) ? kThumbRorRR : kThumb2RorRRR;
      break;
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRev:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRev;
      break;
    case kOpRevsh:
      DCHECK_EQ(shift, 0);
      if (!thumb_form) {
        // Binary, but rm is encoded twice.
        return NewLIR3(kThumb2RevshRR, r_dest_src1.GetReg(), r_src2.GetReg(), r_src2.GetReg());
      }
      opcode = kThumbRevsh;
      break;
    case kOp2Byte:
      DCHECK_EQ(shift, 0);
      // Sign-extend byte via SBFX <rd>, <rn>, #0, #8.
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 8);
    case kOp2Short:
      DCHECK_EQ(shift, 0);
      // Sign-extend halfword via SBFX <rd>, <rn>, #0, #16.
      return NewLIR4(kThumb2Sbfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    case kOp2Char:
      DCHECK_EQ(shift, 0);
      // Zero-extend halfword via UBFX <rd>, <rn>, #0, #16.
      return NewLIR4(kThumb2Ubfx, r_dest_src1.GetReg(), r_src2.GetReg(), 0, 16);
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  // Emit with the operand count the chosen encoding expects.
  if (EncodingMap[opcode].flags & IS_BINARY_OP) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), r_src2.GetReg());
  } else if (EncodingMap[opcode].flags & IS_TERTIARY_OP) {
    if (EncodingMap[opcode].field_loc[2].kind == kFmtShift) {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_src2.GetReg(), shift);
    } else {
      return NewLIR3(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg());
    }
  } else if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest_src1.GetReg(), r_dest_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    LOG(FATAL) << "Unexpected encoding operand count";
    return NULL;
  }
}
417
// Convenience wrapper: register-register op with no shift applied.
LIR* ArmMir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
  return OpRegRegShift(op, r_dest_src1, r_src2, 0);
}
421
// Register-from-memory move hook; not implemented for the ARM backend.
LIR* ArmMir2Lir::OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type) {
  UNUSED(r_dest, r_base, offset, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}
427
// Memory-from-register move hook; not implemented for the ARM backend.
LIR* ArmMir2Lir::OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type) {
  UNUSED(r_base, offset, r_src, move_type);
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}
433
// Conditional register-register op hook; deliberately unsupported on ARM.
LIR* ArmMir2Lir::OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) {
  UNUSED(op, cc, r_dest, r_src);
  LOG(FATAL) << "Unexpected use of OpCondRegReg for Arm";
  UNREACHABLE();
}
439
// Three-register operation with an optional shift applied to r_src2.
// A 16-bit Thumb encoding is used only when the shift is zero and all three
// registers are low (r0-r7) and the op has such a form; otherwise the 32-bit
// Thumb2 encoding is chosen. Ops asserting DCHECK_EQ(shift, 0) have no
// shifted-operand variant.
LIR* ArmMir2Lir::OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1,
                                  RegStorage r_src2, int shift) {
  ArmOpcode opcode = kThumbBkpt;
  bool thumb_form = (shift == 0) && r_dest.Low8() && r_src1.Low8() && r_src2.Low8();
  switch (op) {
    case kOpAdd:
      opcode = (thumb_form) ? kThumbAddRRR : kThumb2AddRRR;
      break;
    case kOpSub:
      opcode = (thumb_form) ? kThumbSubRRR : kThumb2SubRRR;
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRR;
      break;
    case kOpAnd:
      opcode = kThumb2AndRRR;
      break;
    case kOpBic:
      opcode = kThumb2BicRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2MulRRR;
      break;
    case kOpDiv:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2SdivRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRR;
      break;
    case kOpLsl:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LslRRR;
      break;
    case kOpLsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2LsrRRR;
      break;
    case kOpAsr:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2AsrRRR;
      break;
    case kOpRor:
      DCHECK_EQ(shift, 0);
      opcode = kThumb2RorRRR;
      break;
    default:
      LOG(FATAL) << "Bad opcode: " << op;
      break;
  }
  DCHECK(!IsPseudoLirOp(opcode));
  // Emit with the operand count the chosen encoding expects.
  if (EncodingMap[opcode].flags & IS_QUAD_OP) {
    return NewLIR4(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg(), shift);
  } else {
    DCHECK(EncodingMap[opcode].flags & IS_TERTIARY_OP);
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), r_src2.GetReg());
  }
}
508
// Convenience wrapper: three-register op with no shift applied.
LIR* ArmMir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2) {
  return OpRegRegRegShift(op, r_dest, r_src1, r_src2, 0);
}
512
// Two-register operation with an immediate operand. Tries progressively more
// general immediate encodings (16-bit Thumb forms, 12-bit ADD/SUB, modified
// immediate, inverted/negated modified immediate) and, if none fits, loads the
// constant into a scratch register and uses the three-register alternative.
LIR* ArmMir2Lir::OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  ArmOpcode opcode = kThumbBkpt;
  ArmOpcode alt_opcode = kThumbBkpt;  // Register-register fallback encoding.
  bool all_low_regs = r_dest.Low8() && r_src1.Low8();
  int32_t mod_imm = ModifiedImmediate(value);

  switch (op) {
    case kOpLsl:
      if (all_low_regs)
        return NewLIR3(kThumbLslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LslRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpLsr:
      if (all_low_regs)
        return NewLIR3(kThumbLsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2LsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAsr:
      if (all_low_regs)
        return NewLIR3(kThumbAsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
      else
        return NewLIR3(kThumb2AsrRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpRor:
      return NewLIR3(kThumb2RorRRI5, r_dest.GetReg(), r_src1.GetReg(), value);
    case kOpAdd:
      // Special 16-bit SP- and PC-relative adds (word-aligned, <= 1020).
      if (r_dest.Low8() && (r_src1 == rs_r13sp) && (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddSpRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      } else if (r_dest.Low8() && (r_src1 == rs_r15pc) &&
                 (value <= 1020) && ((value & 0x3) == 0)) {
        return NewLIR3(kThumbAddPcRel, r_dest.GetReg(), r_src1.GetReg(), value >> 2);
      }
      FALLTHROUGH_INTENDED;
    case kOpSub:
      // 16-bit 3-bit-immediate form; negative values flip ADD<->SUB.
      if (all_low_regs && ((abs_value & 0x7) == abs_value)) {
        if (op == kOpAdd)
          opcode = (neg) ? kThumbSubRRI3 : kThumbAddRRI3;
        else
          opcode = (neg) ? kThumbAddRRI3 : kThumbSubRRI3;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      // If the value itself has no modified-immediate form, try the negated
      // value with the opposite operation.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          op = (op == kOpAdd) ? kOpSub : kOpAdd;
        }
      }
      if (mod_imm < 0 && (abs_value >> 12) == 0) {
        // This is deliberately used only if modified immediate encoding is inadequate since
        // we sometimes actually use the flags for small values but not necessarily low regs.
        if (op == kOpAdd)
          opcode = (neg) ? kThumb2SubRRI12 : kThumb2AddRRI12;
        else
          opcode = (neg) ? kThumb2AddRRI12 : kThumb2SubRRI12;
        return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), abs_value);
      }
      if (op == kOpSub) {
        opcode = kThumb2SubRRI8M;
        alt_opcode = kThumb2SubRRR;
      } else {
        opcode = kThumb2AddRRI8M;
        alt_opcode = kThumb2AddRRR;
      }
      break;
    case kOpRsub:
      opcode = kThumb2RsubRRI8M;
      alt_opcode = kThumb2RsubRRR;
      break;
    case kOpAdc:
      opcode = kThumb2AdcRRI8M;
      alt_opcode = kThumb2AdcRRR;
      break;
    case kOpSbc:
      opcode = kThumb2SbcRRI8M;
      alt_opcode = kThumb2SbcRRR;
      break;
    case kOpOr:
      opcode = kThumb2OrrRRI8M;
      alt_opcode = kThumb2OrrRRR;
      // ORN covers immediates whose inverse has a modified-immediate form.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          opcode = kThumb2OrnRRI8M;
        }
      }
      break;
    case kOpAnd:
      // BIC covers immediates whose inverse has a modified-immediate form.
      if (mod_imm < 0) {
        mod_imm = ModifiedImmediate(~value);
        if (mod_imm >= 0) {
          return NewLIR3(kThumb2BicRRI8M, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
        }
      }
      opcode = kThumb2AndRRI8M;
      alt_opcode = kThumb2AndRRR;
      break;
    case kOpXor:
      opcode = kThumb2EorRRI8M;
      alt_opcode = kThumb2EorRRR;
      break;
    case kOpMul:
      // TUNING: power of 2, shift & add
      // No multiply-by-immediate encoding; force the register fallback.
      mod_imm = -1;
      alt_opcode = kThumb2MulRRR;
      break;
    case kOpCmp: {
      LIR* res;
      if (mod_imm >= 0) {
        res = NewLIR2(kThumb2CmpRI8M, r_src1.GetReg(), mod_imm);
      } else {
        // CMN with the negated value, else compare against a loaded temp.
        mod_imm = ModifiedImmediate(-value);
        if (mod_imm >= 0) {
          res = NewLIR2(kThumb2CmnRI8M, r_src1.GetReg(), mod_imm);
        } else {
          RegStorage r_tmp = AllocTemp();
          res = LoadConstant(r_tmp, value);
          OpRegReg(kOpCmp, r_src1, r_tmp);
          FreeTemp(r_tmp);
        }
      }
      return res;
    }
    default:
      LOG(FATAL) << "Bad opcode: " << op;
  }

  if (mod_imm >= 0) {
    return NewLIR3(opcode, r_dest.GetReg(), r_src1.GetReg(), mod_imm);
  } else {
    // No immediate form fits: load the constant and use the RRR encoding.
    RegStorage r_scratch = AllocTemp();
    LoadConstant(r_scratch, value);
    LIR* res;
    if (EncodingMap[alt_opcode].flags & IS_QUAD_OP)
      res = NewLIR4(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg(), 0);
    else
      res = NewLIR3(alt_opcode, r_dest.GetReg(), r_src1.GetReg(), r_scratch.GetReg());
    FreeTemp(r_scratch);
    return res;
  }
}
654
/* Handle Thumb-only variants here - otherwise punt to OpRegRegImm */
// Register-immediate op on a single register. Uses the compact 16-bit
// SP-adjust and 8-bit-immediate encodings when they apply; everything else
// is delegated to OpRegRegImm with src == dest.
LIR* ArmMir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
  bool neg = (value < 0);
  int32_t abs_value = (neg) ? -value : value;
  // 8-bit immediate forms require a low register and |value| <= 255.
  bool short_form = (((abs_value & 0xff) == abs_value) && r_dest_src1.Low8());
  ArmOpcode opcode = kThumbBkpt;
  switch (op) {
    case kOpAdd:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbAddSpI7, value >> 2);
      } else if (short_form) {
        // Negative values flip ADD<->SUB so the immediate stays unsigned.
        opcode = (neg) ? kThumbSubRI8 : kThumbAddRI8;
      }
      break;
    case kOpSub:
      if (!neg && (r_dest_src1 == rs_r13sp) && (value <= 508)) { /* sp */
        DCHECK_EQ((value & 0x3), 0);
        return NewLIR1(kThumbSubSpI7, value >> 2);
      } else if (short_form) {
        opcode = (neg) ? kThumbAddRI8 : kThumbSubRI8;
      }
      break;
    case kOpCmp:
      // CMP immediate is unsigned; negative values need the general path.
      if (!neg && short_form) {
        opcode = kThumbCmpRI8;
      } else {
        short_form = false;
      }
      break;
    default:
      /* Punt to OpRegRegImm - if bad case catch it there */
      short_form = false;
      break;
  }
  if (short_form) {
    return NewLIR2(opcode, r_dest_src1.GetReg(), abs_value);
  } else {
    return OpRegRegImm(op, r_dest_src1, r_dest_src1, value);
  }
}
696
buzbee2700f7e2014-03-07 09:46:20 -0800697LIR* ArmMir2Lir::LoadConstantWide(RegStorage r_dest, int64_t value) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700698 LIR* res = NULL;
699 int32_t val_lo = Low32Bits(value);
700 int32_t val_hi = High32Bits(value);
buzbee091cc402014-03-31 10:14:40 -0700701 if (r_dest.IsFloat()) {
702 DCHECK(!r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700703 if ((val_lo == 0) && (val_hi == 0)) {
704 // TODO: we need better info about the target CPU. a vector exclusive or
705 // would probably be better here if we could rely on its existance.
706 // Load an immediate +2.0 (which encodes to 0)
buzbee091cc402014-03-31 10:14:40 -0700707 NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700708 // +0.0 = +2.0 - +2.0
buzbee091cc402014-03-31 10:14:40 -0700709 res = NewLIR3(kThumb2Vsubd, r_dest.GetReg(), r_dest.GetReg(), r_dest.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700710 } else {
711 int encoded_imm = EncodeImmDouble(value);
712 if (encoded_imm >= 0) {
buzbee091cc402014-03-31 10:14:40 -0700713 res = NewLIR2(kThumb2Vmovd_IMM8, r_dest.GetReg(), encoded_imm);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700714 }
715 }
716 } else {
buzbee091cc402014-03-31 10:14:40 -0700717 // NOTE: Arm32 assumption here.
718 DCHECK(r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700719 if ((InexpensiveConstantInt(val_lo) && (InexpensiveConstantInt(val_hi)))) {
buzbee2700f7e2014-03-07 09:46:20 -0800720 res = LoadConstantNoClobber(r_dest.GetLow(), val_lo);
721 LoadConstantNoClobber(r_dest.GetHigh(), val_hi);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700722 }
723 }
724 if (res == NULL) {
725 // No short form - load from the literal pool.
726 LIR* data_target = ScanLiteralPoolWide(literal_list_, val_lo, val_hi);
727 if (data_target == NULL) {
728 data_target = AddWideData(&literal_list_, val_lo, val_hi);
729 }
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100730 ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
buzbee091cc402014-03-31 10:14:40 -0700731 if (r_dest.IsFloat()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700732 res = RawLIR(current_dalvik_offset_, kThumb2Vldrd,
buzbee091cc402014-03-31 10:14:40 -0700733 r_dest.GetReg(), rs_r15pc.GetReg(), 0, 0, 0, data_target);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700734 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800735 DCHECK(r_dest.IsPair());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700736 res = RawLIR(current_dalvik_offset_, kThumb2LdrdPcRel8,
buzbee091cc402014-03-31 10:14:40 -0700737 r_dest.GetLowReg(), r_dest.GetHighReg(), rs_r15pc.GetReg(), 0, 0, data_target);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700738 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700739 AppendLIR(res);
740 }
741 return res;
742}
743
744int ArmMir2Lir::EncodeShift(int code, int amount) {
745 return ((amount & 0x1f) << 2) | code;
746}
747
buzbee2700f7e2014-03-07 09:46:20 -0800748LIR* ArmMir2Lir::LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700749 int scale, OpSize size) {
buzbee091cc402014-03-31 10:14:40 -0700750 bool all_low_regs = r_base.Low8() && r_index.Low8() && r_dest.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700751 LIR* load;
752 ArmOpcode opcode = kThumbBkpt;
753 bool thumb_form = (all_low_regs && (scale == 0));
buzbee2700f7e2014-03-07 09:46:20 -0800754 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700755
buzbee091cc402014-03-31 10:14:40 -0700756 if (r_dest.IsFloat()) {
757 if (r_dest.IsSingle()) {
buzbeefd698e62014-04-27 19:33:22 -0700758 DCHECK((size == k32) || (size == kSingle) || (size == kReference));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700759 opcode = kThumb2Vldrs;
760 size = kSingle;
761 } else {
buzbee091cc402014-03-31 10:14:40 -0700762 DCHECK(r_dest.IsDouble());
buzbee695d13a2014-04-19 13:32:20 -0700763 DCHECK((size == k64) || (size == kDouble));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700764 opcode = kThumb2Vldrd;
765 size = kDouble;
766 }
767 } else {
768 if (size == kSingle)
buzbee695d13a2014-04-19 13:32:20 -0700769 size = k32;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700770 }
771
772 switch (size) {
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700773 case kDouble: // fall-through
buzbee695d13a2014-04-19 13:32:20 -0700774 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700775 case kSingle:
776 reg_ptr = AllocTemp();
777 if (scale) {
buzbee2700f7e2014-03-07 09:46:20 -0800778 NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700779 EncodeShift(kArmLsl, scale));
780 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800781 OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700782 }
buzbee2700f7e2014-03-07 09:46:20 -0800783 load = NewLIR3(opcode, r_dest.GetReg(), reg_ptr.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700784 FreeTemp(reg_ptr);
785 return load;
buzbee695d13a2014-04-19 13:32:20 -0700786 case k32:
787 // Intentional fall-though.
788 case kReference:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700789 opcode = (thumb_form) ? kThumbLdrRRR : kThumb2LdrRRR;
790 break;
791 case kUnsignedHalf:
792 opcode = (thumb_form) ? kThumbLdrhRRR : kThumb2LdrhRRR;
793 break;
794 case kSignedHalf:
795 opcode = (thumb_form) ? kThumbLdrshRRR : kThumb2LdrshRRR;
796 break;
797 case kUnsignedByte:
798 opcode = (thumb_form) ? kThumbLdrbRRR : kThumb2LdrbRRR;
799 break;
800 case kSignedByte:
801 opcode = (thumb_form) ? kThumbLdrsbRRR : kThumb2LdrsbRRR;
802 break;
803 default:
804 LOG(FATAL) << "Bad size: " << size;
805 }
806 if (thumb_form)
buzbee2700f7e2014-03-07 09:46:20 -0800807 load = NewLIR3(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700808 else
buzbee2700f7e2014-03-07 09:46:20 -0800809 load = NewLIR4(opcode, r_dest.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700810
811 return load;
812}
813
buzbee2700f7e2014-03-07 09:46:20 -0800814LIR* ArmMir2Lir::StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src,
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700815 int scale, OpSize size) {
buzbee091cc402014-03-31 10:14:40 -0700816 bool all_low_regs = r_base.Low8() && r_index.Low8() && r_src.Low8();
Brian Carlstrom7940e442013-07-12 13:46:57 -0700817 LIR* store = NULL;
818 ArmOpcode opcode = kThumbBkpt;
819 bool thumb_form = (all_low_regs && (scale == 0));
buzbee2700f7e2014-03-07 09:46:20 -0800820 RegStorage reg_ptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700821
buzbee091cc402014-03-31 10:14:40 -0700822 if (r_src.IsFloat()) {
823 if (r_src.IsSingle()) {
buzbeefd698e62014-04-27 19:33:22 -0700824 DCHECK((size == k32) || (size == kSingle) || (size == kReference));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700825 opcode = kThumb2Vstrs;
826 size = kSingle;
827 } else {
buzbee091cc402014-03-31 10:14:40 -0700828 DCHECK(r_src.IsDouble());
buzbee695d13a2014-04-19 13:32:20 -0700829 DCHECK((size == k64) || (size == kDouble));
buzbee2700f7e2014-03-07 09:46:20 -0800830 DCHECK_EQ((r_src.GetReg() & 0x1), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700831 opcode = kThumb2Vstrd;
832 size = kDouble;
833 }
834 } else {
835 if (size == kSingle)
buzbee695d13a2014-04-19 13:32:20 -0700836 size = k32;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700837 }
838
839 switch (size) {
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700840 case kDouble: // fall-through
buzbee695d13a2014-04-19 13:32:20 -0700841 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700842 case kSingle:
843 reg_ptr = AllocTemp();
844 if (scale) {
buzbee2700f7e2014-03-07 09:46:20 -0800845 NewLIR4(kThumb2AddRRR, reg_ptr.GetReg(), r_base.GetReg(), r_index.GetReg(),
Brian Carlstrom7940e442013-07-12 13:46:57 -0700846 EncodeShift(kArmLsl, scale));
847 } else {
buzbee2700f7e2014-03-07 09:46:20 -0800848 OpRegRegReg(kOpAdd, reg_ptr, r_base, r_index);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700849 }
buzbee2700f7e2014-03-07 09:46:20 -0800850 store = NewLIR3(opcode, r_src.GetReg(), reg_ptr.GetReg(), 0);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700851 FreeTemp(reg_ptr);
852 return store;
buzbee695d13a2014-04-19 13:32:20 -0700853 case k32:
854 // Intentional fall-though.
855 case kReference:
Brian Carlstrom7940e442013-07-12 13:46:57 -0700856 opcode = (thumb_form) ? kThumbStrRRR : kThumb2StrRRR;
857 break;
858 case kUnsignedHalf:
buzbee695d13a2014-04-19 13:32:20 -0700859 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700860 case kSignedHalf:
861 opcode = (thumb_form) ? kThumbStrhRRR : kThumb2StrhRRR;
862 break;
863 case kUnsignedByte:
buzbee695d13a2014-04-19 13:32:20 -0700864 // Intentional fall-though.
Brian Carlstrom7940e442013-07-12 13:46:57 -0700865 case kSignedByte:
866 opcode = (thumb_form) ? kThumbStrbRRR : kThumb2StrbRRR;
867 break;
868 default:
869 LOG(FATAL) << "Bad size: " << size;
870 }
871 if (thumb_form)
buzbee2700f7e2014-03-07 09:46:20 -0800872 store = NewLIR3(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700873 else
buzbee2700f7e2014-03-07 09:46:20 -0800874 store = NewLIR4(opcode, r_src.GetReg(), r_base.GetReg(), r_index.GetReg(), scale);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700875
876 return store;
877}
878
// Helper function for LoadBaseDispBody()/StoreBaseDispBody().
// Emits a load/store whose encoding takes an 8-bit, word-scaled immediate
// offset (e.g. VLDR/VSTR/LDRD/STRD). If the displacement exceeds that range,
// the out-of-range part is first added into a pointer register (r_work if
// valid, else a fresh temp). Returns the emitted load/store LIR.
LIR* ArmMir2Lir::LoadStoreUsingInsnWithOffsetImm8Shl2(ArmOpcode opcode, RegStorage r_base,
                                                      int displacement, RegStorage r_src_dest,
                                                      RegStorage r_work) {
  DCHECK_EQ(displacement & 3, 0);  // The encoding requires word alignment.
  constexpr int kOffsetMask = 0xff << 2;
  int encoded_disp = (displacement & kOffsetMask) >> 2;  // Within range of the instruction.
  RegStorage r_ptr = r_base;
  if ((displacement & ~kOffsetMask) != 0) {
    r_ptr = r_work.Valid() ? r_work : AllocTemp();
    // Add displacement & ~kOffsetMask to base, it's a single instruction for up to +-256KiB.
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement & ~kOffsetMask);
  }
  LIR* lir = nullptr;
  if (!r_src_dest.IsPair()) {
    // Single register (FP or core) form.
    lir = NewLIR3(opcode, r_src_dest.GetReg(), r_ptr.GetReg(), encoded_disp);
  } else {
    // Register-pair form (LDRD/STRD take two register operands).
    lir = NewLIR4(opcode, r_src_dest.GetLowReg(), r_src_dest.GetHighReg(), r_ptr.GetReg(),
                  encoded_disp);
  }
  // Only release the pointer register if we allocated it ourselves.
  if ((displacement & ~kOffsetMask) != 0 && !r_work.Valid()) {
    FreeTemp(r_ptr);
  }
  return lir;
}
904
/*
 * Load value from base + displacement. Optionally perform null check
 * on base (which must have an associated s_reg and MIR). If not
 * performing null check, incoming MIR can be null.
 *
 * Selects the cheapest available encoding: 16-bit Thumb (opcode16) when the
 * registers and scaled displacement fit, else 32-bit Thumb2 with a 12-bit
 * offset (opcode32), else an add-then-load pair, else a full indexed load
 * through a temp holding the offset.
 */
LIR* ArmMir2Lir::LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest,
                                  OpSize size) {
  LIR* load = nullptr;
  ArmOpcode opcode16 = kThumbBkpt;  // 16-bit Thumb opcode.
  ArmOpcode opcode32 = kThumbBkpt;  // 32-bit Thumb2 opcode.
  bool short_form = false;
  bool all_low = r_dest.Is32Bit() && r_base.Low8() && r_dest.Low8();
  int scale = 0;  // Used for opcode16 and some indexed loads.
  bool already_generated = false;
  switch (size) {
    case kDouble:
      // Intentional fall-through.
    case k64:
      // 64-bit loads (VLDR.64 / LDRD) use the imm8<<2 offset helper.
      if (r_dest.IsFloat()) {
        DCHECK(!r_dest.IsPair());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrd, r_base, displacement, r_dest);
      } else {
        DCHECK(r_dest.IsPair());
        // Use the r_dest.GetLow() for the temporary pointer if needed.
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2LdrdI8, r_base, displacement, r_dest,
                                                    r_dest.GetLow());
      }
      already_generated = true;
      break;
    case kSingle:
      // Intentional fall-through.
    case k32:
      // Intentional fall-through.
    case kReference:
      if (r_dest.IsFloat()) {
        DCHECK(r_dest.IsSingle());
        load = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vldrs, r_base, displacement, r_dest);
        already_generated = true;
        break;
      }
      DCHECK_EQ((displacement & 0x3), 0);  // Word access must be word-aligned.
      scale = 2;
      // Special 16-bit PC- and SP-relative forms with a 10-bit offset range.
      if (r_dest.Low8() && (r_base == rs_rARM_PC) && (displacement <= 1020) &&
          (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrPcRel;
      } else if (r_dest.Low8() && (r_base == rs_rARM_SP) && (displacement <= 1020) &&
                 (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbLdrSpRel;
      } else {
        // Generic form: 5-bit scaled immediate for the 16-bit encoding.
        short_form = all_low && (displacement >> (5 + scale)) == 0;
        opcode16 = kThumbLdrRRI5;
        opcode32 = kThumb2LdrRRI12;
      }
      break;
    case kUnsignedHalf:
      DCHECK_EQ((displacement & 0x1), 0);  // Halfword access must be 2-byte aligned.
      scale = 1;
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrhRRI5;
      opcode32 = kThumb2LdrhRRI12;
      break;
    case kSignedHalf:
      DCHECK_EQ((displacement & 0x1), 0);
      scale = 1;
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrshRRI12;
      break;
    case kUnsignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbLdrbRRI5;
      opcode32 = kThumb2LdrbRRI12;
      break;
    case kSignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      DCHECK_EQ(opcode16, kThumbBkpt);  // Not available.
      opcode32 = kThumb2LdrsbRRI12;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }

  if (!already_generated) {
    if (short_form) {
      load = NewLIR3(opcode16, r_dest.GetReg(), r_base.GetReg(), displacement >> scale);
    } else if ((displacement >> 12) == 0) {  // Thumb2 form.
      load = NewLIR3(opcode32, r_dest.GetReg(), r_base.GetReg(), displacement);
    } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
        InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
      // In this case, using LoadIndexed would emit 3 insns (movw+movt+ldr) but we can
      // actually do it in two because we know that the kOpAdd is a single insn. On the
      // other hand, we introduce an extra dependency, so this is not necessarily faster.
      if (opcode16 != kThumbBkpt && r_dest.Low8() &&
          InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
        // We can use the 16-bit Thumb opcode for the load.
        // r_dest doubles as the address register here; the load overwrites it.
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~(0x1f << scale));
        load = NewLIR3(opcode16, r_dest.GetReg(), r_dest.GetReg(), (displacement >> scale) & 0x1f);
      } else {
        DCHECK_NE(opcode32, kThumbBkpt);
        OpRegRegImm(kOpAdd, r_dest, r_base, displacement & ~0x00000fff);
        load = NewLIR3(opcode32, r_dest.GetReg(), r_dest.GetReg(), displacement & 0x00000fff);
      }
    } else {
      if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
          (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
        scale = 0;  // Prefer unscaled indexing if the same number of insns.
      }
      // Fall back to a register-offset load through a temp.
      RegStorage reg_offset = AllocTemp();
      LoadConstant(reg_offset, displacement >> scale);
      DCHECK(!r_dest.IsFloat());
      load = LoadBaseIndexed(r_base, reg_offset, r_dest, scale, size);
      FreeTemp(reg_offset);
    }
  }

  // TODO: in future may need to differentiate Dalvik accesses w/ spills
  if (mem_ref_type_ == ResourceMask::kDalvikReg) {
    DCHECK_EQ(r_base, rs_rARM_SP);
    AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit());
  }
  return load;
}
1029
// Public load entry point: dispatches to LoadBaseDispBody() and adds the
// handling required for volatile accesses (atomic 64-bit load via LDREXD
// on CPUs without atomic LDRD, plus the trailing load barrier).
LIR* ArmMir2Lir::LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                              OpSize size, VolatileKind is_volatile) {
  // TODO: base this on target.
  if (size == kWord) {
    size = k32;
  }
  LIR* load;
  if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
      !cu_->compiler_driver->GetInstructionSetFeatures()->
          AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
    // Only 64-bit load needs special handling.
    // If the cpu supports LPAE, aligned LDRD is atomic - fall through to LoadBaseDisp().
    DCHECK(!r_dest.IsFloat());  // See RegClassForFieldLoadSave().
    // Use LDREXD for the atomic load. (Expect displacement > 0, don't optimize for == 0.)
    RegStorage r_ptr = AllocTemp();
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    load = NewLIR3(kThumb2Ldrexd, r_dest.GetLowReg(), r_dest.GetHighReg(), r_ptr.GetReg());
    FreeTemp(r_ptr);
  } else {
    load = LoadBaseDispBody(r_base, displacement, r_dest, size);
  }

  if (UNLIKELY(is_volatile == kVolatile)) {
    // Prevent reordering of the load with subsequent memory operations.
    GenMemBarrier(kLoadAny);
  }

  return load;
}
1058
Brian Carlstrom7940e442013-07-12 13:46:57 -07001059
// Store r_src to base + displacement. Mirror image of LoadBaseDispBody():
// selects 16-bit Thumb (opcode16), 32-bit Thumb2 12-bit offset (opcode32),
// an add-then-store pair, or a register-offset store through a temp,
// in decreasing order of preference.
LIR* ArmMir2Lir::StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src,
                                   OpSize size) {
  LIR* store = nullptr;
  ArmOpcode opcode16 = kThumbBkpt;  // 16-bit Thumb opcode.
  ArmOpcode opcode32 = kThumbBkpt;  // 32-bit Thumb2 opcode.
  bool short_form = false;
  bool all_low = r_src.Is32Bit() && r_base.Low8() && r_src.Low8();
  int scale = 0;  // Used for opcode16 and some indexed loads.
  bool already_generated = false;
  switch (size) {
    case kDouble:
      // Intentional fall-through.
    case k64:
      if (r_src.IsFloat()) {
        // Note: If the register is retrieved by register allocator, it should never be a pair.
        // But some functions in mir2lir assume 64-bit registers are 32-bit register pairs.
        // TODO: Rework Mir2Lir::LoadArg() and Mir2Lir::LoadArgDirect().
        if (r_src.IsPair()) {
          r_src = As64BitFloatReg(r_src);
        }
        DCHECK(!r_src.IsPair());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrd, r_base, displacement, r_src);
      } else {
        DCHECK(r_src.IsPair());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2StrdI8, r_base, displacement, r_src);
      }
      already_generated = true;
      break;
    case kSingle:
      // Intentional fall-through.
    case k32:
      // Intentional fall-through.
    case kReference:
      if (r_src.IsFloat()) {
        DCHECK(r_src.IsSingle());
        store = LoadStoreUsingInsnWithOffsetImm8Shl2(kThumb2Vstrs, r_base, displacement, r_src);
        already_generated = true;
        break;
      }
      DCHECK_EQ((displacement & 0x3), 0);  // Word access must be word-aligned.
      scale = 2;
      // Special 16-bit SP-relative form with a 10-bit offset range.
      if (r_src.Low8() && (r_base == rs_r13sp) && (displacement <= 1020) && (displacement >= 0)) {
        short_form = true;
        opcode16 = kThumbStrSpRel;
      } else {
        short_form = all_low && (displacement >> (5 + scale)) == 0;
        opcode16 = kThumbStrRRI5;
        opcode32 = kThumb2StrRRI12;
      }
      break;
    case kUnsignedHalf:
    case kSignedHalf:
      DCHECK_EQ((displacement & 0x1), 0);  // Halfword access must be 2-byte aligned.
      scale = 1;
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbStrhRRI5;
      opcode32 = kThumb2StrhRRI12;
      break;
    case kUnsignedByte:
    case kSignedByte:
      DCHECK_EQ(scale, 0);  // Keep scale = 0.
      short_form = all_low && (displacement >> (5 + scale)) == 0;
      opcode16 = kThumbStrbRRI5;
      opcode32 = kThumb2StrbRRI12;
      break;
    default:
      LOG(FATAL) << "Bad size: " << size;
  }
  if (!already_generated) {
    if (short_form) {
      store = NewLIR3(opcode16, r_src.GetReg(), r_base.GetReg(), displacement >> scale);
    } else if ((displacement >> 12) == 0) {
      // 32-bit Thumb2 form with 12-bit unsigned offset.
      store = NewLIR3(opcode32, r_src.GetReg(), r_base.GetReg(), displacement);
    } else if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) &&
        InexpensiveConstantInt(displacement & ~0x00000fff, Instruction::ADD_INT)) {
      // In this case, using StoreIndexed would emit 3 insns (movw+movt+str) but we can
      // actually do it in two because we know that the kOpAdd is a single insn. On the
      // other hand, we introduce an extra dependency, so this is not necessarily faster.
      RegStorage r_scratch = AllocTemp();
      if (opcode16 != kThumbBkpt && r_src.Low8() && r_scratch.Low8() &&
          InexpensiveConstantInt(displacement & ~(0x1f << scale), Instruction::ADD_INT)) {
        // We can use the 16-bit Thumb opcode for the load.
        OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~(0x1f << scale));
        store = NewLIR3(opcode16, r_src.GetReg(), r_scratch.GetReg(),
                        (displacement >> scale) & 0x1f);
      } else {
        DCHECK_NE(opcode32, kThumbBkpt);
        OpRegRegImm(kOpAdd, r_scratch, r_base, displacement & ~0x00000fff);
        store = NewLIR3(opcode32, r_src.GetReg(), r_scratch.GetReg(), displacement & 0x00000fff);
      }
      FreeTemp(r_scratch);
    } else {
      if (!InexpensiveConstantInt(displacement >> scale, Instruction::CONST) ||
          (scale != 0 && InexpensiveConstantInt(displacement, Instruction::CONST))) {
        scale = 0;  // Prefer unscaled indexing if the same number of insns.
      }
      // Fall back to a register-offset store through a temp.
      RegStorage r_scratch = AllocTemp();
      LoadConstant(r_scratch, displacement >> scale);
      DCHECK(!r_src.IsFloat());
      store = StoreBaseIndexed(r_base, r_scratch, r_src, scale, size);
      FreeTemp(r_scratch);
    }
  }

  // TODO: In future, may need to differentiate Dalvik & spill accesses
  if (mem_ref_type_ == ResourceMask::kDalvikReg) {
    DCHECK_EQ(r_base, rs_rARM_SP);
    AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit());
  }
  return store;
}
1171
// Public store entry point: dispatches to StoreBaseDispBody() and adds the
// handling required for volatile accesses (atomic 64-bit store via an
// LDREXD/STREXD retry loop on CPUs without atomic STRD, plus the surrounding
// barriers). Returns the instruction usable for implicit null checks.
LIR* ArmMir2Lir::StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
                               OpSize size, VolatileKind is_volatile) {
  if (UNLIKELY(is_volatile == kVolatile)) {
    // Ensure that prior accesses become visible to other threads first.
    GenMemBarrier(kAnyStore);
  }

  LIR* null_ck_insn;
  if (is_volatile == kVolatile && (size == k64 || size == kDouble) &&
      !cu_->compiler_driver->GetInstructionSetFeatures()->
          AsArmInstructionSetFeatures()->HasAtomicLdrdAndStrd()) {
    // Only 64-bit store needs special handling.
    // If the cpu supports LPAE, aligned STRD is atomic - fall through to StoreBaseDisp().
    // Use STREXD for the atomic store. (Expect displacement > 0, don't optimize for == 0.)
    DCHECK(!r_src.IsFloat());  // See RegClassForFieldLoadSave().
    RegStorage r_ptr = AllocTemp();
    OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    LIR* fail_target = NewLIR0(kPseudoTargetLabel);
    // We have only 5 temporary registers available and if r_base, r_src and r_ptr already
    // take 4, we can't directly allocate 2 more for LDREXD temps. In that case clobber r_ptr
    // in LDREXD and recalculate it from r_base.
    RegStorage r_temp = AllocTemp();
    RegStorage r_temp_high = AllocTemp(false);  // We may not have another temp.
    if (r_temp_high.Valid()) {
      null_ck_insn = NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_temp_high.GetReg(), r_ptr.GetReg());
      FreeTemp(r_temp_high);
      FreeTemp(r_temp);
    } else {
      // If we don't have another temp, clobber r_ptr in LDREXD and reload it.
      null_ck_insn = NewLIR3(kThumb2Ldrexd, r_temp.GetReg(), r_ptr.GetReg(), r_ptr.GetReg());
      FreeTemp(r_temp);  // May need the temp for kOpAdd.
      OpRegRegImm(kOpAdd, r_ptr, r_base, displacement);
    }
    // STREXD writes 0 to r_temp on success; loop back to the LDREXD on failure.
    NewLIR4(kThumb2Strexd, r_temp.GetReg(), r_src.GetLowReg(), r_src.GetHighReg(), r_ptr.GetReg());
    OpCmpImmBranch(kCondNe, r_temp, 0, fail_target);
    FreeTemp(r_ptr);
  } else {
    // TODO: base this on target.
    if (size == kWord) {
      size = k32;
    }

    null_ck_insn = StoreBaseDispBody(r_base, displacement, r_src, size);
  }

  if (UNLIKELY(is_volatile == kVolatile)) {
    // Preserve order with respect to any subsequent volatile loads.
    // We need StoreLoad, but that generally requires the most expensive barrier.
    GenMemBarrier(kAnyAny);
  }

  return null_ck_insn;
}
1225
buzbee2700f7e2014-03-07 09:46:20 -08001226LIR* ArmMir2Lir::OpFpRegCopy(RegStorage r_dest, RegStorage r_src) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001227 int opcode;
buzbee091cc402014-03-31 10:14:40 -07001228 DCHECK_EQ(r_dest.IsDouble(), r_src.IsDouble());
1229 if (r_dest.IsDouble()) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001230 opcode = kThumb2Vmovd;
1231 } else {
buzbee091cc402014-03-31 10:14:40 -07001232 if (r_dest.IsSingle()) {
1233 opcode = r_src.IsSingle() ? kThumb2Vmovs : kThumb2Fmsr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001234 } else {
buzbee091cc402014-03-31 10:14:40 -07001235 DCHECK(r_src.IsSingle());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001236 opcode = kThumb2Fmrs;
1237 }
1238 }
buzbee2700f7e2014-03-07 09:46:20 -08001239 LIR* res = RawLIR(current_dalvik_offset_, opcode, r_dest.GetReg(), r_src.GetReg());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001240 if (!(cu_->disable_opt & (1 << kSafeOptimizations)) && r_dest == r_src) {
1241 res->flags.is_nop = true;
1242 }
1243 return res;
1244}
1245
// Direct memory-operand ops are not supported on ARM; reaching here is a bug.
LIR* ArmMir2Lir::OpMem(OpKind op, RegStorage r_base, int disp) {
  UNUSED(op, r_base, disp);
  LOG(FATAL) << "Unexpected use of OpMem for Arm";
  UNREACHABLE();
}
1251
Andreas Gampe98430592014-07-27 19:44:50 -07001252LIR* ArmMir2Lir::InvokeTrampoline(OpKind op, RegStorage r_tgt, QuickEntrypointEnum trampoline) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001253 UNUSED(trampoline); // The address of the trampoline is already loaded into r_tgt.
Andreas Gampe98430592014-07-27 19:44:50 -07001254 return OpReg(op, r_tgt);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001255}
1256
Serban Constantinescu63999682014-07-15 17:44:21 +01001257size_t ArmMir2Lir::GetInstructionOffset(LIR* lir) {
1258 uint64_t check_flags = GetTargetInstFlags(lir->opcode);
1259 DCHECK((check_flags & IS_LOAD) || (check_flags & IS_STORE));
1260 size_t offset = (check_flags & IS_TERTIARY_OP) ? lir->operands[2] : 0;
1261
1262 if (check_flags & SCALED_OFFSET_X2) {
1263 offset = offset * 2;
1264 } else if (check_flags & SCALED_OFFSET_X4) {
1265 offset = offset * 4;
1266 }
1267 return offset;
1268}
1269
Vladimir Markocc234812015-04-07 09:36:09 +01001270void ArmMir2Lir::CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs) {
1271 // Start with the default counts.
1272 Mir2Lir::CountRefs(core_counts, fp_counts, num_regs);
1273
1274 if (pc_rel_temp_ != nullptr) {
1275 // Now, if the dex cache array base temp is used only once outside any loops (weight = 1),
1276 // avoid the promotion, otherwise boost the weight by factor 4 because the full PC-relative
1277 // load sequence is 4 instructions long.
1278 int p_map_idx = SRegToPMap(pc_rel_temp_->s_reg_low);
1279 if (core_counts[p_map_idx].count == 1) {
1280 core_counts[p_map_idx].count = 0;
1281 } else {
1282 core_counts[p_map_idx].count *= 4;
1283 }
1284 }
1285}
1286
1287void ArmMir2Lir::DoPromotion() {
1288 if (CanUseOpPcRelDexCacheArrayLoad()) {
1289 pc_rel_temp_ = mir_graph_->GetNewCompilerTemp(kCompilerTempBackend, false);
1290 }
1291
1292 Mir2Lir::DoPromotion();
1293
1294 if (pc_rel_temp_ != nullptr) {
1295 // Now, if the dex cache array base temp is promoted, remember the register but
1296 // always remove the temp's stack location to avoid unnecessarily bloating the stack.
1297 dex_cache_arrays_base_reg_ = mir_graph_->reg_location_[pc_rel_temp_->s_reg_low].reg;
1298 DCHECK(!dex_cache_arrays_base_reg_.Valid() || !dex_cache_arrays_base_reg_.IsFloat());
1299 mir_graph_->RemoveLastCompilerTemp(kCompilerTempBackend, false, pc_rel_temp_);
1300 pc_rel_temp_ = nullptr;
1301 }
1302}
1303
Brian Carlstrom7940e442013-07-12 13:46:57 -07001304} // namespace art