/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex/dataflow_iterator-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "mir_to_lir-inl.h"
#include "object_utils.h"
#include "thread-inl.h"

namespace art {

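// Lock the physical register(s) currently mapped to the argument at |in_position| (and the
// following position when |wide|) so they are not handed out as temps while in use.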
void Mir2Lir::LockArg(int in_position, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  if (reg_arg_low.Valid()) {
    LockTemp(reg_arg_low);
  }
  if (reg_arg_high.Valid() && reg_arg_low != reg_arg_high) {
    LockTemp(reg_arg_high);
  }
}

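// Load the argument at |in_position| into a register of |reg_class|, using its mapped physical
// register(s) when available and loading from the stack otherwise; the value is copied to a
// register of the requested class if the existing mapping does not already match.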
// TODO: needs revisiting for 64-bit.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    /*
     * When doing a call on x86, the stack pointer is moved in order to push the return address.
     * Thus, we add another 4 bytes to reach the outs of the caller (the ins of the callee).
     * TODO: This needs to be revisited for 64-bit.
     */
    offset += sizeof(uint32_t);
  }

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetReg(kSp), offset, new_regs, k64);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}

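// Load the argument at |in_position| directly into the location described by |rl_dest|,
// combining register copies and stack loads as needed for narrow and wide values.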
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86 || cu_->instruction_set == kX86_64) {
    /*
     * When doing a call on x86, the stack pointer is moved in order to push the return address.
     * Thus, we add another 4 bytes to reach the outs of the caller (the ins of the callee).
     * TODO: This needs to be revisited for 64-bit.
     */
    offset += sizeof(uint32_t);
  }

  if (!rl_dest.wide) {
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg);
    }
  } else {
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      LoadBaseDisp(TargetReg(kSp), offset, rl_dest.reg, k64);
    }
  }
}

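// Generate code for a "special" getter: a leaf method that simply returns an instance field of
// "this". Returns false if the pattern cannot be handled (e.g. unsupported volatile access).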
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // The inliner doesn't distinguish kDouble or kFloat, use shorty.
  bool double_or_float = cu_->shorty[0] == 'F' || cu_->shorty[0] == 'D';

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kCoreReg);
  RegLocation rl_dest = wide ? GetReturnWide(double_or_float) : GetReturn(double_or_float);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (data.is_volatile) {
    LoadBaseDispVolatile(reg_obj, data.field_offset, r_result, size);
    // Without context sensitive analysis, we must issue the most conservative barriers.
    // In this case, either a load or store may follow so we issue both barriers.
    GenMemBarrier(kLoadLoad);
    GenMemBarrier(kLoadStore);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size);
  }
  if (r_result != rl_dest.reg) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}

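// Generate code for a "special" setter: a leaf method that simply stores one of its arguments
// into an instance field of "this". Returns false if the pattern cannot be handled.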
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  LockArg(data.src_arg, wide);
  RegStorage reg_obj = LoadArg(data.object_arg, kCoreReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide);
  if (data.is_volatile) {
    // There might have been a store before this volatile one so insert StoreStore barrier.
    GenMemBarrier(kStoreStore);
    StoreBaseDispVolatile(reg_obj, data.field_offset, reg_src, size);
    // A load might follow the volatile store so insert a StoreLoad barrier.
    GenMemBarrier(kStoreLoad);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size);
  }
  if (ref) {
    MarkGCCard(reg_src, reg_obj);
  }
  return true;
}

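// Generate code for a "special" identity method that just returns one of its arguments.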
bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
  const InlineReturnArgData& data = special.d.return_data;
  bool wide = (data.is_wide != 0u);
  // The inliner doesn't distinguish kDouble or kFloat, use shorty.
  bool double_or_float = cu_->shorty[0] == 'F' || cu_->shorty[0] == 'D';

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.arg, wide);
  RegLocation rl_dest = wide ? GetReturnWide(double_or_float) : GetReturn(double_or_float);
  LoadArgDirect(data.arg, rl_dest);
  return true;
}

/*
 * Special-case code generation for simple non-throwing leaf methods.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      successful = true;
      RegLocation rl_dest = GetReturn(cu_->shorty[0] == 'F');
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}

/*
 * Target-independent code generation. Use only high-level
 * load/store utilities here, or target-dependent genXX() handlers
 * when necessary.
 */
void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
  RegLocation rl_src[3];
  RegLocation rl_dest = mir_graph_->GetBadLoc();
  RegLocation rl_result = mir_graph_->GetBadLoc();
  Instruction::Code opcode = mir->dalvikInsn.opcode;
  int opt_flags = mir->optimization_flags;
  uint32_t vB = mir->dalvikInsn.vB;
  uint32_t vC = mir->dalvikInsn.vC;
  DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
                                << std::hex << current_dalvik_offset_;

  // Prep Src and Dest locations.
  int next_sreg = 0;
  int next_loc = 0;
  uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
  rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
  if (attrs & DF_UA) {
    if (attrs & DF_A_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UB) {
    if (attrs & DF_B_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UC) {
    if (attrs & DF_C_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
    }
  }
  if (attrs & DF_DA) {
    if (attrs & DF_A_WIDE) {
      rl_dest = mir_graph_->GetDestWide(mir);
    } else {
      rl_dest = mir_graph_->GetDest(mir);
    }
  }
  switch (opcode) {
    case Instruction::NOP:
      break;

    case Instruction::MOVE_EXCEPTION:
      GenMoveException(rl_dest);
      break;

    case Instruction::RETURN_VOID:
      if (((cu_->access_flags & kAccConstructor) != 0) &&
          cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
                                                           cu_->class_def_idx)) {
        GenMemBarrier(kStoreStore);
      }
      if (!mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      break;

    case Instruction::RETURN:
    case Instruction::RETURN_OBJECT:
      if (!mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      StoreValue(GetReturn(cu_->shorty[0] == 'F'), rl_src[0]);
      break;

    case Instruction::RETURN_WIDE:
      if (!mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      StoreValueWide(GetReturnWide(cu_->shorty[0] == 'D'), rl_src[0]);
      break;

    case Instruction::MOVE_RESULT_WIDE:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValueWide(rl_dest, GetReturnWide(rl_dest.fp));
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_OBJECT:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValue(rl_dest, GetReturn(rl_dest.fp));
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      StoreValueWide(rl_dest, rl_src[0]);
      break;

    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      GenConst(rl_dest, vB);
      break;

    case Instruction::CONST_HIGH16:
      GenConst(rl_dest, vB << 16);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
      break;

    case Instruction::CONST_WIDE:
      GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      rl_result = EvalLoc(rl_dest, kAnyReg, true);
      LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
      StoreValueWide(rl_dest, rl_result);
      break;

    case Instruction::MONITOR_ENTER:
      GenMonitorEnter(opt_flags, rl_src[0]);
      break;

    case Instruction::MONITOR_EXIT:
      GenMonitorExit(opt_flags, rl_src[0]);
      break;

    case Instruction::CHECK_CAST: {
      GenCheckCast(mir->offset, vB, rl_src[0]);
      break;
    }
    case Instruction::INSTANCE_OF:
      GenInstanceof(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::NEW_INSTANCE:
      GenNewInstance(vB, rl_dest);
      break;

    case Instruction::THROW:
      GenThrow(rl_src[0]);
      break;

    case Instruction::ARRAY_LENGTH:
      int len_offset;
      len_offset = mirror::Array::LengthOffset().Int32Value();
      rl_src[0] = LoadValue(rl_src[0], kCoreReg);
      GenNullCheck(rl_src[0].reg, opt_flags);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
      MarkPossibleNullPointerException(opt_flags);
      StoreValue(rl_dest, rl_result);
      break;

    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      GenConstString(vB, rl_dest);
      break;

    case Instruction::CONST_CLASS:
      GenConstClass(vB, rl_dest);
      break;

    case Instruction::FILL_ARRAY_DATA:
      GenFillArrayData(vB, rl_src[0]);
      break;

    case Instruction::FILLED_NEW_ARRAY:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   false /* not range */));
      break;

    case Instruction::FILLED_NEW_ARRAY_RANGE:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   true /* range */));
      break;

    case Instruction::NEW_ARRAY:
      GenNewArray(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      if (mir_graph_->IsBackedge(bb, bb->taken)) {
        GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
      } else {
        OpUnconditionalBranch(&label_list[bb->taken]);
      }
      break;

    case Instruction::PACKED_SWITCH:
      GenPackedSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::SPARSE_SWITCH:
      GenSparseSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::CMPL_FLOAT:
    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
      GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::CMP_LONG:
      GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const && rl_src[1].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
                                       mir_graph_->ConstantValue(rl_src[1].orig_sreg));
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id)) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb)) {
          GenSuspendTest(opt_flags);
        }
        GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
      }
      break;
    }

    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id)) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb)) {
          GenSuspendTest(opt_flags);
        }
        GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
      }
      break;
    }

    case Instruction::AGET_WIDE:
      GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
      break;
    case Instruction::AGET_OBJECT:
      GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET:
      GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET_BOOLEAN:
      GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_BYTE:
      GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_CHAR:
      GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::AGET_SHORT:
      GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::APUT_WIDE:
      GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
      break;
    case Instruction::APUT:
      GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
      break;
    case Instruction::APUT_OBJECT: {
      bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
      bool is_safe = is_null;  // Always safe to store null.
      if (!is_safe) {
        // Check safety from verifier type information.
        const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
        is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
      }
      if (is_null || is_safe) {
        // Store of constant null doesn't require an assignability test and can be generated inline
        // without fixed register usage or a card mark.
        GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
      } else {
        GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
      }
      break;
    }
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
      break;
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
      GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
      break;

    case Instruction::IGET_OBJECT:
      GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true);
      break;

    case Instruction::IGET_WIDE:
      GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);
      break;

    case Instruction::IGET:
      GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_CHAR:
      GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_SHORT:
      GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IGET_BOOLEAN:
    case Instruction::IGET_BYTE:
      GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false);
      break;

    case Instruction::IPUT_WIDE:
      GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);
      break;

    case Instruction::IPUT_OBJECT:
      GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true);
      break;

    case Instruction::IPUT:
      GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_BOOLEAN:
    case Instruction::IPUT_BYTE:
      GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_CHAR:
      GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::IPUT_SHORT:
      GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false);
      break;

    case Instruction::SGET_OBJECT:
      GenSget(mir, rl_dest, false, true);
      break;
    case Instruction::SGET:
    case Instruction::SGET_BOOLEAN:
    case Instruction::SGET_BYTE:
    case Instruction::SGET_CHAR:
    case Instruction::SGET_SHORT:
      GenSget(mir, rl_dest, false, false);
      break;

    case Instruction::SGET_WIDE:
      GenSget(mir, rl_dest, true, false);
      break;

    case Instruction::SPUT_OBJECT:
      GenSput(mir, rl_src[0], false, true);
      break;

    case Instruction::SPUT:
    case Instruction::SPUT_BOOLEAN:
    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_CHAR:
    case Instruction::SPUT_SHORT:
      GenSput(mir, rl_src[0], false, false);
      break;

    case Instruction::SPUT_WIDE:
      GenSput(mir, rl_src[0], true, false);
      break;

    case Instruction::INVOKE_STATIC_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
      break;
    case Instruction::INVOKE_STATIC:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
      break;

    case Instruction::INVOKE_DIRECT:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
      break;
    case Instruction::INVOKE_DIRECT_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
      break;

    case Instruction::INVOKE_VIRTUAL:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
      break;
    case Instruction::INVOKE_VIRTUAL_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
      break;

    case Instruction::INVOKE_SUPER:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
      break;
    case Instruction::INVOKE_SUPER_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
      break;

    case Instruction::INVOKE_INTERFACE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
      break;
    case Instruction::INVOKE_INTERFACE_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
      break;

    case Instruction::NEG_INT:
    case Instruction::NOT_INT:
      GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_LONG:
    case Instruction::NOT_LONG:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_FLOAT:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::NEG_DOUBLE:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]);
      break;

    case Instruction::INT_TO_LONG:
      GenIntToLong(rl_dest, rl_src[0]);
      break;

    case Instruction::LONG_TO_INT:
      rl_src[0] = UpdateLocWide(rl_src[0]);
      rl_src[0] = WideToNarrow(rl_src[0]);
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_BYTE:
    case Instruction::INT_TO_SHORT:
    case Instruction::INT_TO_CHAR:
      GenIntNarrowing(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::INT_TO_FLOAT:
    case Instruction::INT_TO_DOUBLE:
    case Instruction::LONG_TO_FLOAT:
    case Instruction::LONG_TO_DOUBLE:
    case Instruction::FLOAT_TO_INT:
    case Instruction::FLOAT_TO_LONG:
    case Instruction::FLOAT_TO_DOUBLE:
    case Instruction::DOUBLE_TO_INT:
    case Instruction::DOUBLE_TO_LONG:
    case Instruction::DOUBLE_TO_FLOAT:
      GenConversion(opcode, rl_dest, rl_src[0]);
      break;

    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
      if (rl_src[0].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[1],
                         mir_graph_->ConstantValue(rl_src[0].orig_sreg));
      } else if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0],
                         mir_graph_->ConstantValue(rl_src[1].orig_sreg));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR:
      if (rl_src[1].is_const &&
          InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) {
        GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1]));
      } else {
        GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::ADD_LONG:
    case Instruction::SUB_LONG:
    case Instruction::AND_LONG:
    case Instruction::OR_LONG:
    case Instruction::XOR_LONG:
    case Instruction::ADD_LONG_2ADDR:
    case Instruction::SUB_LONG_2ADDR:
    case Instruction::AND_LONG_2ADDR:
    case Instruction::OR_LONG_2ADDR:
    case Instruction::XOR_LONG_2ADDR:
      if (rl_src[0].is_const || rl_src[1].is_const) {
        GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
        break;
      }
      // Note: intentional fallthrough.

    case Instruction::MUL_LONG:
    case Instruction::DIV_LONG:
    case Instruction::REM_LONG:
    case Instruction::MUL_LONG_2ADDR:
    case Instruction::DIV_LONG_2ADDR:
    case Instruction::REM_LONG_2ADDR:
      GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::SHL_LONG:
    case Instruction::SHR_LONG:
    case Instruction::USHR_LONG:
    case Instruction::SHL_LONG_2ADDR:
    case Instruction::SHR_LONG_2ADDR:
    case Instruction::USHR_LONG_2ADDR:
      if (rl_src[1].is_const) {
        GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      } else {
        GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]);
      }
      break;

    case Instruction::ADD_FLOAT:
    case Instruction::SUB_FLOAT:
    case Instruction::MUL_FLOAT:
    case Instruction::DIV_FLOAT:
    case Instruction::REM_FLOAT:
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::DIV_FLOAT_2ADDR:
    case Instruction::REM_FLOAT_2ADDR:
      GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::ADD_DOUBLE:
    case Instruction::SUB_DOUBLE:
    case Instruction::MUL_DOUBLE:
    case Instruction::DIV_DOUBLE:
    case Instruction::REM_DOUBLE:
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::DIV_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE_2ADDR:
      GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::RSUB_INT:
    case Instruction::ADD_INT_LIT16:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT_LIT16:
    case Instruction::AND_INT_LIT16:
    case Instruction::OR_INT_LIT16:
    case Instruction::XOR_INT_LIT16:
    case Instruction::ADD_INT_LIT8:
    case Instruction::RSUB_INT_LIT8:
    case Instruction::MUL_INT_LIT8:
    case Instruction::DIV_INT_LIT8:
    case Instruction::REM_INT_LIT8:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT_LIT8:
    case Instruction::XOR_INT_LIT8:
    case Instruction::SHL_INT_LIT8:
    case Instruction::SHR_INT_LIT8:
    case Instruction::USHR_INT_LIT8:
      GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC);
      break;

    default:
      LOG(FATAL) << "Unexpected opcode: " << opcode;
  }
  DCHECK(CheckCorePoolSanity());
}  // NOLINT(readability/fn_size)

// Process extended MIR instructions
void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) {
    case kMirOpCopy: {
      RegLocation rl_src = mir_graph_->GetSrc(mir, 0);
      RegLocation rl_dest = mir_graph_->GetDest(mir);
      StoreValue(rl_dest, rl_src);
      break;
    }
    case kMirOpFusedCmplFloat:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmpgFloat:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/);
      break;
    case kMirOpFusedCmplDouble:
      GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpgDouble:
      GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/);
      break;
    case kMirOpFusedCmpLong:
      GenFusedLongCmpBranch(bb, mir);
      break;
    case kMirOpSelect:
      GenSelect(bb, mir);
      break;
    case kMirOpPhi:
    case kMirOpNop:
    case kMirOpNullCheck:
    case kMirOpRangeCheck:
    case kMirOpDivZeroCheck:
    case kMirOpCheck:
    case kMirOpCheckPart2:
      // Ignore these known opcodes
      break;
    default:
      // Give the backends a chance to handle unknown extended MIR opcodes.
      GenMachineSpecificExtendedMethodMIR(bb, mir);
      break;
  }
}

void Mir2Lir::GenPrintLabel(MIR* mir) {
  // Mark the beginning of a Dalvik instruction for line tracking.
  if (cu_->verbose) {
    char* inst_str = mir_graph_->GetDalvikDisassembly(mir);
    MarkBoundary(mir->offset, inst_str);
  }
}

// Handle the content in each basic block.
bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) {
  if (bb->block_type == kDead) return false;
  current_dalvik_offset_ = bb->start_offset;
  MIR* mir;
  int block_id = bb->id;

  block_label_list_[block_id].operands[0] = bb->start_offset;

  // Insert the block label.
  block_label_list_[block_id].opcode = kPseudoNormalBlockLabel;
  block_label_list_[block_id].flags.fixup = kFixupLabel;
  AppendLIR(&block_label_list_[block_id]);

  LIR* head_lir = NULL;

  // If this is a catch block, export the start address.
  if (bb->catch_entry) {
    head_lir = NewLIR0(kPseudoExportedPC);
  }

  // Free temp registers and reset redundant store tracking.
  ClobberAllTemps();

  if (bb->block_type == kEntryBlock) {
    ResetRegPool();
    int start_vreg = cu_->num_dalvik_registers - cu_->num_ins;
    GenEntrySequence(&mir_graph_->reg_location_[start_vreg],
                     mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]);
  } else if (bb->block_type == kExitBlock) {
    ResetRegPool();
    GenExitSequence();
  }

  for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) {
    ResetRegPool();
    if (cu_->disable_opt & (1 << kTrackLiveTemps)) {
      ClobberAllTemps();
      // Reset temp allocation to minimize differences when A/B testing.
      reg_pool_->ResetNextTemp();
    }

    if (cu_->disable_opt & (1 << kSuppressLoads)) {
      ResetDefTracking();
    }

    // Reset temp tracking sanity check.
    if (kIsDebugBuild) {
      live_sreg_ = INVALID_SREG;
    }

    current_dalvik_offset_ = mir->offset;
    int opcode = mir->dalvikInsn.opcode;

    GenPrintLabel(mir);

    // Remember the first LIR for this block.
    if (head_lir == NULL) {
      head_lir = &block_label_list_[bb->id];
      // Set the first label as a scheduling barrier.
      DCHECK(!head_lir->flags.use_def_invalid);
      head_lir->u.m.def_mask = ENCODE_ALL;
    }

    if (opcode == kMirOpCheck) {
      // Combine check and work halves of throwing instruction.
      MIR* work_half = mir->meta.throw_insn;
      mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode;
      mir->meta = work_half->meta;  // Whatever the work_half had, we need to copy it.
      opcode = work_half->dalvikInsn.opcode;
      SSARepresentation* ssa_rep = work_half->ssa_rep;
      work_half->ssa_rep = mir->ssa_rep;
      mir->ssa_rep = ssa_rep;
      work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2);
      work_half->meta.throw_insn = mir;
    }

    if (MIRGraph::IsPseudoMirOp(opcode)) {
      HandleExtendedMethodMIR(bb, mir);
      continue;
    }

    CompileDalvikInstruction(mir, bb, block_label_list_);
  }

  if (head_lir) {
    // Eliminate redundant loads/stores and delay stores into later slots.
    ApplyLocalOptimizations(head_lir, last_lir_insn_);
  }
  return false;
}

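// Attempt to compile the method as a special case using the inline method data; returns true
// if special-case code was generated.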
bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) {
  cu_->NewTimingSplit("SpecialMIR2LIR");
  // Find the first DalvikByteCode block.
  int num_reachable_blocks = mir_graph_->GetNumReachableBlocks();
  BasicBlock* bb = NULL;
  for (int idx = 0; idx < num_reachable_blocks; idx++) {
    // TODO: no direct access of growable lists.
    int dfs_index = mir_graph_->GetDfsOrder()->Get(idx);
    bb = mir_graph_->GetBasicBlock(dfs_index);
    if (bb->block_type == kDalvikByteCode) {
      break;
    }
  }
  if (bb == NULL) {
    return false;
  }
  DCHECK_EQ(bb->start_offset, 0);
  DCHECK(bb->first_mir_insn != NULL);

  // Get the first instruction.
  MIR* mir = bb->first_mir_insn;

  // Free temp registers and reset redundant store tracking.
  ResetRegPool();
  ResetDefTracking();
  ClobberAllTemps();

  return GenSpecialCase(bb, mir, special);
}

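// Convert the whole method from MIR to LIR, visiting basic blocks in pre-order DFS layout order
// and inserting branches where fall-through blocks are not laid out consecutively.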
void Mir2Lir::MethodMIR2LIR() {
  cu_->NewTimingSplit("MIR2LIR");

  // Hold the labels of each block.
  block_label_list_ =
      static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(),
                                      kArenaAllocLIR));

  PreOrderDfsIterator iter(mir_graph_);
  BasicBlock* curr_bb = iter.Next();
  BasicBlock* next_bb = iter.Next();
  while (curr_bb != NULL) {
    MethodBlockCodeGen(curr_bb);
    // If the fall_through block is no longer laid out consecutively, drop in a branch.
    BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through);
    if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) {
      OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]);
    }
    curr_bb = next_bb;
    do {
      next_bb = iter.Next();
    } while ((next_bb != NULL) && (next_bb->block_type == kDead));
  }
  HandleSlowPaths();
}

//
// LIR Slow Path
//

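// Emit the slow path's target label (a pseudo |opcode| LIR), point the fast path's branch at it,
// and return it.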
LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) {
  m2l_->SetCurrentDexPc(current_dex_pc_);
  LIR* target = m2l_->NewLIR0(opcode);
  fromfast_->target = target;
  return target;
}

}  // namespace art