Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "dex/compiler_internals.h" |
| 18 | #include "dex/dataflow_iterator-inl.h" |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 19 | #include "dex/quick/dex_file_method_inliner.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 20 | #include "mir_to_lir-inl.h" |
| 21 | #include "object_utils.h" |
Ian Rogers | 02ed4c0 | 2013-09-06 13:10:04 -0700 | [diff] [blame] | 22 | #include "thread-inl.h" |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 23 | |
| 24 | namespace art { |
| 25 | |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 26 | RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) { |
| 27 | RegisterClass res; |
| 28 | switch (shorty_type) { |
| 29 | case 'L': |
| 30 | res = kRefReg; |
| 31 | break; |
| 32 | case 'F': |
| 33 | // Expected fallthrough. |
| 34 | case 'D': |
| 35 | res = kFPReg; |
| 36 | break; |
| 37 | default: |
| 38 | res = kCoreReg; |
| 39 | } |
| 40 | return res; |
| 41 | } |
| 42 | |
| 43 | RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) { |
| 44 | RegisterClass res; |
| 45 | if (loc.fp) { |
| 46 | DCHECK(!loc.ref) << "At most, one of ref/fp may be set"; |
| 47 | res = kFPReg; |
| 48 | } else if (loc.ref) { |
| 49 | res = kRefReg; |
| 50 | } else { |
| 51 | res = kCoreReg; |
| 52 | } |
| 53 | return res; |
| 54 | } |
| 55 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 56 | void Mir2Lir::LockArg(int in_position, bool wide) { |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 57 | RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position); |
| 58 | RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) : |
| 59 | RegStorage::InvalidReg(); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 60 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 61 | if (reg_arg_low.Valid()) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 62 | LockTemp(reg_arg_low); |
| 63 | } |
buzbee | b5860fb | 2014-06-21 15:31:01 -0700 | [diff] [blame] | 64 | if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 65 | LockTemp(reg_arg_high); |
| 66 | } |
| 67 | } |
| 68 | |
// TODO: simplify when 32-bit targets go hard-float.
/*
 * Load the method argument at "in" position |in_position| into a register of
 * |reg_class|, loading it from the stack and/or copying between register classes
 * as needed. |wide| selects a 64-bit argument occupying two consecutive in-positions.
 * Returns the RegStorage that now holds the argument.
 */
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  // All stack accesses below are to the Dalvik vreg area.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * Same for x86-64, but the pushed return address is 8 bytes.
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    // 64-bit targets: even a wide arg occupies a single register or a single slot.
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      // Arg lives on the stack - load it into a fresh temp of the right class.
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  // 32-bit targets: a wide arg is represented as a low/high register pair.
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetPtrReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}
| 162 | |
// TODO: simplify when 32-bit targets go hard float.
/*
 * Load the method argument at "in" position |in_position| directly into the
 * register(s) already chosen by |rl_dest|, copying from argument registers
 * and/or loading from the stack as needed.
 */
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  // All stack accesses below are to the Dalvik vreg area.
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * When doing a call for x86, it moves the stack pointer in order to push return.
     * Thus, we add another 4 bytes to figure out the out of caller (in of callee).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * Same for x86-64, but the pushed return address is 8 bytes.
     */
    offset += sizeof(uint64_t);
  }

  if (!rl_dest.wide) {
    // Narrow arg: either copy from its arg register or load its single stack slot.
    RegStorage reg = GetArgMappingToPhysicalReg(in_position);
    if (reg.Valid()) {
      OpRegCopy(rl_dest.reg, reg);
    } else {
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg);
    }
  } else {
    if (cu_->target64) {
      // 64-bit targets: the whole wide arg is in one register or one 64-bit slot.
      RegStorage reg = GetArgMappingToPhysicalReg(in_position);
      if (reg.Valid()) {
        OpRegCopy(rl_dest.reg, reg);
      } else {
        LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
      }
      return;
    }

    // 32-bit targets: each half may independently be in a register or on the stack.
    RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
    RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1);

    if (reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // Both halves in registers.
      OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high));
    } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) {
      // Low half in a register, high half spilled to the next slot.
      OpRegCopy(rl_dest.reg, reg_arg_low);
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, rl_dest.reg.GetHigh());
    } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) {
      // High half in a register, low half on the stack.
      OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high);
      Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg.GetLow());
    } else {
      // Both halves on the stack.
      LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);
    }
  }
}
| 218 | |
/*
 * Attempt special-case codegen for an inlined "simple getter" (a leaf method that
 * just returns an instance field of "this"). Returns true and emits the code if the
 * pattern is supported; returns false (having emitted nothing) to fall back to
 * normal compilation.
 */
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  bool wide = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE));
  bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT));
  OpSize size = LoadStoreOpSize(wide, ref);
  // Bail if the field is volatile and the target can't do an atomic load of this size.
  if (data.is_volatile && !SupportsVolatileLoadStore(size)) {
    return false;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  // Load into a temp of the field's register class if the return register doesn't match.
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = wide ? AllocTypedTempWide(rl_dest.fp, reg_class)
                    : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (ref) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  // Move the loaded value into the actual return register if we used a temp.
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (wide) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}
| 261 | |
| 262 | bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) { |
| 263 | // FastInstance() already checked by DexFileMethodInliner. |
| 264 | const InlineIGetIPutData& data = special.d.ifield_data; |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 265 | if (data.method_is_static != 0u || data.object_arg != 0u) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 266 | // The object is not "this" and has to be null-checked. |
| 267 | return false; |
| 268 | } |
Vladimir Marko | e1fced1 | 2014-04-04 14:52:53 +0100 | [diff] [blame] | 269 | if (data.return_arg_plus1 != 0u) { |
| 270 | // The setter returns a method argument which we don't support here. |
| 271 | return false; |
| 272 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 273 | |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 274 | bool wide = (data.op_variant == InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE)); |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 275 | bool ref = (data.op_variant == InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT)); |
| 276 | OpSize size = LoadStoreOpSize(wide, ref); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 277 | if (data.is_volatile && !SupportsVolatileLoadStore(size)) { |
| 278 | return false; |
| 279 | } |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 280 | |
| 281 | // Point of no return - no aborts after this |
| 282 | GenPrintLabel(mir); |
| 283 | LockArg(data.object_arg); |
| 284 | LockArg(data.src_arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 285 | RegStorage reg_obj = LoadArg(data.object_arg, kRefReg); |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 286 | RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile); |
| 287 | RegStorage reg_src = LoadArg(data.src_arg, reg_class, wide); |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 288 | if (ref) { |
| 289 | StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile); |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 290 | } else { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 291 | StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile : |
| 292 | kNotVolatile); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 293 | } |
Vladimir Marko | 455759b | 2014-05-06 20:49:36 +0100 | [diff] [blame] | 294 | if (ref) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 295 | MarkGCCard(reg_src, reg_obj); |
| 296 | } |
| 297 | return true; |
| 298 | } |
| 299 | |
| 300 | bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) { |
| 301 | const InlineReturnArgData& data = special.d.return_data; |
Vladimir Marko | e3e0260 | 2014-03-12 15:42:41 +0000 | [diff] [blame] | 302 | bool wide = (data.is_wide != 0u); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 303 | |
| 304 | // Point of no return - no aborts after this |
| 305 | GenPrintLabel(mir); |
| 306 | LockArg(data.arg, wide); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 307 | RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]); |
| 308 | RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 309 | LoadArgDirect(data.arg, rl_dest); |
| 310 | return true; |
| 311 | } |
| 312 | |
/*
 * Special-case code generation for simple non-throwing leaf methods.
 * Dispatches on the inline opcode to the specific pattern generators above;
 * on success, emits the special exit sequence and clears all frame/spill
 * bookkeeping (these methods use no frame). Returns whether the special
 * case was generated.
 */
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      // Empty method; the MIR itself is the return.
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      // Method returns a narrow constant: materialize it in the return register.
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    // Special-case methods use no frame: zero out all spill/frame bookkeeping.
    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}
| 380 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 381 | /* |
| 382 | * Target-independent code generation. Use only high-level |
| 383 | * load/store utilities here, or target-dependent genXX() handlers |
| 384 | * when necessary. |
| 385 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 386 | void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 387 | RegLocation rl_src[3]; |
| 388 | RegLocation rl_dest = mir_graph_->GetBadLoc(); |
| 389 | RegLocation rl_result = mir_graph_->GetBadLoc(); |
| 390 | Instruction::Code opcode = mir->dalvikInsn.opcode; |
| 391 | int opt_flags = mir->optimization_flags; |
| 392 | uint32_t vB = mir->dalvikInsn.vB; |
| 393 | uint32_t vC = mir->dalvikInsn.vC; |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 394 | DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:" |
| 395 | << std::hex << current_dalvik_offset_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 396 | |
| 397 | // Prep Src and Dest locations. |
| 398 | int next_sreg = 0; |
| 399 | int next_loc = 0; |
Jean Christophe Beyler | cc794c3 | 2014-05-02 09:34:13 -0700 | [diff] [blame] | 400 | uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 401 | rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc(); |
| 402 | if (attrs & DF_UA) { |
| 403 | if (attrs & DF_A_WIDE) { |
| 404 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 405 | next_sreg+= 2; |
| 406 | } else { |
| 407 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 408 | next_sreg++; |
| 409 | } |
| 410 | } |
| 411 | if (attrs & DF_UB) { |
| 412 | if (attrs & DF_B_WIDE) { |
| 413 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 414 | next_sreg+= 2; |
| 415 | } else { |
| 416 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 417 | next_sreg++; |
| 418 | } |
| 419 | } |
| 420 | if (attrs & DF_UC) { |
| 421 | if (attrs & DF_C_WIDE) { |
| 422 | rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg); |
| 423 | } else { |
| 424 | rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg); |
| 425 | } |
| 426 | } |
| 427 | if (attrs & DF_DA) { |
| 428 | if (attrs & DF_A_WIDE) { |
| 429 | rl_dest = mir_graph_->GetDestWide(mir); |
| 430 | } else { |
| 431 | rl_dest = mir_graph_->GetDest(mir); |
| 432 | } |
| 433 | } |
| 434 | switch (opcode) { |
| 435 | case Instruction::NOP: |
| 436 | break; |
| 437 | |
| 438 | case Instruction::MOVE_EXCEPTION: |
| 439 | GenMoveException(rl_dest); |
| 440 | break; |
| 441 | |
| 442 | case Instruction::RETURN_VOID: |
| 443 | if (((cu_->access_flags & kAccConstructor) != 0) && |
| 444 | cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file, |
| 445 | cu_->class_def_idx)) { |
| 446 | GenMemBarrier(kStoreStore); |
| 447 | } |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 448 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 449 | GenSuspendTest(opt_flags); |
| 450 | } |
| 451 | break; |
| 452 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 453 | case Instruction::RETURN_OBJECT: |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 454 | DCHECK(rl_src[0].ref); |
| 455 | // Intentional fallthrough. |
| 456 | case Instruction::RETURN: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 457 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 458 | GenSuspendTest(opt_flags); |
| 459 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 460 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 461 | StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 462 | break; |
| 463 | |
| 464 | case Instruction::RETURN_WIDE: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 465 | if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 466 | GenSuspendTest(opt_flags); |
| 467 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 468 | DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0])); |
| 469 | StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 470 | break; |
| 471 | |
| 472 | case Instruction::MOVE_RESULT_WIDE: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 473 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 474 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 475 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 476 | StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 477 | break; |
| 478 | |
| 479 | case Instruction::MOVE_RESULT: |
| 480 | case Instruction::MOVE_RESULT_OBJECT: |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 481 | if ((opt_flags & MIR_INLINED) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 482 | break; // Nop - combined w/ previous invoke. |
Vladimir Marko | 9820b7c | 2014-01-02 16:40:37 +0000 | [diff] [blame] | 483 | } |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 484 | StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 485 | break; |
| 486 | |
| 487 | case Instruction::MOVE: |
| 488 | case Instruction::MOVE_OBJECT: |
| 489 | case Instruction::MOVE_16: |
| 490 | case Instruction::MOVE_OBJECT_16: |
| 491 | case Instruction::MOVE_FROM16: |
| 492 | case Instruction::MOVE_OBJECT_FROM16: |
| 493 | StoreValue(rl_dest, rl_src[0]); |
| 494 | break; |
| 495 | |
| 496 | case Instruction::MOVE_WIDE: |
| 497 | case Instruction::MOVE_WIDE_16: |
| 498 | case Instruction::MOVE_WIDE_FROM16: |
| 499 | StoreValueWide(rl_dest, rl_src[0]); |
| 500 | break; |
| 501 | |
| 502 | case Instruction::CONST: |
| 503 | case Instruction::CONST_4: |
| 504 | case Instruction::CONST_16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 505 | GenConst(rl_dest, vB); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 506 | break; |
| 507 | |
| 508 | case Instruction::CONST_HIGH16: |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 509 | GenConst(rl_dest, vB << 16); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 510 | break; |
| 511 | |
| 512 | case Instruction::CONST_WIDE_16: |
| 513 | case Instruction::CONST_WIDE_32: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 514 | GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 515 | break; |
| 516 | |
| 517 | case Instruction::CONST_WIDE: |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 518 | GenConstWide(rl_dest, mir->dalvikInsn.vB_wide); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 519 | break; |
| 520 | |
| 521 | case Instruction::CONST_WIDE_HIGH16: |
| 522 | rl_result = EvalLoc(rl_dest, kAnyReg, true); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 523 | LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 524 | StoreValueWide(rl_dest, rl_result); |
| 525 | break; |
| 526 | |
| 527 | case Instruction::MONITOR_ENTER: |
| 528 | GenMonitorEnter(opt_flags, rl_src[0]); |
| 529 | break; |
| 530 | |
| 531 | case Instruction::MONITOR_EXIT: |
| 532 | GenMonitorExit(opt_flags, rl_src[0]); |
| 533 | break; |
| 534 | |
| 535 | case Instruction::CHECK_CAST: { |
| 536 | GenCheckCast(mir->offset, vB, rl_src[0]); |
| 537 | break; |
| 538 | } |
| 539 | case Instruction::INSTANCE_OF: |
| 540 | GenInstanceof(vC, rl_dest, rl_src[0]); |
| 541 | break; |
| 542 | |
| 543 | case Instruction::NEW_INSTANCE: |
| 544 | GenNewInstance(vB, rl_dest); |
| 545 | break; |
| 546 | |
| 547 | case Instruction::THROW: |
| 548 | GenThrow(rl_src[0]); |
| 549 | break; |
| 550 | |
| 551 | case Instruction::ARRAY_LENGTH: |
| 552 | int len_offset; |
| 553 | len_offset = mirror::Array::LengthOffset().Int32Value(); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 554 | rl_src[0] = LoadValue(rl_src[0], kRefReg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 555 | GenNullCheck(rl_src[0].reg, opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 556 | rl_result = EvalLoc(rl_dest, kCoreReg, true); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 557 | Load32Disp(rl_src[0].reg, len_offset, rl_result.reg); |
Dave Allison | f943914 | 2014-03-27 15:10:22 -0700 | [diff] [blame] | 558 | MarkPossibleNullPointerException(opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 559 | StoreValue(rl_dest, rl_result); |
| 560 | break; |
| 561 | |
| 562 | case Instruction::CONST_STRING: |
| 563 | case Instruction::CONST_STRING_JUMBO: |
| 564 | GenConstString(vB, rl_dest); |
| 565 | break; |
| 566 | |
| 567 | case Instruction::CONST_CLASS: |
| 568 | GenConstClass(vB, rl_dest); |
| 569 | break; |
| 570 | |
| 571 | case Instruction::FILL_ARRAY_DATA: |
| 572 | GenFillArrayData(vB, rl_src[0]); |
| 573 | break; |
| 574 | |
| 575 | case Instruction::FILLED_NEW_ARRAY: |
| 576 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 577 | false /* not range */)); |
| 578 | break; |
| 579 | |
| 580 | case Instruction::FILLED_NEW_ARRAY_RANGE: |
| 581 | GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic, |
| 582 | true /* range */)); |
| 583 | break; |
| 584 | |
| 585 | case Instruction::NEW_ARRAY: |
| 586 | GenNewArray(vC, rl_dest, rl_src[0]); |
| 587 | break; |
| 588 | |
| 589 | case Instruction::GOTO: |
| 590 | case Instruction::GOTO_16: |
| 591 | case Instruction::GOTO_32: |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 592 | if (mir_graph_->IsBackedge(bb, bb->taken) && |
| 593 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 594 | GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 595 | } else { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 596 | OpUnconditionalBranch(&label_list[bb->taken]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 597 | } |
| 598 | break; |
| 599 | |
| 600 | case Instruction::PACKED_SWITCH: |
| 601 | GenPackedSwitch(mir, vB, rl_src[0]); |
| 602 | break; |
| 603 | |
| 604 | case Instruction::SPARSE_SWITCH: |
| 605 | GenSparseSwitch(mir, vB, rl_src[0]); |
| 606 | break; |
| 607 | |
| 608 | case Instruction::CMPL_FLOAT: |
| 609 | case Instruction::CMPG_FLOAT: |
| 610 | case Instruction::CMPL_DOUBLE: |
| 611 | case Instruction::CMPG_DOUBLE: |
| 612 | GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 613 | break; |
| 614 | |
| 615 | case Instruction::CMP_LONG: |
| 616 | GenCmpLong(rl_dest, rl_src[0], rl_src[1]); |
| 617 | break; |
| 618 | |
| 619 | case Instruction::IF_EQ: |
| 620 | case Instruction::IF_NE: |
| 621 | case Instruction::IF_LT: |
| 622 | case Instruction::IF_GE: |
| 623 | case Instruction::IF_GT: |
| 624 | case Instruction::IF_LE: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 625 | LIR* taken = &label_list[bb->taken]; |
| 626 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 627 | // Result known at compile time? |
| 628 | if (rl_src[0].is_const && rl_src[1].is_const) { |
| 629 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), |
| 630 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 631 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 632 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 633 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 634 | GenSuspendTest(opt_flags); |
| 635 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 636 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 637 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 638 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 639 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 640 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 641 | GenSuspendTest(opt_flags); |
| 642 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 643 | GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 644 | } |
| 645 | break; |
| 646 | } |
| 647 | |
| 648 | case Instruction::IF_EQZ: |
| 649 | case Instruction::IF_NEZ: |
| 650 | case Instruction::IF_LTZ: |
| 651 | case Instruction::IF_GEZ: |
| 652 | case Instruction::IF_GTZ: |
| 653 | case Instruction::IF_LEZ: { |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 654 | LIR* taken = &label_list[bb->taken]; |
| 655 | LIR* fall_through = &label_list[bb->fall_through]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 656 | // Result known at compile time? |
| 657 | if (rl_src[0].is_const) { |
| 658 | bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 659 | BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through; |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 660 | if (mir_graph_->IsBackedge(bb, target_id) && |
| 661 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 662 | GenSuspendTest(opt_flags); |
| 663 | } |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 664 | OpUnconditionalBranch(&label_list[target_id]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 665 | } else { |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 666 | if (mir_graph_->IsBackwardsBranch(bb) && |
| 667 | (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) || |
| 668 | !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 669 | GenSuspendTest(opt_flags); |
| 670 | } |
| 671 | GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through); |
| 672 | } |
| 673 | break; |
| 674 | } |
| 675 | |
| 676 | case Instruction::AGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 677 | GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3); |
| 678 | break; |
| 679 | case Instruction::AGET_OBJECT: |
| 680 | GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 681 | break; |
| 682 | case Instruction::AGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 683 | GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 684 | break; |
| 685 | case Instruction::AGET_BOOLEAN: |
| 686 | GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 687 | break; |
| 688 | case Instruction::AGET_BYTE: |
| 689 | GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0); |
| 690 | break; |
| 691 | case Instruction::AGET_CHAR: |
| 692 | GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 693 | break; |
| 694 | case Instruction::AGET_SHORT: |
| 695 | GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1); |
| 696 | break; |
| 697 | case Instruction::APUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 698 | GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 699 | break; |
| 700 | case Instruction::APUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 701 | GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 702 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 703 | case Instruction::APUT_OBJECT: { |
| 704 | bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]); |
| 705 | bool is_safe = is_null; // Always safe to store null. |
| 706 | if (!is_safe) { |
| 707 | // Check safety from verifier type information. |
Vladimir Marko | 2730db0 | 2014-01-27 11:15:17 +0000 | [diff] [blame] | 708 | const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit(); |
| 709 | is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 710 | } |
| 711 | if (is_null || is_safe) { |
| 712 | // Store of constant null doesn't require an assignability test and can be generated inline |
| 713 | // without fixed register usage or a card mark. |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 714 | GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 715 | } else { |
| 716 | GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]); |
| 717 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 718 | break; |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 719 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 720 | case Instruction::APUT_SHORT: |
| 721 | case Instruction::APUT_CHAR: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 722 | GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 723 | break; |
| 724 | case Instruction::APUT_BYTE: |
| 725 | case Instruction::APUT_BOOLEAN: |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 726 | GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 727 | break; |
| 728 | |
| 729 | case Instruction::IGET_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 730 | GenIGet(mir, opt_flags, kReference, rl_dest, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 731 | break; |
| 732 | |
| 733 | case Instruction::IGET_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 734 | GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 735 | break; |
| 736 | |
| 737 | case Instruction::IGET: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 738 | GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 739 | break; |
| 740 | |
| 741 | case Instruction::IGET_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 742 | GenIGet(mir, opt_flags, kUnsignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 743 | break; |
| 744 | |
| 745 | case Instruction::IGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 746 | GenIGet(mir, opt_flags, kSignedHalf, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 747 | break; |
| 748 | |
| 749 | case Instruction::IGET_BOOLEAN: |
| 750 | case Instruction::IGET_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 751 | GenIGet(mir, opt_flags, kUnsignedByte, rl_dest, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 752 | break; |
| 753 | |
| 754 | case Instruction::IPUT_WIDE: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 755 | GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 756 | break; |
| 757 | |
| 758 | case Instruction::IPUT_OBJECT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 759 | GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 760 | break; |
| 761 | |
| 762 | case Instruction::IPUT: |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 763 | GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 764 | break; |
| 765 | |
| 766 | case Instruction::IPUT_BOOLEAN: |
| 767 | case Instruction::IPUT_BYTE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 768 | GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 769 | break; |
| 770 | |
| 771 | case Instruction::IPUT_CHAR: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 772 | GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 773 | break; |
| 774 | |
| 775 | case Instruction::IPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 776 | GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 777 | break; |
| 778 | |
| 779 | case Instruction::SGET_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 780 | GenSget(mir, rl_dest, false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 781 | break; |
| 782 | case Instruction::SGET: |
| 783 | case Instruction::SGET_BOOLEAN: |
| 784 | case Instruction::SGET_BYTE: |
| 785 | case Instruction::SGET_CHAR: |
| 786 | case Instruction::SGET_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 787 | GenSget(mir, rl_dest, false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 788 | break; |
| 789 | |
| 790 | case Instruction::SGET_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 791 | GenSget(mir, rl_dest, true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 792 | break; |
| 793 | |
| 794 | case Instruction::SPUT_OBJECT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 795 | GenSput(mir, rl_src[0], false, true); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 796 | break; |
| 797 | |
| 798 | case Instruction::SPUT: |
| 799 | case Instruction::SPUT_BOOLEAN: |
| 800 | case Instruction::SPUT_BYTE: |
| 801 | case Instruction::SPUT_CHAR: |
| 802 | case Instruction::SPUT_SHORT: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 803 | GenSput(mir, rl_src[0], false, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 804 | break; |
| 805 | |
| 806 | case Instruction::SPUT_WIDE: |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 807 | GenSput(mir, rl_src[0], true, false); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 808 | break; |
| 809 | |
| 810 | case Instruction::INVOKE_STATIC_RANGE: |
| 811 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 812 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 813 | // If the invocation is not inlined, we can assume there is already a |
| 814 | // suspend check at the return site |
| 815 | mir_graph_->AppendGenSuspendTestList(bb); |
| 816 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 817 | break; |
| 818 | case Instruction::INVOKE_STATIC: |
| 819 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 820 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 821 | mir_graph_->AppendGenSuspendTestList(bb); |
| 822 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 823 | break; |
| 824 | |
| 825 | case Instruction::INVOKE_DIRECT: |
| 826 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 827 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 828 | mir_graph_->AppendGenSuspendTestList(bb); |
| 829 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 830 | break; |
| 831 | case Instruction::INVOKE_DIRECT_RANGE: |
| 832 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 833 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 834 | mir_graph_->AppendGenSuspendTestList(bb); |
| 835 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 836 | break; |
| 837 | |
| 838 | case Instruction::INVOKE_VIRTUAL: |
| 839 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 840 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 841 | mir_graph_->AppendGenSuspendTestList(bb); |
| 842 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 843 | break; |
| 844 | case Instruction::INVOKE_VIRTUAL_RANGE: |
| 845 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 846 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 847 | mir_graph_->AppendGenSuspendTestList(bb); |
| 848 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 849 | break; |
| 850 | |
| 851 | case Instruction::INVOKE_SUPER: |
| 852 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 853 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 854 | mir_graph_->AppendGenSuspendTestList(bb); |
| 855 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 856 | break; |
| 857 | case Instruction::INVOKE_SUPER_RANGE: |
| 858 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 859 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 860 | mir_graph_->AppendGenSuspendTestList(bb); |
| 861 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 862 | break; |
| 863 | |
| 864 | case Instruction::INVOKE_INTERFACE: |
| 865 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 866 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 867 | mir_graph_->AppendGenSuspendTestList(bb); |
| 868 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 869 | break; |
| 870 | case Instruction::INVOKE_INTERFACE_RANGE: |
| 871 | GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true)); |
Wei Jin | 04f4d8a | 2014-05-29 18:04:29 -0700 | [diff] [blame] | 872 | if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) { |
| 873 | mir_graph_->AppendGenSuspendTestList(bb); |
| 874 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 875 | break; |
| 876 | |
| 877 | case Instruction::NEG_INT: |
| 878 | case Instruction::NOT_INT: |
| 879 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 880 | break; |
| 881 | |
| 882 | case Instruction::NEG_LONG: |
| 883 | case Instruction::NOT_LONG: |
| 884 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 885 | break; |
| 886 | |
| 887 | case Instruction::NEG_FLOAT: |
| 888 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 889 | break; |
| 890 | |
| 891 | case Instruction::NEG_DOUBLE: |
| 892 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 893 | break; |
| 894 | |
| 895 | case Instruction::INT_TO_LONG: |
| 896 | GenIntToLong(rl_dest, rl_src[0]); |
| 897 | break; |
| 898 | |
| 899 | case Instruction::LONG_TO_INT: |
| 900 | rl_src[0] = UpdateLocWide(rl_src[0]); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 901 | rl_src[0] = NarrowRegLoc(rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 902 | StoreValue(rl_dest, rl_src[0]); |
| 903 | break; |
| 904 | |
| 905 | case Instruction::INT_TO_BYTE: |
| 906 | case Instruction::INT_TO_SHORT: |
| 907 | case Instruction::INT_TO_CHAR: |
| 908 | GenIntNarrowing(opcode, rl_dest, rl_src[0]); |
| 909 | break; |
| 910 | |
| 911 | case Instruction::INT_TO_FLOAT: |
| 912 | case Instruction::INT_TO_DOUBLE: |
| 913 | case Instruction::LONG_TO_FLOAT: |
| 914 | case Instruction::LONG_TO_DOUBLE: |
| 915 | case Instruction::FLOAT_TO_INT: |
| 916 | case Instruction::FLOAT_TO_LONG: |
| 917 | case Instruction::FLOAT_TO_DOUBLE: |
| 918 | case Instruction::DOUBLE_TO_INT: |
| 919 | case Instruction::DOUBLE_TO_LONG: |
| 920 | case Instruction::DOUBLE_TO_FLOAT: |
| 921 | GenConversion(opcode, rl_dest, rl_src[0]); |
| 922 | break; |
| 923 | |
| 924 | |
| 925 | case Instruction::ADD_INT: |
| 926 | case Instruction::ADD_INT_2ADDR: |
| 927 | case Instruction::MUL_INT: |
| 928 | case Instruction::MUL_INT_2ADDR: |
| 929 | case Instruction::AND_INT: |
| 930 | case Instruction::AND_INT_2ADDR: |
| 931 | case Instruction::OR_INT: |
| 932 | case Instruction::OR_INT_2ADDR: |
| 933 | case Instruction::XOR_INT: |
| 934 | case Instruction::XOR_INT_2ADDR: |
| 935 | if (rl_src[0].is_const && |
| 936 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]))) { |
| 937 | GenArithOpIntLit(opcode, rl_dest, rl_src[1], |
| 938 | mir_graph_->ConstantValue(rl_src[0].orig_sreg)); |
| 939 | } else if (rl_src[1].is_const && |
| 940 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 941 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], |
| 942 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
| 943 | } else { |
| 944 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 945 | } |
| 946 | break; |
| 947 | |
| 948 | case Instruction::SUB_INT: |
| 949 | case Instruction::SUB_INT_2ADDR: |
| 950 | case Instruction::DIV_INT: |
| 951 | case Instruction::DIV_INT_2ADDR: |
| 952 | case Instruction::REM_INT: |
| 953 | case Instruction::REM_INT_2ADDR: |
| 954 | case Instruction::SHL_INT: |
| 955 | case Instruction::SHL_INT_2ADDR: |
| 956 | case Instruction::SHR_INT: |
| 957 | case Instruction::SHR_INT_2ADDR: |
| 958 | case Instruction::USHR_INT: |
| 959 | case Instruction::USHR_INT_2ADDR: |
| 960 | if (rl_src[1].is_const && |
| 961 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]))) { |
| 962 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1])); |
| 963 | } else { |
| 964 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 965 | } |
| 966 | break; |
| 967 | |
| 968 | case Instruction::ADD_LONG: |
| 969 | case Instruction::SUB_LONG: |
| 970 | case Instruction::AND_LONG: |
| 971 | case Instruction::OR_LONG: |
| 972 | case Instruction::XOR_LONG: |
| 973 | case Instruction::ADD_LONG_2ADDR: |
| 974 | case Instruction::SUB_LONG_2ADDR: |
| 975 | case Instruction::AND_LONG_2ADDR: |
| 976 | case Instruction::OR_LONG_2ADDR: |
| 977 | case Instruction::XOR_LONG_2ADDR: |
| 978 | if (rl_src[0].is_const || rl_src[1].is_const) { |
| 979 | GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 980 | break; |
| 981 | } |
| 982 | // Note: intentional fallthrough. |
| 983 | |
| 984 | case Instruction::MUL_LONG: |
| 985 | case Instruction::DIV_LONG: |
| 986 | case Instruction::REM_LONG: |
| 987 | case Instruction::MUL_LONG_2ADDR: |
| 988 | case Instruction::DIV_LONG_2ADDR: |
| 989 | case Instruction::REM_LONG_2ADDR: |
| 990 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 991 | break; |
| 992 | |
| 993 | case Instruction::SHL_LONG: |
| 994 | case Instruction::SHR_LONG: |
| 995 | case Instruction::USHR_LONG: |
| 996 | case Instruction::SHL_LONG_2ADDR: |
| 997 | case Instruction::SHR_LONG_2ADDR: |
| 998 | case Instruction::USHR_LONG_2ADDR: |
| 999 | if (rl_src[1].is_const) { |
| 1000 | GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1001 | } else { |
| 1002 | GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1003 | } |
| 1004 | break; |
| 1005 | |
| 1006 | case Instruction::ADD_FLOAT: |
| 1007 | case Instruction::SUB_FLOAT: |
| 1008 | case Instruction::MUL_FLOAT: |
| 1009 | case Instruction::DIV_FLOAT: |
| 1010 | case Instruction::REM_FLOAT: |
| 1011 | case Instruction::ADD_FLOAT_2ADDR: |
| 1012 | case Instruction::SUB_FLOAT_2ADDR: |
| 1013 | case Instruction::MUL_FLOAT_2ADDR: |
| 1014 | case Instruction::DIV_FLOAT_2ADDR: |
| 1015 | case Instruction::REM_FLOAT_2ADDR: |
| 1016 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1017 | break; |
| 1018 | |
| 1019 | case Instruction::ADD_DOUBLE: |
| 1020 | case Instruction::SUB_DOUBLE: |
| 1021 | case Instruction::MUL_DOUBLE: |
| 1022 | case Instruction::DIV_DOUBLE: |
| 1023 | case Instruction::REM_DOUBLE: |
| 1024 | case Instruction::ADD_DOUBLE_2ADDR: |
| 1025 | case Instruction::SUB_DOUBLE_2ADDR: |
| 1026 | case Instruction::MUL_DOUBLE_2ADDR: |
| 1027 | case Instruction::DIV_DOUBLE_2ADDR: |
| 1028 | case Instruction::REM_DOUBLE_2ADDR: |
| 1029 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1030 | break; |
| 1031 | |
| 1032 | case Instruction::RSUB_INT: |
| 1033 | case Instruction::ADD_INT_LIT16: |
| 1034 | case Instruction::MUL_INT_LIT16: |
| 1035 | case Instruction::DIV_INT_LIT16: |
| 1036 | case Instruction::REM_INT_LIT16: |
| 1037 | case Instruction::AND_INT_LIT16: |
| 1038 | case Instruction::OR_INT_LIT16: |
| 1039 | case Instruction::XOR_INT_LIT16: |
| 1040 | case Instruction::ADD_INT_LIT8: |
| 1041 | case Instruction::RSUB_INT_LIT8: |
| 1042 | case Instruction::MUL_INT_LIT8: |
| 1043 | case Instruction::DIV_INT_LIT8: |
| 1044 | case Instruction::REM_INT_LIT8: |
| 1045 | case Instruction::AND_INT_LIT8: |
| 1046 | case Instruction::OR_INT_LIT8: |
| 1047 | case Instruction::XOR_INT_LIT8: |
| 1048 | case Instruction::SHL_INT_LIT8: |
| 1049 | case Instruction::SHR_INT_LIT8: |
| 1050 | case Instruction::USHR_INT_LIT8: |
| 1051 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC); |
| 1052 | break; |
| 1053 | |
| 1054 | default: |
| 1055 | LOG(FATAL) << "Unexpected opcode: " << opcode; |
| 1056 | } |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 1057 | DCHECK(CheckCorePoolSanity()); |
Brian Carlstrom | 1895ea3 | 2013-07-18 13:28:37 -0700 | [diff] [blame] | 1058 | } // NOLINT(readability/fn_size) |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1059 | |
| 1060 | // Process extended MIR instructions |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1061 | void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1062 | switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) { |
| 1063 | case kMirOpCopy: { |
| 1064 | RegLocation rl_src = mir_graph_->GetSrc(mir, 0); |
| 1065 | RegLocation rl_dest = mir_graph_->GetDest(mir); |
| 1066 | StoreValue(rl_dest, rl_src); |
| 1067 | break; |
| 1068 | } |
| 1069 | case kMirOpFusedCmplFloat: |
| 1070 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/); |
| 1071 | break; |
| 1072 | case kMirOpFusedCmpgFloat: |
| 1073 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/); |
| 1074 | break; |
| 1075 | case kMirOpFusedCmplDouble: |
| 1076 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/); |
| 1077 | break; |
| 1078 | case kMirOpFusedCmpgDouble: |
| 1079 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/); |
| 1080 | break; |
| 1081 | case kMirOpFusedCmpLong: |
| 1082 | GenFusedLongCmpBranch(bb, mir); |
| 1083 | break; |
| 1084 | case kMirOpSelect: |
| 1085 | GenSelect(bb, mir); |
| 1086 | break; |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1087 | case kMirOpPhi: |
| 1088 | case kMirOpNop: |
| 1089 | case kMirOpNullCheck: |
| 1090 | case kMirOpRangeCheck: |
| 1091 | case kMirOpDivZeroCheck: |
| 1092 | case kMirOpCheck: |
| 1093 | case kMirOpCheckPart2: |
| 1094 | // Ignore these known opcodes |
| 1095 | break; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1096 | default: |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1097 | // Give the backends a chance to handle unknown extended MIR opcodes. |
| 1098 | GenMachineSpecificExtendedMethodMIR(bb, mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1099 | break; |
| 1100 | } |
| 1101 | } |
| 1102 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1103 | void Mir2Lir::GenPrintLabel(MIR* mir) { |
| 1104 | // Mark the beginning of a Dalvik instruction for line tracking. |
| 1105 | if (cu_->verbose) { |
| 1106 | char* inst_str = mir_graph_->GetDalvikDisassembly(mir); |
| 1107 | MarkBoundary(mir->offset, inst_str); |
| 1108 | } |
| 1109 | } |
| 1110 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1111 | // Handle the content in each basic block. |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1112 | bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1113 | if (bb->block_type == kDead) return false; |
| 1114 | current_dalvik_offset_ = bb->start_offset; |
| 1115 | MIR* mir; |
| 1116 | int block_id = bb->id; |
| 1117 | |
| 1118 | block_label_list_[block_id].operands[0] = bb->start_offset; |
| 1119 | |
| 1120 | // Insert the block label. |
| 1121 | block_label_list_[block_id].opcode = kPseudoNormalBlockLabel; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1122 | block_label_list_[block_id].flags.fixup = kFixupLabel; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1123 | AppendLIR(&block_label_list_[block_id]); |
| 1124 | |
| 1125 | LIR* head_lir = NULL; |
| 1126 | |
| 1127 | // If this is a catch block, export the start address. |
| 1128 | if (bb->catch_entry) { |
| 1129 | head_lir = NewLIR0(kPseudoExportedPC); |
| 1130 | } |
| 1131 | |
| 1132 | // Free temp registers and reset redundant store tracking. |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1133 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1134 | |
| 1135 | if (bb->block_type == kEntryBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1136 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1137 | int start_vreg = cu_->num_dalvik_registers - cu_->num_ins; |
| 1138 | GenEntrySequence(&mir_graph_->reg_location_[start_vreg], |
| 1139 | mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]); |
| 1140 | } else if (bb->block_type == kExitBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1141 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1142 | GenExitSequence(); |
| 1143 | } |
| 1144 | |
| 1145 | for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) { |
| 1146 | ResetRegPool(); |
| 1147 | if (cu_->disable_opt & (1 << kTrackLiveTemps)) { |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1148 | ClobberAllTemps(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1149 | // Reset temp allocation to minimize differences when A/B testing. |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1150 | reg_pool_->ResetNextTemp(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1151 | } |
| 1152 | |
| 1153 | if (cu_->disable_opt & (1 << kSuppressLoads)) { |
| 1154 | ResetDefTracking(); |
| 1155 | } |
| 1156 | |
| 1157 | // Reset temp tracking sanity check. |
| 1158 | if (kIsDebugBuild) { |
| 1159 | live_sreg_ = INVALID_SREG; |
| 1160 | } |
| 1161 | |
| 1162 | current_dalvik_offset_ = mir->offset; |
| 1163 | int opcode = mir->dalvikInsn.opcode; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1164 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1165 | GenPrintLabel(mir); |
| 1166 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1167 | // Remember the first LIR for this block. |
| 1168 | if (head_lir == NULL) { |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1169 | head_lir = &block_label_list_[bb->id]; |
| 1170 | // Set the first label as a scheduling barrier. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1171 | DCHECK(!head_lir->flags.use_def_invalid); |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1172 | head_lir->u.m.def_mask = &kEncodeAll; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1173 | } |
| 1174 | |
| 1175 | if (opcode == kMirOpCheck) { |
| 1176 | // Combine check and work halves of throwing instruction. |
| 1177 | MIR* work_half = mir->meta.throw_insn; |
| 1178 | mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode; |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1179 | mir->meta = work_half->meta; // Whatever the work_half had, we need to copy it. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1180 | opcode = work_half->dalvikInsn.opcode; |
| 1181 | SSARepresentation* ssa_rep = work_half->ssa_rep; |
| 1182 | work_half->ssa_rep = mir->ssa_rep; |
| 1183 | mir->ssa_rep = ssa_rep; |
| 1184 | work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2); |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1185 | work_half->meta.throw_insn = mir; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1186 | } |
| 1187 | |
Jean Christophe Beyler | 2ab40eb | 2014-06-02 09:03:14 -0700 | [diff] [blame^] | 1188 | if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1189 | HandleExtendedMethodMIR(bb, mir); |
| 1190 | continue; |
| 1191 | } |
| 1192 | |
| 1193 | CompileDalvikInstruction(mir, bb, block_label_list_); |
| 1194 | } |
| 1195 | |
| 1196 | if (head_lir) { |
| 1197 | // Eliminate redundant loads/stores and delay stores into later slots. |
| 1198 | ApplyLocalOptimizations(head_lir, last_lir_insn_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1199 | } |
| 1200 | return false; |
| 1201 | } |
| 1202 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1203 | bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) { |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1204 | cu_->NewTimingSplit("SpecialMIR2LIR"); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1205 | // Find the first DalvikByteCode block. |
| 1206 | int num_reachable_blocks = mir_graph_->GetNumReachableBlocks(); |
| 1207 | BasicBlock*bb = NULL; |
| 1208 | for (int idx = 0; idx < num_reachable_blocks; idx++) { |
| 1209 | // TODO: no direct access of growable lists. |
| 1210 | int dfs_index = mir_graph_->GetDfsOrder()->Get(idx); |
| 1211 | bb = mir_graph_->GetBasicBlock(dfs_index); |
| 1212 | if (bb->block_type == kDalvikByteCode) { |
| 1213 | break; |
| 1214 | } |
| 1215 | } |
| 1216 | if (bb == NULL) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1217 | return false; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1218 | } |
| 1219 | DCHECK_EQ(bb->start_offset, 0); |
| 1220 | DCHECK(bb->first_mir_insn != NULL); |
| 1221 | |
| 1222 | // Get the first instruction. |
| 1223 | MIR* mir = bb->first_mir_insn; |
| 1224 | |
| 1225 | // Free temp registers and reset redundant store tracking. |
| 1226 | ResetRegPool(); |
| 1227 | ResetDefTracking(); |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1228 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1229 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1230 | return GenSpecialCase(bb, mir, special); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1231 | } |
| 1232 | |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1233 | void Mir2Lir::MethodMIR2LIR() { |
buzbee | a61f495 | 2013-08-23 14:27:06 -0700 | [diff] [blame] | 1234 | cu_->NewTimingSplit("MIR2LIR"); |
| 1235 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1236 | // Hold the labels of each block. |
| 1237 | block_label_list_ = |
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 1238 | static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(), |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 1239 | kArenaAllocLIR)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1240 | |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1241 | PreOrderDfsIterator iter(mir_graph_); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1242 | BasicBlock* curr_bb = iter.Next(); |
| 1243 | BasicBlock* next_bb = iter.Next(); |
| 1244 | while (curr_bb != NULL) { |
| 1245 | MethodBlockCodeGen(curr_bb); |
| 1246 | // If the fall_through block is no longer laid out consecutively, drop in a branch. |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1247 | BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through); |
| 1248 | if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) { |
| 1249 | OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1250 | } |
| 1251 | curr_bb = next_bb; |
| 1252 | do { |
| 1253 | next_bb = iter.Next(); |
| 1254 | } while ((next_bb != NULL) && (next_bb->block_type == kDead)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1255 | } |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1256 | HandleSlowPaths(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1257 | } |
| 1258 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1259 | // |
| 1260 | // LIR Slow Path |
| 1261 | // |
| 1262 | |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1263 | LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) { |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1264 | m2l_->SetCurrentDexPc(current_dex_pc_); |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1265 | LIR* target = m2l_->NewLIR0(opcode); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1266 | fromfast_->target = target; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1267 | return target; |
| 1268 | } |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1269 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1270 | |
| 1271 | void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, |
| 1272 | bool fail, bool report) |
| 1273 | const { |
| 1274 | if (rs.Valid()) { |
| 1275 | if (ref == RefCheck::kCheckRef) { |
| 1276 | if (cu_->target64 && !rs.Is64Bit()) { |
| 1277 | if (fail) { |
| 1278 | CHECK(false) << "Reg storage not 64b for ref."; |
| 1279 | } else if (report) { |
| 1280 | LOG(WARNING) << "Reg storage not 64b for ref."; |
| 1281 | } |
| 1282 | } |
| 1283 | } |
| 1284 | if (wide == WidenessCheck::kCheckWide) { |
| 1285 | if (!rs.Is64Bit()) { |
| 1286 | if (fail) { |
| 1287 | CHECK(false) << "Reg storage not 64b for wide."; |
| 1288 | } else if (report) { |
| 1289 | LOG(WARNING) << "Reg storage not 64b for wide."; |
| 1290 | } |
| 1291 | } |
| 1292 | } |
| 1293 | // A tighter check would be nice, but for now soft-float will not check float at all. |
| 1294 | if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) { |
| 1295 | if (!rs.IsFloat()) { |
| 1296 | if (fail) { |
| 1297 | CHECK(false) << "Reg storage not float for fp."; |
| 1298 | } else if (report) { |
| 1299 | LOG(WARNING) << "Reg storage not float for fp."; |
| 1300 | } |
| 1301 | } |
| 1302 | } else if (fp == FPCheck::kCheckNotFP) { |
| 1303 | if (rs.IsFloat()) { |
| 1304 | if (fail) { |
| 1305 | CHECK(false) << "Reg storage float for not-fp."; |
| 1306 | } else if (report) { |
| 1307 | LOG(WARNING) << "Reg storage float for not-fp."; |
| 1308 | } |
| 1309 | } |
| 1310 | } |
| 1311 | } |
| 1312 | } |
| 1313 | |
| 1314 | void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const { |
| 1315 | // Regrettably can't use the fp part of rl, as that is not really indicative of where a value |
| 1316 | // will be stored. |
| 1317 | CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide, |
| 1318 | rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report); |
| 1319 | } |
| 1320 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1321 | } // namespace art |