/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex/dataflow_iterator-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "mir_to_lir-inl.h"
#include "primitive.h"
#include "thread-inl.h"

namespace art {

RegisterClass Mir2Lir::ShortyToRegClass(char shorty_type) {
  RegisterClass res;
  switch (shorty_type) {
    case 'L':
      res = kRefReg;
      break;
    case 'F':
      // Expected fallthrough.
    case 'D':
      res = kFPReg;
      break;
    default:
      res = kCoreReg;
  }
  return res;
}
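
// Illustrative note: the shorty is ART's compact signature string, e.g. a method returning
// double and taking (Object, float) has shorty "DLF". Callers in this file only pass the
// return-type character (cu_->shorty[0]), so e.g. 'D' selects kFPReg for the return location
// and 'L' selects kRefReg.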

RegisterClass Mir2Lir::LocToRegClass(RegLocation loc) {
  RegisterClass res;
  if (loc.fp) {
    DCHECK(!loc.ref) << "At most, one of ref/fp may be set";
    res = kFPReg;
  } else if (loc.ref) {
    res = kRefReg;
  } else {
    res = kCoreReg;
  }
  return res;
}

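// Lock the physical register(s), if any, that the target maps to the incoming argument at
// |in_position| (and to its high half when |wide|), so that later temp allocations cannot
// clobber the argument before LoadArg()/LoadArgDirect() reads it.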
void Mir2Lir::LockArg(int in_position, bool wide) {
  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  if (reg_arg_low.Valid()) {
    LockTemp(reg_arg_low);
  }
  if (reg_arg_high.Valid() && reg_arg_low.NotExactlyEquals(reg_arg_high)) {
    LockTemp(reg_arg_high);
  }
}

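// Materialize the incoming argument at |in_position| in a register of |reg_class| and return
// that register: the value is taken from the physical argument register(s) it was passed in,
// or loaded from the stack (the caller's out area) if it was passed in memory.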
// TODO: simplify when 32-bit targets go hard-float.
RegStorage Mir2Lir::LoadArg(int in_position, RegisterClass reg_class, bool wide) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);

  if (cu_->instruction_set == kX86) {
    /*
     * On x86, the call instruction pushes the return address, moving the stack pointer.
     * Thus, we add another 4 bytes to locate the caller's outs (the callee's ins).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * On x86-64, the call instruction pushes the return address, moving the stack pointer.
     * Thus, we add another 8 bytes to locate the caller's outs (the callee's ins).
     */
    offset += sizeof(uint64_t);
  }

  if (cu_->target64) {
    RegStorage reg_arg = GetArgMappingToPhysicalReg(in_position);
    if (!reg_arg.Valid()) {
      RegStorage new_reg =
          wide ? AllocTypedTempWide(false, reg_class) : AllocTypedTemp(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);
      return new_reg;
    } else {
      // Check if we need to copy the arg to a different reg_class.
      if (!RegClassMatches(reg_class, reg_arg)) {
        if (wide) {
          RegStorage new_reg = AllocTypedTempWide(false, reg_class);
          OpRegCopyWide(new_reg, reg_arg);
          reg_arg = new_reg;
        } else {
          RegStorage new_reg = AllocTypedTemp(false, reg_class);
          OpRegCopy(new_reg, reg_arg);
          reg_arg = new_reg;
        }
      }
    }
    return reg_arg;
  }

  RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position);
  RegStorage reg_arg_high = wide ? GetArgMappingToPhysicalReg(in_position + 1) :
      RegStorage::InvalidReg();

  // If the VR is wide and there is no register for the high part, we need to load it.
  if (wide && !reg_arg_high.Valid()) {
    // If the low part is not in a reg, we allocate a pair. Otherwise, we just load to high reg.
    if (!reg_arg_low.Valid()) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);
      return new_regs;  // The reg_class is OK, we can return.
    } else {
      // Assume that no ABI allows splitting a wide fp reg between a narrow fp reg and memory,
      // i.e. the low part is in a core reg. Load the second part in a core reg as well for now.
      DCHECK(!reg_arg_low.IsFloat());
      reg_arg_high = AllocTemp();
      int offset_high = offset + sizeof(uint32_t);
      Load32Disp(TargetPtrReg(kSp), offset_high, reg_arg_high);
      // Continue below to check the reg_class.
    }
  }

  // If the low part is not in a register yet, we need to load it.
  if (!reg_arg_low.Valid()) {
    // Assume that if the low part of a wide arg is passed in memory, so is the high part,
    // thus we don't get here for wide args as it's handled above. Big-endian ABIs could
    // conceivably break this assumption but Android supports only little-endian architectures.
    DCHECK(!wide);
    reg_arg_low = AllocTypedTemp(false, reg_class);
    Load32Disp(TargetPtrReg(kSp), offset, reg_arg_low);
    return reg_arg_low;  // The reg_class is OK, we can return.
  }

  RegStorage reg_arg = wide ? RegStorage::MakeRegPair(reg_arg_low, reg_arg_high) : reg_arg_low;
  // Check if we need to copy the arg to a different reg_class.
  if (!RegClassMatches(reg_class, reg_arg)) {
    if (wide) {
      RegStorage new_regs = AllocTypedTempWide(false, reg_class);
      OpRegCopyWide(new_regs, reg_arg);
      reg_arg = new_regs;
    } else {
      RegStorage new_reg = AllocTypedTemp(false, reg_class);
      OpRegCopy(new_reg, reg_arg);
      reg_arg = new_reg;
    }
  }
  return reg_arg;
}

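// Like LoadArg(), but the value is moved directly into the fixed registers described by
// |rl_dest| (the return location when called from the special-case generators below).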
// TODO: simplify when 32-bit targets go hard-float.
void Mir2Lir::LoadArgDirect(int in_position, RegLocation rl_dest) {
  ScopedMemRefType mem_ref_type(this, ResourceMask::kDalvikReg);
  int offset = StackVisitor::GetOutVROffset(in_position, cu_->instruction_set);
  if (cu_->instruction_set == kX86) {
    /*
     * On x86, the call instruction pushes the return address, moving the stack pointer.
     * Thus, we add another 4 bytes to locate the caller's outs (the callee's ins).
     */
    offset += sizeof(uint32_t);
  }

  if (cu_->instruction_set == kX86_64) {
    /*
     * On x86-64, the call instruction pushes the return address, moving the stack pointer.
     * Thus, we add another 8 bytes to locate the caller's outs (the callee's ins).
     */
    offset += sizeof(uint64_t);
  }
| 182 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 183 | if (!rl_dest.wide) { |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 184 | RegStorage reg = GetArgMappingToPhysicalReg(in_position); |
| 185 | if (reg.Valid()) { |
| 186 | OpRegCopy(rl_dest.reg, reg); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 187 | } else { |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 188 | Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 189 | } |
| 190 | } else { |
buzbee | 33ae558 | 2014-06-12 14:56:32 -0700 | [diff] [blame] | 191 | if (cu_->target64) { |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 192 | RegStorage reg = GetArgMappingToPhysicalReg(in_position); |
| 193 | if (reg.Valid()) { |
| 194 | OpRegCopy(rl_dest.reg, reg); |
| 195 | } else { |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 196 | LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile); |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 197 | } |
| 198 | return; |
| 199 | } |
| 200 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 201 | RegStorage reg_arg_low = GetArgMappingToPhysicalReg(in_position); |
| 202 | RegStorage reg_arg_high = GetArgMappingToPhysicalReg(in_position + 1); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 203 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 204 | if (reg_arg_low.Valid() && reg_arg_high.Valid()) { |
| 205 | OpRegCopyWide(rl_dest.reg, RegStorage::MakeRegPair(reg_arg_low, reg_arg_high)); |
| 206 | } else if (reg_arg_low.Valid() && !reg_arg_high.Valid()) { |
| 207 | OpRegCopy(rl_dest.reg, reg_arg_low); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 208 | int offset_high = offset + sizeof(uint32_t); |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 209 | Load32Disp(TargetPtrReg(kSp), offset_high, rl_dest.reg.GetHigh()); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 210 | } else if (!reg_arg_low.Valid() && reg_arg_high.Valid()) { |
| 211 | OpRegCopy(rl_dest.reg.GetHigh(), reg_arg_high); |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 212 | Load32Disp(TargetPtrReg(kSp), offset, rl_dest.reg.GetLow()); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 213 | } else { |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 214 | LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 215 | } |
| 216 | } |
| 217 | } |
| 218 | |
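// Try to emit the whole body of a simple getter of the form "return this.field;" as a
// frameless special case. Returns false if the pattern is not handled here, e.g. when the
// receiver is a different argument than "this" and would need an explicit null check.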
bool Mir2Lir::GenSpecialIGet(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }

  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IGetVariant(Instruction::IGET_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegisterClass ret_reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = IsWide(size) ? GetReturnWide(ret_reg_class) : GetReturn(ret_reg_class);
  RegStorage r_result = rl_dest.reg;
  if (!RegClassMatches(reg_class, r_result)) {
    r_result = IsWide(size) ? AllocTypedTempWide(rl_dest.fp, reg_class)
                            : AllocTypedTemp(rl_dest.fp, reg_class);
  }
  if (IsRef(size)) {
    LoadRefDisp(reg_obj, data.field_offset, r_result, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    LoadBaseDisp(reg_obj, data.field_offset, r_result, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (r_result.NotExactlyEquals(rl_dest.reg)) {
    if (IsWide(size)) {
      OpRegCopyWide(rl_dest.reg, r_result);
    } else {
      OpRegCopy(rl_dest.reg, r_result);
    }
  }
  return true;
}

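// Setter counterpart of GenSpecialIGet() for methods of the form "this.field = value;".
// Returns false when the receiver is not "this" or when the setter also returns one of its
// arguments, neither of which is handled here.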
bool Mir2Lir::GenSpecialIPut(MIR* mir, const InlineMethod& special) {
  // FastInstance() already checked by DexFileMethodInliner.
  const InlineIGetIPutData& data = special.d.ifield_data;
  if (data.method_is_static != 0u || data.object_arg != 0u) {
    // The object is not "this" and has to be null-checked.
    return false;
  }
  if (data.return_arg_plus1 != 0u) {
    // The setter returns a method argument which we don't support here.
    return false;
  }

  OpSize size = k32;
  switch (data.op_variant) {
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_OBJECT):
      size = kReference;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_WIDE):
      size = k64;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_SHORT):
      size = kSignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_CHAR):
      size = kUnsignedHalf;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BYTE):
      size = kSignedByte;
      break;
    case InlineMethodAnalyser::IPutVariant(Instruction::IPUT_BOOLEAN):
      size = kUnsignedByte;
      break;
  }

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.object_arg);
  LockArg(data.src_arg, IsWide(size));
  RegStorage reg_obj = LoadArg(data.object_arg, kRefReg);
  RegisterClass reg_class = RegClassForFieldLoadStore(size, data.is_volatile);
  RegStorage reg_src = LoadArg(data.src_arg, reg_class, IsWide(size));
  if (IsRef(size)) {
    StoreRefDisp(reg_obj, data.field_offset, reg_src, data.is_volatile ? kVolatile : kNotVolatile);
  } else {
    StoreBaseDisp(reg_obj, data.field_offset, reg_src, size, data.is_volatile ? kVolatile :
        kNotVolatile);
  }
  if (IsRef(size)) {
    MarkGCCard(reg_src, reg_obj);
  }
  return true;
}

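// Special case for an identity method that simply returns one of its arguments: lock the
// argument and load it straight into the return location.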
bool Mir2Lir::GenSpecialIdentity(MIR* mir, const InlineMethod& special) {
  const InlineReturnArgData& data = special.d.return_data;
  bool wide = (data.is_wide != 0u);

  // Point of no return - no aborts after this
  GenPrintLabel(mir);
  LockArg(data.arg, wide);
  RegisterClass reg_class = ShortyToRegClass(cu_->shorty[0]);
  RegLocation rl_dest = wide ? GetReturnWide(reg_class) : GetReturn(reg_class);
  LoadArgDirect(data.arg, rl_dest);
  return true;
}

/*
 * Special-case code generation for simple non-throwing leaf methods.
 */
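// The patterns handled here are an empty (return-void) method, returning a non-wide constant,
// returning one of the arguments, and the simple getter/setter forms above; on success the
// method is emitted with no spills and a zero-size frame.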
bool Mir2Lir::GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special) {
  DCHECK(special.flags & kInlineSpecial);
  current_dalvik_offset_ = mir->offset;
  MIR* return_mir = nullptr;
  bool successful = false;

  switch (special.opcode) {
    case kInlineOpNop:
      successful = true;
      DCHECK_EQ(mir->dalvikInsn.opcode, Instruction::RETURN_VOID);
      return_mir = mir;
      break;
    case kInlineOpNonWideConst: {
      successful = true;
      RegLocation rl_dest = GetReturn(ShortyToRegClass(cu_->shorty[0]));
      GenPrintLabel(mir);
      LoadConstant(rl_dest.reg, static_cast<int>(special.d.data));
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    }
    case kInlineOpReturnArg:
      successful = GenSpecialIdentity(mir, special);
      return_mir = mir;
      break;
    case kInlineOpIGet:
      successful = GenSpecialIGet(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    case kInlineOpIPut:
      successful = GenSpecialIPut(mir, special);
      return_mir = bb->GetNextUnconditionalMir(mir_graph_, mir);
      break;
    default:
      break;
  }

  if (successful) {
    if (kIsDebugBuild) {
      // Clear unreachable catch entries.
      mir_graph_->catches_.clear();
    }

    // Handle verbosity for return MIR.
    if (return_mir != nullptr) {
      current_dalvik_offset_ = return_mir->offset;
      // Not handling special identity case because it already generated code as part
      // of the return. The label should have been added before any code was generated.
      if (special.opcode != kInlineOpReturnArg) {
        GenPrintLabel(return_mir);
      }
    }
    GenSpecialExitSequence();

    core_spill_mask_ = 0;
    num_core_spills_ = 0;
    fp_spill_mask_ = 0;
    num_fp_spills_ = 0;
    frame_size_ = 0;
    core_vmap_table_.clear();
    fp_vmap_table_.clear();
  }

  return successful;
}

/*
 * Target-independent code generation. Use only high-level
 * load/store utilities here, or target-dependent genXX() handlers
 * when necessary.
 */
void Mir2Lir::CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list) {
  RegLocation rl_src[3];
  RegLocation rl_dest = mir_graph_->GetBadLoc();
  RegLocation rl_result = mir_graph_->GetBadLoc();
  Instruction::Code opcode = mir->dalvikInsn.opcode;
  int opt_flags = mir->optimization_flags;
  uint32_t vB = mir->dalvikInsn.vB;
  uint32_t vC = mir->dalvikInsn.vC;
  DCHECK(CheckCorePoolSanity()) << PrettyMethod(cu_->method_idx, *cu_->dex_file) << " @ 0x:"
                                << std::hex << current_dalvik_offset_;

  // Prep Src and Dest locations.
  int next_sreg = 0;
  int next_loc = 0;
  uint64_t attrs = MIRGraph::GetDataFlowAttributes(opcode);
  rl_src[0] = rl_src[1] = rl_src[2] = mir_graph_->GetBadLoc();
  if (attrs & DF_UA) {
    if (attrs & DF_A_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UB) {
    if (attrs & DF_B_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
      next_sreg += 2;
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
      next_sreg++;
    }
  }
  if (attrs & DF_UC) {
    if (attrs & DF_C_WIDE) {
      rl_src[next_loc++] = mir_graph_->GetSrcWide(mir, next_sreg);
    } else {
      rl_src[next_loc++] = mir_graph_->GetSrc(mir, next_sreg);
    }
  }
  if (attrs & DF_DA) {
    if (attrs & DF_A_WIDE) {
      rl_dest = mir_graph_->GetDestWide(mir);
    } else {
      rl_dest = mir_graph_->GetDest(mir);
    }
  }
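  // At this point rl_src[0..next_loc-1] and rl_dest describe the instruction's uses and def,
  // as derived from its dataflow attributes; the switch below dispatches to the matching
  // generator.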
  switch (opcode) {
    case Instruction::NOP:
      break;

    case Instruction::MOVE_EXCEPTION:
      GenMoveException(rl_dest);
      break;

    case Instruction::RETURN_VOID:
      if (((cu_->access_flags & kAccConstructor) != 0) &&
          cu_->compiler_driver->RequiresConstructorBarrier(Thread::Current(), cu_->dex_file,
                                                           cu_->class_def_idx)) {
        GenMemBarrier(kStoreStore);
      }
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      break;

    case Instruction::RETURN_OBJECT:
      DCHECK(rl_src[0].ref);
      // Intentional fallthrough.
    case Instruction::RETURN:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValue(GetReturn(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::RETURN_WIDE:
      if (!kLeafOptimization || !mir_graph_->MethodIsLeaf()) {
        GenSuspendTest(opt_flags);
      }
      DCHECK_EQ(LocToRegClass(rl_src[0]), ShortyToRegClass(cu_->shorty[0]));
      StoreValueWide(GetReturnWide(LocToRegClass(rl_src[0])), rl_src[0]);
      break;

    case Instruction::MOVE_RESULT_WIDE:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValueWide(rl_dest, GetReturnWide(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_OBJECT:
      if ((opt_flags & MIR_INLINED) != 0) {
        break;  // Nop - combined w/ previous invoke.
      }
      StoreValue(rl_dest, GetReturn(LocToRegClass(rl_dest)));
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
      StoreValue(rl_dest, rl_src[0]);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      StoreValueWide(rl_dest, rl_src[0]);
      break;

    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      GenConst(rl_dest, vB);
      break;

    case Instruction::CONST_HIGH16:
      GenConst(rl_dest, vB << 16);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      GenConstWide(rl_dest, static_cast<int64_t>(static_cast<int32_t>(vB)));
      break;

    case Instruction::CONST_WIDE:
      GenConstWide(rl_dest, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      rl_result = EvalLoc(rl_dest, kAnyReg, true);
      LoadConstantWide(rl_result.reg, static_cast<int64_t>(vB) << 48);
      StoreValueWide(rl_dest, rl_result);
      break;

    case Instruction::MONITOR_ENTER:
      GenMonitorEnter(opt_flags, rl_src[0]);
      break;

    case Instruction::MONITOR_EXIT:
      GenMonitorExit(opt_flags, rl_src[0]);
      break;

    case Instruction::CHECK_CAST: {
      GenCheckCast(mir->offset, vB, rl_src[0]);
      break;
    }
    case Instruction::INSTANCE_OF:
      GenInstanceof(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::NEW_INSTANCE:
      GenNewInstance(vB, rl_dest);
      break;

    case Instruction::THROW:
      GenThrow(rl_src[0]);
      break;

    case Instruction::ARRAY_LENGTH:
      int len_offset;
      len_offset = mirror::Array::LengthOffset().Int32Value();
      rl_src[0] = LoadValue(rl_src[0], kRefReg);
      GenNullCheck(rl_src[0].reg, opt_flags);
      rl_result = EvalLoc(rl_dest, kCoreReg, true);
      Load32Disp(rl_src[0].reg, len_offset, rl_result.reg);
      MarkPossibleNullPointerException(opt_flags);
      StoreValue(rl_dest, rl_result);
      break;

    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      GenConstString(vB, rl_dest);
      break;

    case Instruction::CONST_CLASS:
      GenConstClass(vB, rl_dest);
      break;

    case Instruction::FILL_ARRAY_DATA:
      GenFillArrayData(vB, rl_src[0]);
      break;

    case Instruction::FILLED_NEW_ARRAY:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   false /* not range */));
      break;

    case Instruction::FILLED_NEW_ARRAY_RANGE:
      GenFilledNewArray(mir_graph_->NewMemCallInfo(bb, mir, kStatic,
                                                   true /* range */));
      break;

    case Instruction::NEW_ARRAY:
      GenNewArray(vC, rl_dest, rl_src[0]);
      break;

    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
      if (mir_graph_->IsBackedge(bb, bb->taken) &&
          (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken))) {
        GenSuspendTestAndBranch(opt_flags, &label_list[bb->taken]);
      } else {
        OpUnconditionalBranch(&label_list[bb->taken]);
      }
      break;

    case Instruction::PACKED_SWITCH:
      GenPackedSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::SPARSE_SWITCH:
      GenSparseSwitch(mir, vB, rl_src[0]);
      break;

    case Instruction::CMPL_FLOAT:
    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
      GenCmpFP(opcode, rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::CMP_LONG:
      GenCmpLong(rl_dest, rl_src[0], rl_src[1]);
      break;

    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const && rl_src[1].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg),
                                       mir_graph_->ConstantValue(rl_src[1].orig_sreg));
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareAndBranch(opcode, rl_src[0], rl_src[1], taken, fall_through);
      }
      break;
    }

    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ: {
      LIR* taken = &label_list[bb->taken];
      LIR* fall_through = &label_list[bb->fall_through];
      // Result known at compile time?
      if (rl_src[0].is_const) {
        bool is_taken = EvaluateBranch(opcode, mir_graph_->ConstantValue(rl_src[0].orig_sreg), 0);
        BasicBlockId target_id = is_taken ? bb->taken : bb->fall_through;
        if (mir_graph_->IsBackedge(bb, target_id) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, target_id))) {
          GenSuspendTest(opt_flags);
        }
        OpUnconditionalBranch(&label_list[target_id]);
      } else {
        if (mir_graph_->IsBackwardsBranch(bb) &&
            (kLeafOptimization || !mir_graph_->HasSuspendTestBetween(bb, bb->taken) ||
             !mir_graph_->HasSuspendTestBetween(bb, bb->fall_through))) {
          GenSuspendTest(opt_flags);
        }
        GenCompareZeroAndBranch(opcode, rl_src[0], taken, fall_through);
      }
      break;
    }

    case Instruction::AGET_WIDE:
      GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);
      break;
    case Instruction::AGET_OBJECT:
      GenArrayGet(opt_flags, kReference, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET:
      GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);
      break;
    case Instruction::AGET_BOOLEAN:
      GenArrayGet(opt_flags, kUnsignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_BYTE:
      GenArrayGet(opt_flags, kSignedByte, rl_src[0], rl_src[1], rl_dest, 0);
      break;
    case Instruction::AGET_CHAR:
      GenArrayGet(opt_flags, kUnsignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::AGET_SHORT:
      GenArrayGet(opt_flags, kSignedHalf, rl_src[0], rl_src[1], rl_dest, 1);
      break;
    case Instruction::APUT_WIDE:
      GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);
      break;
    case Instruction::APUT:
      GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);
      break;
    case Instruction::APUT_OBJECT: {
      bool is_null = mir_graph_->IsConstantNullRef(rl_src[0]);
      bool is_safe = is_null;  // Always safe to store null.
      if (!is_safe) {
        // Check safety from verifier type information.
        const DexCompilationUnit* unit = mir_graph_->GetCurrentDexCompilationUnit();
        is_safe = cu_->compiler_driver->IsSafeCast(unit, mir->offset);
      }
      if (is_null || is_safe) {
        // Store of constant null doesn't require an assignability test and can be generated inline
        // without fixed register usage or a card mark.
        GenArrayPut(opt_flags, kReference, rl_src[1], rl_src[2], rl_src[0], 2, !is_null);
      } else {
        GenArrayObjPut(opt_flags, rl_src[1], rl_src[2], rl_src[0]);
      }
      break;
    }
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      GenArrayPut(opt_flags, kUnsignedHalf, rl_src[1], rl_src[2], rl_src[0], 1, false);
      break;
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
      GenArrayPut(opt_flags, kUnsignedByte, rl_src[1], rl_src[2], rl_src[0], 0, false);
      break;

    case Instruction::IGET_OBJECT:
      GenIGet(mir, opt_flags, kReference, Primitive::kPrimNot, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      GenIGet(mir, opt_flags, k64, Primitive::kPrimLong, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET:
      GenIGet(mir, opt_flags, k32, Primitive::kPrimInt, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_CHAR:
      GenIGet(mir, opt_flags, kUnsignedHalf, Primitive::kPrimChar, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_SHORT:
      GenIGet(mir, opt_flags, kSignedHalf, Primitive::kPrimShort, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BOOLEAN:
      GenIGet(mir, opt_flags, kUnsignedByte, Primitive::kPrimBoolean, rl_dest, rl_src[0]);
      break;

    case Instruction::IGET_BYTE:
      GenIGet(mir, opt_flags, kSignedByte, Primitive::kPrimByte, rl_dest, rl_src[0]);
      break;

    case Instruction::IPUT_WIDE:
      GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_OBJECT:
      GenIPut(mir, opt_flags, kReference, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT:
      GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_BYTE:
    case Instruction::IPUT_BOOLEAN:
      GenIPut(mir, opt_flags, kUnsignedByte, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_CHAR:
      GenIPut(mir, opt_flags, kUnsignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::IPUT_SHORT:
      GenIPut(mir, opt_flags, kSignedHalf, rl_src[0], rl_src[1]);
      break;

    case Instruction::SGET_OBJECT:
      GenSget(mir, rl_dest, kReference, Primitive::kPrimNot);
      break;

    case Instruction::SGET:
      GenSget(mir, rl_dest, k32, Primitive::kPrimInt);
      break;

    case Instruction::SGET_CHAR:
      GenSget(mir, rl_dest, kUnsignedHalf, Primitive::kPrimChar);
      break;

    case Instruction::SGET_SHORT:
      GenSget(mir, rl_dest, kSignedHalf, Primitive::kPrimShort);
      break;

    case Instruction::SGET_BOOLEAN:
      GenSget(mir, rl_dest, kUnsignedByte, Primitive::kPrimBoolean);
      break;

    case Instruction::SGET_BYTE:
      GenSget(mir, rl_dest, kSignedByte, Primitive::kPrimByte);
      break;

    case Instruction::SGET_WIDE:
      // kPrimLong and kPrimDouble share the same entrypoints.
      GenSget(mir, rl_dest, k64, Primitive::kPrimLong);
      break;

    case Instruction::SPUT_OBJECT:
      GenSput(mir, rl_src[0], kReference);
      break;

    case Instruction::SPUT:
      GenSput(mir, rl_src[0], k32);
      break;

    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_BOOLEAN:
      GenSput(mir, rl_src[0], kUnsignedByte);
      break;

    case Instruction::SPUT_CHAR:
      GenSput(mir, rl_src[0], kUnsignedHalf);
      break;

    case Instruction::SPUT_SHORT:
      GenSput(mir, rl_src[0], kSignedHalf);
      break;

    case Instruction::SPUT_WIDE:
      GenSput(mir, rl_src[0], k64);
      break;

    case Instruction::INVOKE_STATIC_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        // If the invocation is not inlined, we can assume there is already a
        // suspend check at the return site.
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_STATIC:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kStatic, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_DIRECT:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_DIRECT_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kDirect, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_VIRTUAL:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_VIRTUAL_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kVirtual, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_SUPER:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_SUPER_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kSuper, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

    case Instruction::INVOKE_INTERFACE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, false));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;
    case Instruction::INVOKE_INTERFACE_RANGE:
      GenInvoke(mir_graph_->NewMemCallInfo(bb, mir, kInterface, true));
      if (!kLeafOptimization && (opt_flags & MIR_INLINED) == 0) {
        mir_graph_->AppendGenSuspendTestList(bb);
      }
      break;

| 935 | case Instruction::NEG_INT: |
| 936 | case Instruction::NOT_INT: |
| 937 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 938 | break; |
| 939 | |
| 940 | case Instruction::NEG_LONG: |
| 941 | case Instruction::NOT_LONG: |
| 942 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 943 | break; |
| 944 | |
| 945 | case Instruction::NEG_FLOAT: |
| 946 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 947 | break; |
| 948 | |
| 949 | case Instruction::NEG_DOUBLE: |
| 950 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[0]); |
| 951 | break; |
| 952 | |
| 953 | case Instruction::INT_TO_LONG: |
| 954 | GenIntToLong(rl_dest, rl_src[0]); |
| 955 | break; |
| 956 | |
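| | // LONG_TO_INT: refresh the wide source location, narrow it to a 32-bit view, and store. |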
| 957 | case Instruction::LONG_TO_INT: |
| 958 | rl_src[0] = UpdateLocWide(rl_src[0]); |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 959 | rl_src[0] = NarrowRegLoc(rl_src[0]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 960 | StoreValue(rl_dest, rl_src[0]); |
| 961 | break; |
| 962 | |
| 963 | case Instruction::INT_TO_BYTE: |
| 964 | case Instruction::INT_TO_SHORT: |
| 965 | case Instruction::INT_TO_CHAR: |
| 966 | GenIntNarrowing(opcode, rl_dest, rl_src[0]); |
| 967 | break; |
| 968 | |
| 969 | case Instruction::INT_TO_FLOAT: |
| 970 | case Instruction::INT_TO_DOUBLE: |
| 971 | case Instruction::LONG_TO_FLOAT: |
| 972 | case Instruction::LONG_TO_DOUBLE: |
| 973 | case Instruction::FLOAT_TO_INT: |
| 974 | case Instruction::FLOAT_TO_LONG: |
| 975 | case Instruction::FLOAT_TO_DOUBLE: |
| 976 | case Instruction::DOUBLE_TO_INT: |
| 977 | case Instruction::DOUBLE_TO_LONG: |
| 978 | case Instruction::DOUBLE_TO_FLOAT: |
| 979 | GenConversion(opcode, rl_dest, rl_src[0]); |
| 980 | break; |
| 981 | |
| 982 | |
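| | // Commutative integer ops: if either operand is a constant that is cheap to encode, |
| | // emit the literal form so the constant is folded directly into the instruction. |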
| 983 | case Instruction::ADD_INT: |
| 984 | case Instruction::ADD_INT_2ADDR: |
| 985 | case Instruction::MUL_INT: |
| 986 | case Instruction::MUL_INT_2ADDR: |
| 987 | case Instruction::AND_INT: |
| 988 | case Instruction::AND_INT_2ADDR: |
| 989 | case Instruction::OR_INT: |
| 990 | case Instruction::OR_INT_2ADDR: |
| 991 | case Instruction::XOR_INT: |
| 992 | case Instruction::XOR_INT_2ADDR: |
| 993 | if (rl_src[0].is_const && |
Matteo Franchin | c763e35 | 2014-07-04 12:53:27 +0100 | [diff] [blame] | 994 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[0]), opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 995 | GenArithOpIntLit(opcode, rl_dest, rl_src[1], |
| 996 | mir_graph_->ConstantValue(rl_src[0].orig_sreg)); |
| 997 | } else if (rl_src[1].is_const && |
Matteo Franchin | c763e35 | 2014-07-04 12:53:27 +0100 | [diff] [blame] | 998 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 999 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], |
| 1000 | mir_graph_->ConstantValue(rl_src[1].orig_sreg)); |
| 1001 | } else { |
| 1002 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1003 | } |
| 1004 | break; |
| 1005 | |
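| | // Non-commutative ops and shifts: only a constant second operand can be folded |
| | // into the literal form. |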
| 1006 | case Instruction::SUB_INT: |
| 1007 | case Instruction::SUB_INT_2ADDR: |
| 1008 | case Instruction::DIV_INT: |
| 1009 | case Instruction::DIV_INT_2ADDR: |
| 1010 | case Instruction::REM_INT: |
| 1011 | case Instruction::REM_INT_2ADDR: |
| 1012 | case Instruction::SHL_INT: |
| 1013 | case Instruction::SHL_INT_2ADDR: |
| 1014 | case Instruction::SHR_INT: |
| 1015 | case Instruction::SHR_INT_2ADDR: |
| 1016 | case Instruction::USHR_INT: |
| 1017 | case Instruction::USHR_INT_2ADDR: |
| 1018 | if (rl_src[1].is_const && |
Matteo Franchin | c763e35 | 2014-07-04 12:53:27 +0100 | [diff] [blame] | 1019 | InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src[1]), opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1020 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], mir_graph_->ConstantValue(rl_src[1])); |
| 1021 | } else { |
| 1022 | GenArithOpInt(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1023 | } |
| 1024 | break; |
| 1025 | |
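| | // Long add/sub/and/or/xor: use the immediate form when either operand is constant; |
| | // otherwise fall through to the register path shared with mul/div/rem. |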
| 1026 | case Instruction::ADD_LONG: |
| 1027 | case Instruction::SUB_LONG: |
| 1028 | case Instruction::AND_LONG: |
| 1029 | case Instruction::OR_LONG: |
| 1030 | case Instruction::XOR_LONG: |
| 1031 | case Instruction::ADD_LONG_2ADDR: |
| 1032 | case Instruction::SUB_LONG_2ADDR: |
| 1033 | case Instruction::AND_LONG_2ADDR: |
| 1034 | case Instruction::OR_LONG_2ADDR: |
| 1035 | case Instruction::XOR_LONG_2ADDR: |
| 1036 | if (rl_src[0].is_const || rl_src[1].is_const) { |
| 1037 | GenArithImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1038 | break; |
| 1039 | } |
| 1040 | // Note: intentional fallthrough. |
| 1041 | |
| 1042 | case Instruction::MUL_LONG: |
| 1043 | case Instruction::DIV_LONG: |
| 1044 | case Instruction::REM_LONG: |
| 1045 | case Instruction::MUL_LONG_2ADDR: |
| 1046 | case Instruction::DIV_LONG_2ADDR: |
| 1047 | case Instruction::REM_LONG_2ADDR: |
| 1048 | GenArithOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1049 | break; |
| 1050 | |
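| | // Long shifts: a constant shift amount selects the immediate-shift path. |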
| 1051 | case Instruction::SHL_LONG: |
| 1052 | case Instruction::SHR_LONG: |
| 1053 | case Instruction::USHR_LONG: |
| 1054 | case Instruction::SHL_LONG_2ADDR: |
| 1055 | case Instruction::SHR_LONG_2ADDR: |
| 1056 | case Instruction::USHR_LONG_2ADDR: |
| 1057 | if (rl_src[1].is_const) { |
| 1058 | GenShiftImmOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1059 | } else { |
| 1060 | GenShiftOpLong(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1061 | } |
| 1062 | break; |
| 1063 | |
| 1064 | case Instruction::ADD_FLOAT: |
| 1065 | case Instruction::SUB_FLOAT: |
| 1066 | case Instruction::MUL_FLOAT: |
| 1067 | case Instruction::DIV_FLOAT: |
| 1068 | case Instruction::REM_FLOAT: |
| 1069 | case Instruction::ADD_FLOAT_2ADDR: |
| 1070 | case Instruction::SUB_FLOAT_2ADDR: |
| 1071 | case Instruction::MUL_FLOAT_2ADDR: |
| 1072 | case Instruction::DIV_FLOAT_2ADDR: |
| 1073 | case Instruction::REM_FLOAT_2ADDR: |
| 1074 | GenArithOpFloat(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1075 | break; |
| 1076 | |
| 1077 | case Instruction::ADD_DOUBLE: |
| 1078 | case Instruction::SUB_DOUBLE: |
| 1079 | case Instruction::MUL_DOUBLE: |
| 1080 | case Instruction::DIV_DOUBLE: |
| 1081 | case Instruction::REM_DOUBLE: |
| 1082 | case Instruction::ADD_DOUBLE_2ADDR: |
| 1083 | case Instruction::SUB_DOUBLE_2ADDR: |
| 1084 | case Instruction::MUL_DOUBLE_2ADDR: |
| 1085 | case Instruction::DIV_DOUBLE_2ADDR: |
| 1086 | case Instruction::REM_DOUBLE_2ADDR: |
| 1087 | GenArithOpDouble(opcode, rl_dest, rl_src[0], rl_src[1]); |
| 1088 | break; |
| 1089 | |
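| | // Literal-carrying opcodes (LIT8/LIT16): the constant operand comes from vC of |
| | // the decoded instruction. |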
| 1090 | case Instruction::RSUB_INT: |
| 1091 | case Instruction::ADD_INT_LIT16: |
| 1092 | case Instruction::MUL_INT_LIT16: |
| 1093 | case Instruction::DIV_INT_LIT16: |
| 1094 | case Instruction::REM_INT_LIT16: |
| 1095 | case Instruction::AND_INT_LIT16: |
| 1096 | case Instruction::OR_INT_LIT16: |
| 1097 | case Instruction::XOR_INT_LIT16: |
| 1098 | case Instruction::ADD_INT_LIT8: |
| 1099 | case Instruction::RSUB_INT_LIT8: |
| 1100 | case Instruction::MUL_INT_LIT8: |
| 1101 | case Instruction::DIV_INT_LIT8: |
| 1102 | case Instruction::REM_INT_LIT8: |
| 1103 | case Instruction::AND_INT_LIT8: |
| 1104 | case Instruction::OR_INT_LIT8: |
| 1105 | case Instruction::XOR_INT_LIT8: |
| 1106 | case Instruction::SHL_INT_LIT8: |
| 1107 | case Instruction::SHR_INT_LIT8: |
| 1108 | case Instruction::USHR_INT_LIT8: |
| 1109 | GenArithOpIntLit(opcode, rl_dest, rl_src[0], vC); |
| 1110 | break; |
| 1111 | |
| 1112 | default: |
| 1113 | LOG(FATAL) << "Unexpected opcode: " << opcode; |
| 1114 | } |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 1115 | DCHECK(CheckCorePoolSanity()); |
Brian Carlstrom | 1895ea3 | 2013-07-18 13:28:37 -0700 | [diff] [blame] | 1116 | } // NOLINT(readability/fn_size) |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1117 | |
| 1118 | // Process extended MIR instructions |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1119 | void Mir2Lir::HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1120 | switch (static_cast<ExtendedMIROpcode>(mir->dalvikInsn.opcode)) { |
| 1121 | case kMirOpCopy: { |
| 1122 | RegLocation rl_src = mir_graph_->GetSrc(mir, 0); |
| 1123 | RegLocation rl_dest = mir_graph_->GetDest(mir); |
| 1124 | StoreValue(rl_dest, rl_src); |
| 1125 | break; |
| 1126 | } |
| 1127 | case kMirOpFusedCmplFloat: |
| 1128 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, false /*double*/); |
| 1129 | break; |
| 1130 | case kMirOpFusedCmpgFloat: |
| 1131 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, false /*double*/); |
| 1132 | break; |
| 1133 | case kMirOpFusedCmplDouble: |
| 1134 | GenFusedFPCmpBranch(bb, mir, false /*gt bias*/, true /*double*/); |
| 1135 | break; |
| 1136 | case kMirOpFusedCmpgDouble: |
| 1137 | GenFusedFPCmpBranch(bb, mir, true /*gt bias*/, true /*double*/); |
| 1138 | break; |
| 1139 | case kMirOpFusedCmpLong: |
| 1140 | GenFusedLongCmpBranch(bb, mir); |
| 1141 | break; |
| 1142 | case kMirOpSelect: |
| 1143 | GenSelect(bb, mir); |
| 1144 | break; |
Razvan A Lupusoru | 7642324 | 2014-08-04 09:38:46 -0700 | [diff] [blame] | 1145 | case kMirOpNullCheck: { |
| 1146 | RegLocation rl_obj = mir_graph_->GetSrc(mir, 0); |
| 1147 | rl_obj = LoadValue(rl_obj, kRefReg); |
| 1148 | // An explicit check is generated here because uses of this opcode are not |
| 1149 | // expected to trip the implicit null checks (an actual invalid access on the |
| 1150 | // null object would be required for that). |
| 1151 | GenExplicitNullCheck(rl_obj.reg, mir->optimization_flags); |
| 1152 | break; |
| 1153 | } |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1154 | case kMirOpPhi: |
| 1155 | case kMirOpNop: |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1156 | case kMirOpRangeCheck: |
| 1157 | case kMirOpDivZeroCheck: |
| 1158 | case kMirOpCheck: |
| 1159 | case kMirOpCheckPart2: |
| 1160 | // Ignore these known opcodes |
| 1161 | break; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1162 | default: |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1163 | // Give the backends a chance to handle unknown extended MIR opcodes. |
| 1164 | GenMachineSpecificExtendedMethodMIR(bb, mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1165 | break; |
| 1166 | } |
| 1167 | } |
| 1168 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1169 | void Mir2Lir::GenPrintLabel(MIR* mir) { |
| 1170 | // Mark the beginning of a Dalvik instruction for line tracking. |
| 1171 | if (cu_->verbose) { |
| 1172 | char* inst_str = mir_graph_->GetDalvikDisassembly(mir); |
| 1173 | MarkBoundary(mir->offset, inst_str); |
| 1174 | } |
| 1175 | } |
| 1176 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1177 | // Handle the content in each basic block. |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1178 | bool Mir2Lir::MethodBlockCodeGen(BasicBlock* bb) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1179 | if (bb->block_type == kDead) return false; |
| 1180 | current_dalvik_offset_ = bb->start_offset; |
| 1181 | MIR* mir; |
| 1182 | int block_id = bb->id; |
| 1183 | |
| 1184 | block_label_list_[block_id].operands[0] = bb->start_offset; |
| 1185 | |
| 1186 | // Insert the block label. |
| 1187 | block_label_list_[block_id].opcode = kPseudoNormalBlockLabel; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1188 | block_label_list_[block_id].flags.fixup = kFixupLabel; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1189 | AppendLIR(&block_label_list_[block_id]); |
| 1190 | |
| 1191 | LIR* head_lir = NULL; |
| 1192 | |
| 1193 | // If this is a catch block, export the start address. |
| 1194 | if (bb->catch_entry) { |
| 1195 | head_lir = NewLIR0(kPseudoExportedPC); |
| 1196 | } |
| 1197 | |
| 1198 | // Free temp registers and reset redundant store tracking. |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1199 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1200 | |
| 1201 | if (bb->block_type == kEntryBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1202 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1203 | int start_vreg = cu_->num_dalvik_registers - cu_->num_ins; |
| 1204 | GenEntrySequence(&mir_graph_->reg_location_[start_vreg], |
| 1205 | mir_graph_->reg_location_[mir_graph_->GetMethodSReg()]); |
| 1206 | } else if (bb->block_type == kExitBlock) { |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1207 | ResetRegPool(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1208 | GenExitSequence(); |
| 1209 | } |
| 1210 | |
| 1211 | for (mir = bb->first_mir_insn; mir != NULL; mir = mir->next) { |
| 1212 | ResetRegPool(); |
| 1213 | if (cu_->disable_opt & (1 << kTrackLiveTemps)) { |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1214 | ClobberAllTemps(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1215 | // Reset temp allocation to minimize differences when A/B testing. |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1216 | reg_pool_->ResetNextTemp(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1217 | } |
| 1218 | |
| 1219 | if (cu_->disable_opt & (1 << kSuppressLoads)) { |
| 1220 | ResetDefTracking(); |
| 1221 | } |
| 1222 | |
| 1223 | // Reset temp tracking sanity check. |
| 1224 | if (kIsDebugBuild) { |
| 1225 | live_sreg_ = INVALID_SREG; |
| 1226 | } |
| 1227 | |
| 1228 | current_dalvik_offset_ = mir->offset; |
| 1229 | int opcode = mir->dalvikInsn.opcode; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1230 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1231 | GenPrintLabel(mir); |
| 1232 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1233 | // Remember the first LIR for this block. |
| 1234 | if (head_lir == NULL) { |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1235 | head_lir = &block_label_list_[bb->id]; |
| 1236 | // Set the first label as a scheduling barrier. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1237 | DCHECK(!head_lir->flags.use_def_invalid); |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1238 | head_lir->u.m.def_mask = &kEncodeAll; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1239 | } |
| 1240 | |
| 1241 | if (opcode == kMirOpCheck) { |
| 1242 | // Combine check and work halves of throwing instruction. |
| 1243 | MIR* work_half = mir->meta.throw_insn; |
| 1244 | mir->dalvikInsn.opcode = work_half->dalvikInsn.opcode; |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1245 | mir->meta = work_half->meta; // Whatever the work_half had, we need to copy it. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1246 | opcode = work_half->dalvikInsn.opcode; |
| 1247 | SSARepresentation* ssa_rep = work_half->ssa_rep; |
| 1248 | work_half->ssa_rep = mir->ssa_rep; |
| 1249 | mir->ssa_rep = ssa_rep; |
| 1250 | work_half->dalvikInsn.opcode = static_cast<Instruction::Code>(kMirOpCheckPart2); |
Vladimir Marko | 4376c87 | 2014-01-23 12:39:29 +0000 | [diff] [blame] | 1251 | work_half->meta.throw_insn = mir; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1252 | } |
| 1253 | |
Jean Christophe Beyler | 2ab40eb | 2014-06-02 09:03:14 -0700 | [diff] [blame] | 1254 | if (MIR::DecodedInstruction::IsPseudoMirOp(opcode)) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1255 | HandleExtendedMethodMIR(bb, mir); |
| 1256 | continue; |
| 1257 | } |
| 1258 | |
| 1259 | CompileDalvikInstruction(mir, bb, block_label_list_); |
| 1260 | } |
| 1261 | |
| 1262 | if (head_lir) { |
| 1263 | // Eliminate redundant loads/stores and delay stores into later slots. |
| 1264 | ApplyLocalOptimizations(head_lir, last_lir_insn_); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1265 | } |
| 1266 | return false; |
| 1267 | } |
| 1268 | |
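| | // Generate code for a method that matched one of the recognized "special" patterns. |
| | // Finds the first DalvikByteCode block and hands its first MIR to the target-specific |
| | // GenSpecialCase(); returns false if no such block exists. |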
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1269 | bool Mir2Lir::SpecialMIR2LIR(const InlineMethod& special) { |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1270 | cu_->NewTimingSplit("SpecialMIR2LIR"); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1271 | // Find the first DalvikByteCode block. |
| 1272 | int num_reachable_blocks = mir_graph_->GetNumReachableBlocks(); |
| 1273 | BasicBlock* bb = NULL; |
| 1274 | for (int idx = 0; idx < num_reachable_blocks; idx++) { |
| 1275 | // TODO: avoid accessing growable lists directly. |
| 1276 | int dfs_index = mir_graph_->GetDfsOrder()->Get(idx); |
| 1277 | bb = mir_graph_->GetBasicBlock(dfs_index); |
| 1278 | if (bb->block_type == kDalvikByteCode) { |
| 1279 | break; |
| 1280 | } |
| 1281 | } |
| 1282 | if (bb == NULL) { |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1283 | return false; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1284 | } |
| 1285 | DCHECK_EQ(bb->start_offset, 0); |
| 1286 | DCHECK(bb->first_mir_insn != NULL); |
| 1287 | |
| 1288 | // Get the first instruction. |
| 1289 | MIR* mir = bb->first_mir_insn; |
| 1290 | |
| 1291 | // Free temp registers and reset redundant store tracking. |
| 1292 | ResetRegPool(); |
| 1293 | ResetDefTracking(); |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 1294 | ClobberAllTemps(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1295 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1296 | return GenSpecialCase(bb, mir, special); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1297 | } |
| 1298 | |
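| | // Convert the method's MIR to LIR: lay out basic blocks in pre-order DFS order, |
| | // insert a branch wherever a fall-through successor is not laid out next, then |
| | // emit any deferred slow paths. |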
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1299 | void Mir2Lir::MethodMIR2LIR() { |
buzbee | a61f495 | 2013-08-23 14:27:06 -0700 | [diff] [blame] | 1300 | cu_->NewTimingSplit("MIR2LIR"); |
| 1301 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1302 | // Hold the labels of each block. |
| 1303 | block_label_list_ = |
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 1304 | static_cast<LIR*>(arena_->Alloc(sizeof(LIR) * mir_graph_->GetNumBlocks(), |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 1305 | kArenaAllocLIR)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1306 | |
buzbee | 56c7178 | 2013-09-05 17:13:19 -0700 | [diff] [blame] | 1307 | PreOrderDfsIterator iter(mir_graph_); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1308 | BasicBlock* curr_bb = iter.Next(); |
| 1309 | BasicBlock* next_bb = iter.Next(); |
| 1310 | while (curr_bb != NULL) { |
| 1311 | MethodBlockCodeGen(curr_bb); |
| 1312 | // If the fall_through block is no longer laid out consecutively, drop in a branch. |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1313 | BasicBlock* curr_bb_fall_through = mir_graph_->GetBasicBlock(curr_bb->fall_through); |
| 1314 | if ((curr_bb_fall_through != NULL) && (curr_bb_fall_through != next_bb)) { |
| 1315 | OpUnconditionalBranch(&block_label_list_[curr_bb->fall_through]); |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 1316 | } |
| 1317 | curr_bb = next_bb; |
| 1318 | do { |
| 1319 | next_bb = iter.Next(); |
| 1320 | } while ((next_bb != NULL) && (next_bb->block_type == kDead)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1321 | } |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1322 | HandleSlowPaths(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1323 | } |
| 1324 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1325 | // |
| 1326 | // LIR Slow Path |
| 1327 | // |
| 1328 | |
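| | // Emit the label a slow path branches to: restore the dex pc, create the label LIR, |
| | // and point the fast-path branch at it. |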
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1329 | LIR* Mir2Lir::LIRSlowPath::GenerateTargetLabel(int opcode) { |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1330 | m2l_->SetCurrentDexPc(current_dex_pc_); |
Mingyao Yang | 6ffcfa0 | 2014-04-25 11:06:00 -0700 | [diff] [blame] | 1331 | LIR* target = m2l_->NewLIR0(opcode); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1332 | fromfast_->target = target; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1333 | return target; |
| 1334 | } |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 1335 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1336 | |
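| | // Debug helper: verify that a RegStorage meets the requested wideness, reference and |
| | // FP expectations, either CHECK-failing or logging a warning depending on fail/report. |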
| 1337 | void Mir2Lir::CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, |
| 1338 | bool fail, bool report) |
| 1339 | const { |
| 1340 | if (rs.Valid()) { |
| 1341 | if (ref == RefCheck::kCheckRef) { |
| 1342 | if (cu_->target64 && !rs.Is64Bit()) { |
| 1343 | if (fail) { |
| 1344 | CHECK(false) << "Reg storage not 64b for ref."; |
| 1345 | } else if (report) { |
| 1346 | LOG(WARNING) << "Reg storage not 64b for ref."; |
| 1347 | } |
| 1348 | } |
| 1349 | } |
| 1350 | if (wide == WidenessCheck::kCheckWide) { |
| 1351 | if (!rs.Is64Bit()) { |
| 1352 | if (fail) { |
| 1353 | CHECK(false) << "Reg storage not 64b for wide."; |
| 1354 | } else if (report) { |
| 1355 | LOG(WARNING) << "Reg storage not 64b for wide."; |
| 1356 | } |
| 1357 | } |
| 1358 | } |
| 1359 | // A tighter check would be nice, but for now soft-float will not check float at all. |
| 1360 | if (fp == FPCheck::kCheckFP && cu_->instruction_set != kArm) { |
| 1361 | if (!rs.IsFloat()) { |
| 1362 | if (fail) { |
| 1363 | CHECK(false) << "Reg storage not float for fp."; |
| 1364 | } else if (report) { |
| 1365 | LOG(WARNING) << "Reg storage not float for fp."; |
| 1366 | } |
| 1367 | } |
| 1368 | } else if (fp == FPCheck::kCheckNotFP) { |
| 1369 | if (rs.IsFloat()) { |
| 1370 | if (fail) { |
| 1371 | CHECK(false) << "Reg storage float for not-fp."; |
| 1372 | } else if (report) { |
| 1373 | LOG(WARNING) << "Reg storage float for not-fp."; |
| 1374 | } |
| 1375 | } |
| 1376 | } |
| 1377 | } |
| 1378 | } |
| 1379 | |
| 1380 | void Mir2Lir::CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const { |
| 1381 | // Regrettably can't use the fp part of rl, as that is not really indicative of where a value |
| 1382 | // will be stored. |
| 1383 | CheckRegStorageImpl(rl.reg, rl.wide ? WidenessCheck::kCheckWide : WidenessCheck::kCheckNotWide, |
| 1384 | rl.ref ? RefCheck::kCheckRef : RefCheck::kCheckNotRef, FPCheck::kIgnoreFP, fail, report); |
| 1385 | } |
| 1386 | |
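| | // Default implementation; targets that report instruction offsets must override this. |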
Serban Constantinescu | 6399968 | 2014-07-15 17:44:21 +0100 | [diff] [blame] | 1387 | size_t Mir2Lir::GetInstructionOffset(LIR* lir) { |
| 1388 | UNIMPLEMENTED(FATAL) << "Unsupported GetInstructionOffset()"; |
| 1389 | return 0; |
| 1390 | } |
| 1391 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1392 | } // namespace art |