/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_
#define ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_

#include "invoke_type.h"
#include "compiled_method.h"
#include "dex/compiler_enums.h"
#include "dex/compiler_ir.h"
#include "dex/reg_location.h"
#include "dex/reg_storage.h"
#include "dex/backend.h"
#include "dex/quick/resource_mask.h"
#include "driver/compiler_driver.h"
#include "instruction_set.h"
#include "leb128.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "safe_map.h"
#include "utils/array_ref.h"
#include "utils/arena_allocator.h"
#include "utils/arena_containers.h"
#include "utils/stack_checks.h"

namespace art {

// Set to 1 to measure cost of suspend check.
#define NO_SUSPEND 0

#define IS_BINARY_OP (1ULL << kIsBinaryOp)
#define IS_BRANCH (1ULL << kIsBranch)
#define IS_IT (1ULL << kIsIT)
#define IS_MOVE (1ULL << kIsMoveOp)
#define IS_LOAD (1ULL << kMemLoad)
#define IS_QUAD_OP (1ULL << kIsQuadOp)
#define IS_QUIN_OP (1ULL << kIsQuinOp)
#define IS_SEXTUPLE_OP (1ULL << kIsSextupleOp)
#define IS_STORE (1ULL << kMemStore)
#define IS_TERTIARY_OP (1ULL << kIsTertiaryOp)
#define IS_UNARY_OP (1ULL << kIsUnaryOp)
#define IS_VOLATILE (1ULL << kMemVolatile)
#define NEEDS_FIXUP (1ULL << kPCRelFixup)
#define NO_OPERAND (1ULL << kNoOperand)
#define REG_DEF0 (1ULL << kRegDef0)
#define REG_DEF1 (1ULL << kRegDef1)
#define REG_DEF2 (1ULL << kRegDef2)
#define REG_DEFA (1ULL << kRegDefA)
#define REG_DEFD (1ULL << kRegDefD)
#define REG_DEF_FPCS_LIST0 (1ULL << kRegDefFPCSList0)
#define REG_DEF_FPCS_LIST2 (1ULL << kRegDefFPCSList2)
#define REG_DEF_LIST0 (1ULL << kRegDefList0)
#define REG_DEF_LIST1 (1ULL << kRegDefList1)
#define REG_DEF_LR (1ULL << kRegDefLR)
#define REG_DEF_SP (1ULL << kRegDefSP)
#define REG_USE0 (1ULL << kRegUse0)
#define REG_USE1 (1ULL << kRegUse1)
#define REG_USE2 (1ULL << kRegUse2)
#define REG_USE3 (1ULL << kRegUse3)
#define REG_USE4 (1ULL << kRegUse4)
#define REG_USEA (1ULL << kRegUseA)
#define REG_USEC (1ULL << kRegUseC)
#define REG_USED (1ULL << kRegUseD)
#define REG_USEB (1ULL << kRegUseB)
#define REG_USE_FPCS_LIST0 (1ULL << kRegUseFPCSList0)
#define REG_USE_FPCS_LIST2 (1ULL << kRegUseFPCSList2)
#define REG_USE_LIST0 (1ULL << kRegUseList0)
#define REG_USE_LIST1 (1ULL << kRegUseList1)
#define REG_USE_LR (1ULL << kRegUseLR)
#define REG_USE_PC (1ULL << kRegUsePC)
#define REG_USE_SP (1ULL << kRegUseSP)
#define SETS_CCODES (1ULL << kSetsCCodes)
#define USES_CCODES (1ULL << kUsesCCodes)
#define USE_FP_STACK (1ULL << kUseFpStack)
#define REG_USE_LO (1ULL << kUseLo)
#define REG_USE_HI (1ULL << kUseHi)
#define REG_DEF_LO (1ULL << kDefLo)
#define REG_DEF_HI (1ULL << kDefHi)
#define SCALED_OFFSET_X0 (1ULL << kMemScaledx0)
#define SCALED_OFFSET_X2 (1ULL << kMemScaledx2)
#define SCALED_OFFSET_X4 (1ULL << kMemScaledx4)

// Special load/stores
#define IS_LOADX (IS_LOAD | IS_VOLATILE)
#define IS_LOAD_OFF (IS_LOAD | SCALED_OFFSET_X0)
#define IS_LOAD_OFF2 (IS_LOAD | SCALED_OFFSET_X2)
#define IS_LOAD_OFF4 (IS_LOAD | SCALED_OFFSET_X4)

#define IS_STOREX (IS_STORE | IS_VOLATILE)
#define IS_STORE_OFF (IS_STORE | SCALED_OFFSET_X0)
#define IS_STORE_OFF2 (IS_STORE | SCALED_OFFSET_X2)
#define IS_STORE_OFF4 (IS_STORE | SCALED_OFFSET_X4)

// Common combo register usage patterns.
#define REG_DEF01 (REG_DEF0 | REG_DEF1)
#define REG_DEF012 (REG_DEF0 | REG_DEF1 | REG_DEF2)
#define REG_DEF01_USE2 (REG_DEF0 | REG_DEF1 | REG_USE2)
#define REG_DEF0_USE01 (REG_DEF0 | REG_USE01)
#define REG_DEF0_USE0 (REG_DEF0 | REG_USE0)
#define REG_DEF0_USE12 (REG_DEF0 | REG_USE12)
#define REG_DEF0_USE123 (REG_DEF0 | REG_USE123)
#define REG_DEF0_USE1 (REG_DEF0 | REG_USE1)
#define REG_DEF0_USE2 (REG_DEF0 | REG_USE2)
#define REG_DEFAD_USEAD (REG_DEFAD_USEA | REG_USED)
#define REG_DEFAD_USEA (REG_DEFA_USEA | REG_DEFD)
#define REG_DEFA_USEA (REG_DEFA | REG_USEA)
#define REG_USE012 (REG_USE01 | REG_USE2)
#define REG_USE014 (REG_USE01 | REG_USE4)
#define REG_USE01 (REG_USE0 | REG_USE1)
#define REG_USE02 (REG_USE0 | REG_USE2)
#define REG_USE12 (REG_USE1 | REG_USE2)
#define REG_USE23 (REG_USE2 | REG_USE3)
#define REG_USE123 (REG_USE1 | REG_USE2 | REG_USE3)

// TODO: #includes need a cleanup
#ifndef INVALID_SREG
#define INVALID_SREG (-1)
#endif

struct BasicBlock;
struct CallInfo;
struct CompilationUnit;
struct InlineMethod;
struct MIR;
struct LIR;
struct RegisterInfo;
class DexFileMethodInliner;
class MIRGraph;
class MirMethodLoweringInfo;
class Mir2Lir;

typedef int (*NextCallInsn)(CompilationUnit*, CallInfo*, int,
                            const MethodReference& target_method,
                            uint32_t method_idx, uintptr_t direct_code,
                            uintptr_t direct_method, InvokeType type);

typedef std::vector<uint8_t> CodeBuffer;

struct UseDefMasks {
  const ResourceMask* use_mask;  // Resource mask for use.
  const ResourceMask* def_mask;  // Resource mask for def.
};

struct AssemblyInfo {
  LIR* pcrel_next;  // Chain of LIR nodes needing pc relative fixups.
};

struct LIR {
  CodeOffset offset;              // Offset of this instruction.
  NarrowDexOffset dalvik_offset;  // Offset of Dalvik opcode in code units (16-bit words).
  int16_t opcode;
  LIR* next;
  LIR* prev;
  LIR* target;
  struct {
    unsigned int alias_info:17;   // For Dalvik register disambiguation.
    bool is_nop:1;                // LIR is optimized away.
    unsigned int size:4;          // Note: size of encoded instruction is in bytes.
    bool use_def_invalid:1;       // If true, masks should not be used.
    unsigned int generation:1;    // Used to track visitation state during fixup pass.
    unsigned int fixup:8;         // Fixup kind.
  } flags;
  union {
    UseDefMasks m;                // Use & Def masks used during optimization.
    AssemblyInfo a;               // Instruction info used during assembly phase.
  } u;
  int32_t operands[5];            // [0..4] = [dest, src1, src2, extra, extra2].
};

// Utility macros to traverse the LIR list.
#define NEXT_LIR(lir) (lir->next)
#define PREV_LIR(lir) (lir->prev)

// Defines for alias_info (tracks Dalvik register references).
#define DECODE_ALIAS_INFO_REG(X) (X & 0xffff)
#define DECODE_ALIAS_INFO_WIDE_FLAG (0x10000)
#define DECODE_ALIAS_INFO_WIDE(X) ((X & DECODE_ALIAS_INFO_WIDE_FLAG) ? 1 : 0)
#define ENCODE_ALIAS_INFO(REG, ISWIDE) (REG | (ISWIDE ? DECODE_ALIAS_INFO_WIDE_FLAG : 0))
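// As a worked example (the values follow directly from the macros above):
// ENCODE_ALIAS_INFO(5, true) yields 0x10005, from which DECODE_ALIAS_INFO_REG recovers 5 and
// DECODE_ALIAS_INFO_WIDE recovers 1.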

#define ENCODE_REG_PAIR(low_reg, high_reg) ((low_reg & 0xff) | ((high_reg & 0xff) << 8))
#define DECODE_REG_PAIR(both_regs, low_reg, high_reg) \
  do { \
    low_reg = both_regs & 0xff; \
    high_reg = (both_regs >> 8) & 0xff; \
  } while (false)
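// As a worked example (arithmetic follows directly from the macros above): ENCODE_REG_PAIR(2, 3)
// yields 0x0302, and DECODE_REG_PAIR(0x0302, low_reg, high_reg) sets low_reg = 2, high_reg = 3.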

// Mask to denote sreg as the start of a 64-bit item.  Must not interfere with low 16 bits.
#define STARTING_WIDE_SREG 0x10000

// TODO: replace these macros
#define SLOW_FIELD_PATH (cu_->enable_debug & (1 << kDebugSlowFieldPath))
#define SLOW_INVOKE_PATH (cu_->enable_debug & (1 << kDebugSlowInvokePath))
#define SLOW_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowStringPath))
#define SLOW_TYPE_PATH (cu_->enable_debug & (1 << kDebugSlowTypePath))
#define EXERCISE_SLOWEST_STRING_PATH (cu_->enable_debug & (1 << kDebugSlowestStringPath))

class Mir2Lir : public Backend {
  public:
    static constexpr bool kFailOnSizeError = true && kIsDebugBuild;
    static constexpr bool kReportSizeError = true && kIsDebugBuild;

    // TODO: If necessary, this could be made target-dependent.
    static constexpr uint16_t kSmallSwitchThreshold = 5;

    /*
     * Auxiliary information describing the location of data embedded in the Dalvik
     * byte code stream.
     */
    struct EmbeddedData {
      CodeOffset offset;      // Code offset of data block.
      const uint16_t* table;  // Original dex data.
      DexOffset vaddr;        // Dalvik offset of parent opcode.
    };

    struct FillArrayData : EmbeddedData {
      int32_t size;
    };

    struct SwitchTable : EmbeddedData {
      LIR* anchor;    // Reference instruction for relative offsets.
      LIR** targets;  // Array of case targets.
    };

    /* Static register use counts */
    struct RefCounts {
      int count;
      int s_reg;
    };

    /*
     * Data structure tracking the mapping between a Dalvik value (32 or 64 bits)
     * and native register storage. The primary purpose is to reuse previously
     * loaded values, if possible, and otherwise to keep the value in register
     * storage as long as possible.
     *
     * NOTE 1: wide_value refers to the width of the Dalvik value contained in
     * this register (or pair). For example, a 64-bit register containing a 32-bit
     * Dalvik value would have wide_value==false even though the storage container itself
     * is wide. Similarly, a 32-bit register containing half of a 64-bit Dalvik value
     * would have wide_value==true (and additionally would have its partner field set to the
     * other half, whose wide_value field would also be true).
     *
     * NOTE 2: In the case of a register pair, you can determine which of the partners
     * is the low half by looking at the s_reg names. The high s_reg will equal low_sreg + 1.
     *
     * NOTE 3: In the case of a 64-bit register holding a Dalvik wide value, wide_value
     * will be true and partner==self. s_reg refers to the low-order word of the Dalvik
     * value, and the s_reg of the high word is implied (s_reg + 1).
     *
     * NOTE 4: The reg and is_temp fields should always be correct. If is_temp is false no
     * other fields have meaning. [perhaps not true, wide should work for promoted regs?]
     * If is_temp==true and live==false, no other fields have
     * meaning. If is_temp==true and live==true, wide_value, partner, dirty, s_reg, def_start
     * and def_end describe the relationship between the temp register/register pair and
     * the Dalvik value[s] described by s_reg/s_reg+1.
     *
     * The fields used_storage, master_storage and storage_mask are used to track allocation
     * in light of potential aliasing. For example, consider Arm's d2, which overlaps s4 & s5.
     * d2's storage mask would be 0x00000003, the two low-order bits denoting 64 bits of
     * storage use. For s4, it would be 0x00000001; for s5 0x00000002. These values should not
     * change once initialized. The "used_storage" field tracks current allocation status.
     * Although each record contains this field, only the field from the largest member of
     * an aliased group is used. In our case, it would be d2's. The master_storage pointer
     * of d2, s4 and s5 would all point to d2's used_storage field. Each bit in a used_storage
     * represents 32 bits of storage. d2's used_storage would be initialized to 0xfffffffc.
     * Then, if we wanted to determine whether s4 could be allocated, we would "and"
     * s4's storage_mask with s4's *master_storage. If the result is zero, s4 is free;
     * to allocate it: *master_storage |= storage_mask. To free: *master_storage &= ~storage_mask.
     *
     * For an X86 vector register example, storage_mask would be:
     *    0x00000001 for 32-bit view of xmm1
     *    0x00000003 for 64-bit view of xmm1
     *    0x0000000f for 128-bit view of xmm1
     *    0x000000ff for 256-bit view of ymm1   // future expansion, if needed
     *    0x0000ffff for 512-bit view of ymm1   // future expansion, if needed
     *    0xffffffff for 1024-bit view of ymm1  // future expansion, if needed
     *
     * The "liveness" of a register is handled in a similar way. The liveness_ storage is
     * held in the widest member of an aliased set. Note, though, that for a temp register to
     * be reused as live, it must both be marked live and the associated SReg() must match the
     * desired s_reg. This gets a little complicated when dealing with aliased registers. All
     * members of an aliased set will share the same liveness flags, but each will individually
     * maintain s_reg_. In this way we can know that at least one member of an
     * aliased set is live, but will only fully match on the appropriate alias view. For example,
     * if Arm d1 is live as a double and has s_reg_ set to Dalvik v8 (which also implies v9
     * because it is wide), its aliases s2 and s3 will show as live, but will have
     * s_reg_ == INVALID_SREG. An attempt to later AllocLiveReg() of v9 with a single-precision
     * view will fail because although s3's liveness bit is set, its s_reg_ will not match v9.
     * This will cause all members of the aliased set to be clobbered and AllocLiveReg() will
     * report that v9 is currently not live as a single (which is what we want).
     *
     * NOTE: the x86 usage is still somewhat in flux.  There are competing notions of how
     * to treat xmm registers:
     *     1. Treat them all as 128-bits wide, but denote how much data used via bytes field.
     *         o This more closely matches reality, but means you'd need to be able to get
     *           to the associated RegisterInfo struct to figure out how it's being used.
     *         o This is how 64-bit core registers will be used - always 64 bits, but the
     *           "bytes" field will be 4 for 32-bit usage and 8 for 64-bit usage.
     *     2. View the xmm registers based on contents.
     *         o A single in a xmm2 register would be k32BitVector, while a double in xmm2 would
     *           be a k64BitVector.
     *         o Note that the two uses above would be considered distinct registers (but with
     *           the aliasing mechanism, we could detect interference).
     *         o This is how aliased double and single float registers will be handled on
     *           Arm and MIPS.
     * Working plan is, for all targets, to follow mechanism 1 for 64-bit core registers, and
     * mechanism 2 for aliased float registers and x86 vector registers.
     */
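    //
    // Illustrative sketch of the allocation protocol described above, expressed with the
    // RegisterInfo accessors declared below (not part of the build; shown only to make the
    // bit arithmetic concrete):
    //
    //   RegisterInfo* s4_info = ...;       // Info record for the s4 view.
    //   bool s4_free = !s4_info->InUse();  // (storage_mask_ & master_->used_storage_) == 0
    //   s4_info->MarkInUse();              // *master_storage |= storage_mask
    //   s4_info->MarkFree();               // *master_storage &= ~storage_mask
    //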
    class RegisterInfo {
      public:
        RegisterInfo(RegStorage r, const ResourceMask& mask = kEncodeAll);
        ~RegisterInfo() {}
        static void* operator new(size_t size, ArenaAllocator* arena) {
          return arena->Alloc(size, kArenaAllocRegAlloc);
        }

        static const uint32_t k32SoloStorageMask     = 0x00000001;
        static const uint32_t kLowSingleStorageMask  = 0x00000001;
        static const uint32_t kHighSingleStorageMask = 0x00000002;
        static const uint32_t k64SoloStorageMask     = 0x00000003;
        static const uint32_t k128SoloStorageMask    = 0x0000000f;
        static const uint32_t k256SoloStorageMask    = 0x000000ff;
        static const uint32_t k512SoloStorageMask    = 0x0000ffff;
        static const uint32_t k1024SoloStorageMask   = 0xffffffff;

        bool InUse() { return (storage_mask_ & master_->used_storage_) != 0; }
        void MarkInUse() { master_->used_storage_ |= storage_mask_; }
        void MarkFree() { master_->used_storage_ &= ~storage_mask_; }
        // No part of the containing storage is live in this view.
        bool IsDead() { return (master_->liveness_ & storage_mask_) == 0; }
        // Liveness of this view matches.  Note: not equivalent to !IsDead().
        bool IsLive() { return (master_->liveness_ & storage_mask_) == storage_mask_; }
        void MarkLive(int s_reg) {
          // TODO: Anything useful to assert here?
          s_reg_ = s_reg;
          master_->liveness_ |= storage_mask_;
        }
        void MarkDead() {
          if (SReg() != INVALID_SREG) {
            s_reg_ = INVALID_SREG;
            master_->liveness_ &= ~storage_mask_;
            ResetDefBody();
          }
        }
        RegStorage GetReg() { return reg_; }
        void SetReg(RegStorage reg) { reg_ = reg; }
        bool IsTemp() { return is_temp_; }
        void SetIsTemp(bool val) { is_temp_ = val; }
        bool IsWide() { return wide_value_; }
        void SetIsWide(bool val) {
          wide_value_ = val;
          if (!val) {
            // If not wide, reset partner to self.
            SetPartner(GetReg());
          }
        }
        bool IsDirty() { return dirty_; }
        void SetIsDirty(bool val) { dirty_ = val; }
        RegStorage Partner() { return partner_; }
        void SetPartner(RegStorage partner) { partner_ = partner; }
        int SReg() { return (!IsTemp() || IsLive()) ? s_reg_ : INVALID_SREG; }
        const ResourceMask& DefUseMask() { return def_use_mask_; }
        void SetDefUseMask(const ResourceMask& def_use_mask) { def_use_mask_ = def_use_mask; }
        RegisterInfo* Master() { return master_; }
        void SetMaster(RegisterInfo* master) {
          master_ = master;
          if (master != this) {
            master_->aliased_ = true;
            DCHECK(alias_chain_ == nullptr);
            alias_chain_ = master_->alias_chain_;
            master_->alias_chain_ = this;
          }
        }
        bool IsAliased() { return aliased_; }
        RegisterInfo* GetAliasChain() { return alias_chain_; }
        uint32_t StorageMask() { return storage_mask_; }
        void SetStorageMask(uint32_t storage_mask) { storage_mask_ = storage_mask; }
        LIR* DefStart() { return def_start_; }
        void SetDefStart(LIR* def_start) { def_start_ = def_start; }
        LIR* DefEnd() { return def_end_; }
        void SetDefEnd(LIR* def_end) { def_end_ = def_end; }
        void ResetDefBody() { def_start_ = def_end_ = nullptr; }
        // Find member of aliased set matching storage_used; return nullptr if none.
        RegisterInfo* FindMatchingView(uint32_t storage_used) {
          RegisterInfo* res = Master();
          for (; res != nullptr; res = res->GetAliasChain()) {
            if (res->StorageMask() == storage_used)
              break;
          }
          return res;
        }

      private:
        RegStorage reg_;
        bool is_temp_;               // Can allocate as temp?
        bool wide_value_;            // Holds a Dalvik wide value (either itself, or part of a pair).
        bool dirty_;                 // If live, is it dirty?
        bool aliased_;               // Is this the master for other aliased RegisterInfo's?
        RegStorage partner_;         // If wide_value, other reg of pair or self if 64-bit register.
        int s_reg_;                  // Name of live value.
        ResourceMask def_use_mask_;  // Resources for this element.
        uint32_t used_storage_;      // 1 bit per 4 bytes of storage.  Unused by aliases.
        uint32_t liveness_;          // 1 bit per 4 bytes of storage.  Unused by aliases.
        RegisterInfo* master_;       // Pointer to controlling storage mask.
        uint32_t storage_mask_;      // Track allocation of sub-units.
        LIR *def_start_;             // Starting inst in last def sequence.
        LIR *def_end_;               // Ending inst in last def sequence.
        RegisterInfo* alias_chain_;  // Chain of aliased registers.
    };

    class RegisterPool {
      public:
        RegisterPool(Mir2Lir* m2l, ArenaAllocator* arena,
                     const ArrayRef<const RegStorage>& core_regs,
                     const ArrayRef<const RegStorage>& core64_regs,
                     const ArrayRef<const RegStorage>& sp_regs,
                     const ArrayRef<const RegStorage>& dp_regs,
                     const ArrayRef<const RegStorage>& reserved_regs,
                     const ArrayRef<const RegStorage>& reserved64_regs,
                     const ArrayRef<const RegStorage>& core_temps,
                     const ArrayRef<const RegStorage>& core64_temps,
                     const ArrayRef<const RegStorage>& sp_temps,
                     const ArrayRef<const RegStorage>& dp_temps);
        ~RegisterPool() {}
        static void* operator new(size_t size, ArenaAllocator* arena) {
          return arena->Alloc(size, kArenaAllocRegAlloc);
        }
        static void operator delete(void* ptr) { UNUSED(ptr); }
        void ResetNextTemp() {
          next_core_reg_ = 0;
          next_sp_reg_ = 0;
          next_dp_reg_ = 0;
        }
        ArenaVector<RegisterInfo*> core_regs_;
        int next_core_reg_;
        ArenaVector<RegisterInfo*> core64_regs_;
        int next_core64_reg_;
        ArenaVector<RegisterInfo*> sp_regs_;    // Single precision float.
        int next_sp_reg_;
        ArenaVector<RegisterInfo*> dp_regs_;    // Double precision float.
        int next_dp_reg_;
        ArenaVector<RegisterInfo*>* ref_regs_;  // Points to core_regs_ or core64_regs_.
        int* next_ref_reg_;

      private:
        Mir2Lir* const m2l_;
    };

    struct PromotionMap {
      RegLocationType core_location:3;
      uint8_t core_reg;
      RegLocationType fp_location:3;
      uint8_t fp_reg;
      bool first_in_pair;
    };

    //
    // Slow paths.  This object is used to generate a sequence of code that is executed in the
    // slow path.  For example, resolving a string or class is slow as it will only be executed
    // once (after that it is resolved and doesn't need to be done again).  We want slow paths
    // to be placed out-of-line, and not require a (mispredicted, probably) conditional forward
    // branch over them.
    //
    // If you want to create a slow path, declare a class derived from LIRSlowPath and provide
    // the Compile() function that will be called near the end of the code generated by the
    // method.
    //
    // The basic flow for a slow path is:
    //
    //     CMP reg, #value
    //     BEQ fromfast
    // cont:
    //     ...
    //     fast path code
    //     ...
    //     more code
    //     ...
    //     RETURN
    //
    // fromfast:
    //     ...
    //     slow path code
    //     ...
    //     B cont
    //
    // So we need two labels and two branches.  The first label (called fromfast) is the target
    // of the conditional branch into the slow path code.  The second label (called cont) is used
    // as an unconditional branch target for getting back to the code after the slow path
    // has completed.
    //
    class LIRSlowPath {
      public:
        LIRSlowPath(Mir2Lir* m2l, const DexOffset dexpc, LIR* fromfast,
                    LIR* cont = nullptr)
            : m2l_(m2l), cu_(m2l->cu_), current_dex_pc_(dexpc), fromfast_(fromfast), cont_(cont) {
          m2l->StartSlowPath(this);
        }
        virtual ~LIRSlowPath() {}
        virtual void Compile() = 0;

        static void* operator new(size_t size, ArenaAllocator* arena) {
          return arena->Alloc(size, kArenaAllocData);
        }

        LIR *GetContinuationLabel() {
          return cont_;
        }

        LIR *GetFromFast() {
          return fromfast_;
        }

      protected:
        LIR* GenerateTargetLabel(int opcode = kPseudoTargetLabel);

        Mir2Lir* const m2l_;
        CompilationUnit* const cu_;
        const DexOffset current_dex_pc_;
        LIR* const fromfast_;
        LIR* const cont_;
    };
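
    // A minimal sketch (for illustration only; not a class defined in this file) of a slow path
    // that follows the pattern described above.  The target-specific branch back to
    // GetContinuationLabel() is elided because the branch emitters live elsewhere:
    //
    //   class ExampleSlowPath : public LIRSlowPath {
    //     public:
    //       ExampleSlowPath(Mir2Lir* m2l, DexOffset dexpc, LIR* fromfast, LIR* cont)
    //           : LIRSlowPath(m2l, dexpc, fromfast, cont) {}
    //       void Compile() {
    //         GenerateTargetLabel();  // Binds the out-of-line "fromfast" label.
    //         // ... emit the rarely-taken code via m2l_ ...
    //         // ... branch back to GetContinuationLabel() ...
    //       }
    //   };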

    // Helper class for changing mem_ref_type_ until the end of the current scope. See mem_ref_type_.
    class ScopedMemRefType {
      public:
        ScopedMemRefType(Mir2Lir* m2l, ResourceMask::ResourceBit new_mem_ref_type)
            : m2l_(m2l),
              old_mem_ref_type_(m2l->mem_ref_type_) {
          m2l_->mem_ref_type_ = new_mem_ref_type;
        }

        ~ScopedMemRefType() {
          m2l_->mem_ref_type_ = old_mem_ref_type_;
        }

      private:
        Mir2Lir* const m2l_;
        ResourceMask::ResourceBit old_mem_ref_type_;

        DISALLOW_COPY_AND_ASSIGN(ScopedMemRefType);
    };
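
    // Typical usage inside a Mir2Lir codegen routine (illustrative sketch; assumes a
    // literal-memory ResourceBit such as ResourceMask::kLiteral is provided by resource_mask.h):
    //
    //   {
    //     ScopedMemRefType mem_ref_type(this, ResourceMask::kLiteral);
    //     // Loads/stores emitted in this scope are annotated with the new mem_ref_type_.
    //   }  // Previous mem_ref_type_ restored here.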

    virtual ~Mir2Lir() {}

    /**
     * @brief Decodes the LIR offset.
     * @return Returns the scaled offset of LIR.
     */
    virtual size_t GetInstructionOffset(LIR* lir);

    int32_t s4FromSwitchData(const void* switch_data) {
      return *reinterpret_cast<const int32_t*>(switch_data);
    }

    /*
     * TODO: this is a trace JIT vestige, and its use should be reconsidered. At the time
     * it was introduced, it was intended to be a quick best guess of type without having to
     * take the time to do type analysis. Currently, though, we have a much better idea of
     * the types of Dalvik virtual registers. Instead of using this for a best guess, why not
     * just use our knowledge of type to select the most appropriate register class?
     */
    RegisterClass RegClassBySize(OpSize size) {
      if (size == kReference) {
        return kRefReg;
      } else {
        return (size == kUnsignedHalf || size == kSignedHalf || size == kUnsignedByte ||
                size == kSignedByte) ? kCoreReg : kAnyReg;
      }
    }

    size_t CodeBufferSizeInBytes() {
      return code_buffer_.size() / sizeof(code_buffer_[0]);
    }

    static bool IsPseudoLirOp(int opcode) {
      return (opcode < 0);
    }

    /*
     * LIR operands are 32-bit integers.  Sometimes (especially for managing
     * instructions which require PC-relative fixups) we need the operands to carry
     * pointers.  To do this, we assign these pointers an index in pointer_storage_, and
     * hold that index in the operand array.
     * TUNING: If use of these utilities becomes more common on 32-bit builds, it
     * may be worth conditionally-compiling a set of identity functions here.
     */
    uint32_t WrapPointer(void* pointer) {
      uint32_t res = pointer_storage_.size();
      pointer_storage_.push_back(pointer);
      return res;
    }

    void* UnwrapPointer(size_t index) {
      return pointer_storage_[index];
    }
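
    // Illustrative sketch of the wrap/unwrap round trip described above (the operand slot and
    // the SwitchTable target are hypothetical placeholders, not taken from a specific caller):
    //
    //   uint32_t idx = WrapPointer(tab_rec);                                    // Stash pointer.
    //   lir->operands[1] = idx;                                                 // Carry the index.
    //   SwitchTable* tab = reinterpret_cast<SwitchTable*>(UnwrapPointer(idx));  // Recover it.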

    // strdup(), but allocates from the arena.
    char* ArenaStrdup(const char* str) {
      size_t len = strlen(str) + 1;
      char* res = reinterpret_cast<char*>(arena_->Alloc(len, kArenaAllocMisc));
      if (res != NULL) {
        strncpy(res, str, len);
      }
      return res;
    }

    // Shared by all targets - implemented in codegen_util.cc
    void AppendLIR(LIR* lir);
    void InsertLIRBefore(LIR* current_lir, LIR* new_lir);
    void InsertLIRAfter(LIR* current_lir, LIR* new_lir);

    /**
     * @brief Provides the maximum number of compiler temporaries that the backend can/wants
     * to place in a frame.
     * @return Returns the maximum number of compiler temporaries.
     */
    size_t GetMaxPossibleCompilerTemps() const;

    /**
     * @brief Provides the number of bytes needed in frame for spilling of compiler temporaries.
     * @return Returns the size in bytes for space needed for compiler temporary spill region.
     */
    size_t GetNumBytesForCompilerTempSpillRegion();

    DexOffset GetCurrentDexPc() const {
      return current_dalvik_offset_;
    }

    RegisterClass ShortyToRegClass(char shorty_type);
    RegisterClass LocToRegClass(RegLocation loc);
    int ComputeFrameSize();
    virtual void Materialize();
    virtual CompiledMethod* GetCompiledMethod();
    void MarkSafepointPC(LIR* inst);
    void MarkSafepointPCAfter(LIR* after);
    void SetupResourceMasks(LIR* lir);
    void SetMemRefType(LIR* lir, bool is_load, int mem_type);
    void AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load, bool is64bit);
    void SetupRegMask(ResourceMask* mask, int reg);
    void ClearRegMask(ResourceMask* mask, int reg);
    void DumpLIRInsn(LIR* arg, unsigned char* base_addr);
    void EliminateLoad(LIR* lir, int reg_id);
    void DumpDependentInsnPair(LIR* check_lir, LIR* this_lir, const char* type);
    void DumpPromotionMap();
    void CodegenDump();
    LIR* RawLIR(DexOffset dalvik_offset, int opcode, int op0 = 0, int op1 = 0,
                int op2 = 0, int op3 = 0, int op4 = 0, LIR* target = NULL);
    LIR* NewLIR0(int opcode);
    LIR* NewLIR1(int opcode, int dest);
    LIR* NewLIR2(int opcode, int dest, int src1);
    LIR* NewLIR2NoDest(int opcode, int src, int info);
    LIR* NewLIR3(int opcode, int dest, int src1, int src2);
    LIR* NewLIR4(int opcode, int dest, int src1, int src2, int info);
    LIR* NewLIR5(int opcode, int dest, int src1, int src2, int info1, int info2);
    LIR* ScanLiteralPool(LIR* data_target, int value, unsigned int delta);
    LIR* ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi);
    LIR* ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method);
    LIR* ScanLiteralPoolClass(LIR* data_target, const DexFile& dex_file, uint32_t type_idx);
    LIR* AddWordData(LIR* *constant_list_p, int value);
    LIR* AddWideData(LIR* *constant_list_p, int val_lo, int val_hi);
    void ProcessSwitchTables();
    void DumpSparseSwitchTable(const uint16_t* table);
    void DumpPackedSwitchTable(const uint16_t* table);
    void MarkBoundary(DexOffset offset, const char* inst_str);
    void NopLIR(LIR* lir);
    void UnlinkLIR(LIR* lir);
    bool EvaluateBranch(Instruction::Code opcode, int src1, int src2);
    bool IsInexpensiveConstant(RegLocation rl_src);
    ConditionCode FlipComparisonOrder(ConditionCode before);
    ConditionCode NegateComparison(ConditionCode before);
    virtual void InstallLiteralPools();
    void InstallSwitchTables();
    void InstallFillArrayData();
    bool VerifyCatchEntries();
    void CreateMappingTables();
    void CreateNativeGcMap();
    int AssignLiteralOffset(CodeOffset offset);
    int AssignSwitchTablesOffset(CodeOffset offset);
    int AssignFillArrayDataOffset(CodeOffset offset);
    virtual LIR* InsertCaseLabel(DexOffset vaddr, int keyVal);
    void MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec);
    void MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec);

    virtual void StartSlowPath(LIRSlowPath* slowpath) {}
    virtual void BeginInvoke(CallInfo* info) {}
    virtual void EndInvoke(CallInfo* info) {}

    // Handle bookkeeping to convert a wide RegLocation to a narrow RegLocation.  No code generated.
    virtual RegLocation NarrowRegLoc(RegLocation loc);
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 704 | |
| 705 | // Shared by all targets - implemented in local_optimizations.cc |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 706 | void ConvertMemOpIntoMove(LIR* orig_lir, RegStorage dest, RegStorage src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 707 | void ApplyLoadStoreElimination(LIR* head_lir, LIR* tail_lir); |
| 708 | void ApplyLoadHoisting(LIR* head_lir, LIR* tail_lir); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 709 | virtual void ApplyLocalOptimizations(LIR* head_lir, LIR* tail_lir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 710 | |
| 711 | // Shared by all targets - implemented in ralloc_util.cc |
| 712 | int GetSRegHi(int lowSreg); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 713 | bool LiveOut(int s_reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 714 | void SimpleRegAlloc(); |
| 715 | void ResetRegPool(); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 716 | void CompilerInitPool(RegisterInfo* info, RegStorage* regs, int num); |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 717 | void DumpRegPool(ArenaVector<RegisterInfo*>* regs); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 718 | void DumpCoreRegPool(); |
| 719 | void DumpFpRegPool(); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 720 | void DumpRegPools(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 721 | /* Mark a temp register as dead. Does not affect allocation state. */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 722 | void Clobber(RegStorage reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 723 | void ClobberSReg(int s_reg); |
buzbee | 642fe34 | 2014-05-23 16:04:08 -0700 | [diff] [blame] | 724 | void ClobberAliases(RegisterInfo* info, uint32_t clobber_mask); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 725 | int SRegToPMap(int s_reg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 726 | void RecordCorePromotion(RegStorage reg, int s_reg); |
| 727 | RegStorage AllocPreservedCoreReg(int s_reg); |
buzbee | b5860fb | 2014-06-21 15:31:01 -0700 | [diff] [blame] | 728 | void RecordFpPromotion(RegStorage reg, int s_reg); |
| 729 | RegStorage AllocPreservedFpReg(int s_reg); |
| 730 | virtual RegStorage AllocPreservedSingle(int s_reg); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 731 | virtual RegStorage AllocPreservedDouble(int s_reg); |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 732 | RegStorage AllocTempBody(ArenaVector<RegisterInfo*>& regs, int* next_temp, bool required); |
Serguei Katkov | 9ee4519 | 2014-07-17 14:39:03 +0700 | [diff] [blame] | 733 | virtual RegStorage AllocTemp(bool required = true); |
| 734 | virtual RegStorage AllocTempWide(bool required = true); |
| 735 | virtual RegStorage AllocTempRef(bool required = true); |
| 736 | virtual RegStorage AllocTempSingle(bool required = true); |
| 737 | virtual RegStorage AllocTempDouble(bool required = true); |
| 738 | virtual RegStorage AllocTypedTemp(bool fp_hint, int reg_class, bool required = true); |
| 739 | virtual RegStorage AllocTypedTempWide(bool fp_hint, int reg_class, bool required = true); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 740 | void FlushReg(RegStorage reg); |
| 741 | void FlushRegWide(RegStorage reg); |
| 742 | RegStorage AllocLiveReg(int s_reg, int reg_class, bool wide); |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 743 | RegStorage FindLiveReg(ArenaVector<RegisterInfo*>& regs, int s_reg); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 744 | virtual void FreeTemp(RegStorage reg); |
| 745 | virtual void FreeRegLocTemps(RegLocation rl_keep, RegLocation rl_free); |
| 746 | virtual bool IsLive(RegStorage reg); |
| 747 | virtual bool IsTemp(RegStorage reg); |
buzbee | 262b299 | 2014-03-27 11:22:43 -0700 | [diff] [blame] | 748 | bool IsPromoted(RegStorage reg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 749 | bool IsDirty(RegStorage reg); |
Mark Mendell | e9f3e71 | 2014-07-03 21:34:41 -0400 | [diff] [blame] | 750 | virtual void LockTemp(RegStorage reg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 751 | void ResetDef(RegStorage reg); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 752 | void NullifyRange(RegStorage reg, int s_reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 753 | void MarkDef(RegLocation rl, LIR *start, LIR *finish); |
| 754 | void MarkDefWide(RegLocation rl, LIR *start, LIR *finish); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 755 | void ResetDefLoc(RegLocation rl); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 756 | void ResetDefLocWide(RegLocation rl); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 757 | void ResetDefTracking(); |
buzbee | ba57451 | 2014-05-12 15:13:16 -0700 | [diff] [blame] | 758 | void ClobberAllTemps(); |
Razvan A Lupusoru | 614c2b4 | 2014-01-28 17:05:21 -0800 | [diff] [blame] | 759 | void FlushSpecificReg(RegisterInfo* info); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 760 | void FlushAllRegs(); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 761 | bool RegClassMatches(int reg_class, RegStorage reg); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 762 | void MarkLive(RegLocation loc); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 763 | void MarkTemp(RegStorage reg); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 764 | void UnmarkTemp(RegStorage reg); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 765 | void MarkWide(RegStorage reg); |
buzbee | 082833c | 2014-05-17 23:16:26 -0700 | [diff] [blame] | 766 | void MarkNarrow(RegStorage reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 767 | void MarkClean(RegLocation loc); |
| 768 | void MarkDirty(RegLocation loc); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 769 | void MarkInUse(RegStorage reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 770 | bool CheckCorePoolSanity(); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 771 | virtual RegLocation UpdateLoc(RegLocation loc); |
| 772 | virtual RegLocation UpdateLocWide(RegLocation loc); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 773 | RegLocation UpdateRawLoc(RegLocation loc); |

    /**
     * @brief Used to prepare a register location to receive a wide value.
     * @see EvalLoc
     * @param loc the location where the value will be stored.
     * @param reg_class Type of register needed.
     * @param update Whether the liveness information should be updated.
     * @return Returns the properly typed temporary in physical register pairs.
     */
    virtual RegLocation EvalLocWide(RegLocation loc, int reg_class, bool update);

    /**
     * @brief Used to prepare a register location to receive a value.
     * @param loc the location where the value will be stored.
     * @param reg_class Type of register needed.
     * @param update Whether the liveness information should be updated.
     * @return Returns the properly typed temporary in physical register.
     */
    virtual RegLocation EvalLoc(RegLocation loc, int reg_class, bool update);

    void CountRefs(RefCounts* core_counts, RefCounts* fp_counts, size_t num_regs);
    void DumpCounts(const RefCounts* arr, int size, const char* msg);
    void DoPromotion();
    int VRegOffset(int v_reg);
    int SRegOffset(int s_reg);
    RegLocation GetReturnWide(RegisterClass reg_class);
    RegLocation GetReturn(RegisterClass reg_class);
    RegisterInfo* GetRegInfo(RegStorage reg);
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 802 | |
| 803 | // Shared by all targets - implemented in gen_common.cc. |
Mingyao Yang | 3a74d15 | 2014-04-21 15:39:44 -0700 | [diff] [blame] | 804 | void AddIntrinsicSlowPath(CallInfo* info, LIR* branch, LIR* resume = nullptr); |
Matteo Franchin | c61b3c9 | 2014-06-18 11:52:47 +0100 | [diff] [blame] | 805 | virtual bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div, |
| 806 | RegLocation rl_src, RegLocation rl_dest, int lit); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 807 | bool HandleEasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 808 | virtual void HandleSlowPaths(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 809 | void GenBarrier(); |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 810 | void GenDivZeroException(); |
| 811 | // c_code holds the condition code generated from testing the divisor against 0. |
| 812 | void GenDivZeroCheck(ConditionCode c_code); |
| 813 | // reg holds the divisor. |
| 814 | void GenDivZeroCheck(RegStorage reg); |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 815 | void GenArrayBoundsCheck(RegStorage index, RegStorage length); |
| 816 | void GenArrayBoundsCheck(int32_t index, RegStorage length); |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 817 | LIR* GenNullCheck(RegStorage reg); |
Dave Allison | b373e09 | 2014-02-20 16:06:36 -0800 | [diff] [blame] | 818 | void MarkPossibleNullPointerException(int opt_flags); |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 819 | void MarkPossibleNullPointerExceptionAfter(int opt_flags, LIR* after); |
Dave Allison | b373e09 | 2014-02-20 16:06:36 -0800 | [diff] [blame] | 820 | void MarkPossibleStackOverflowException(); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 821 | void ForceImplicitNullCheck(RegStorage reg, int opt_flags); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 822 | LIR* GenNullCheck(RegStorage m_reg, int opt_flags); |
Dave Allison | f943914 | 2014-03-27 15:10:22 -0700 | [diff] [blame] | 823 | LIR* GenExplicitNullCheck(RegStorage m_reg, int opt_flags); |
Dave Allison | 69dfe51 | 2014-07-11 17:11:58 +0000 | [diff] [blame] | 824 | virtual void GenImplicitNullCheck(RegStorage reg, int opt_flags); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 825 | void GenCompareAndBranch(Instruction::Code opcode, RegLocation rl_src1, |
| 826 | RegLocation rl_src2, LIR* taken, LIR* fall_through); |
| 827 | void GenCompareZeroAndBranch(Instruction::Code opcode, RegLocation rl_src, |
| 828 | LIR* taken, LIR* fall_through); |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 829 | virtual void GenIntToLong(RegLocation rl_dest, RegLocation rl_src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 830 | void GenIntNarrowing(Instruction::Code opcode, RegLocation rl_dest, |
| 831 | RegLocation rl_src); |
| 832 | void GenNewArray(uint32_t type_idx, RegLocation rl_dest, |
| 833 | RegLocation rl_src); |
| 834 | void GenFilledNewArray(CallInfo* info); |
Ian Rogers | 832336b | 2014-10-08 15:35:22 -0700 | [diff] [blame] | 835 | void GenFillArrayData(MIR* mir, DexOffset table_offset, RegLocation rl_src); |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 836 | void GenSput(MIR* mir, RegLocation rl_src, OpSize size); |
| 837 | // Get entrypoints are type-specific; size alone is not sufficient to safely infer the |
| 838 | // entrypoint. |
| 839 | void GenSget(MIR* mir, RegLocation rl_dest, OpSize size, Primitive::Type type); |
| 840 | void GenIGet(MIR* mir, int opt_flags, OpSize size, Primitive::Type type, |
| 841 | RegLocation rl_dest, RegLocation rl_obj); |
Vladimir Marko | be0e546 | 2014-02-26 11:24:15 +0000 | [diff] [blame] | 842 | void GenIPut(MIR* mir, int opt_flags, OpSize size, |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 843 | RegLocation rl_src, RegLocation rl_obj); |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 844 | void GenArrayObjPut(int opt_flags, RegLocation rl_array, RegLocation rl_index, |
| 845 | RegLocation rl_src); |
| 846 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 847 | void GenConstClass(uint32_t type_idx, RegLocation rl_dest); |
| 848 | void GenConstString(uint32_t string_idx, RegLocation rl_dest); |
| 849 | void GenNewInstance(uint32_t type_idx, RegLocation rl_dest); |
| 850 | void GenThrow(RegLocation rl_src); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 851 | void GenInstanceof(uint32_t type_idx, RegLocation rl_dest, RegLocation rl_src); |
| 852 | void GenCheckCast(uint32_t insn_idx, uint32_t type_idx, RegLocation rl_src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 853 | void GenLong3Addr(OpKind first_op, OpKind second_op, RegLocation rl_dest, |
| 854 | RegLocation rl_src1, RegLocation rl_src2); |
Serban Constantinescu | ed65c5e | 2014-05-22 15:10:18 +0100 | [diff] [blame] | 855 | virtual void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 856 | RegLocation rl_src1, RegLocation rl_shift); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 857 | void GenArithOpIntLit(Instruction::Code opcode, RegLocation rl_dest, |
| 858 | RegLocation rl_src, int lit); |
Andreas Gampe | c76c614 | 2014-08-04 16:30:03 -0700 | [diff] [blame] | 859 | virtual void GenArithOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 860 | RegLocation rl_src1, RegLocation rl_src2); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 861 | void GenConversionCall(QuickEntrypointEnum trampoline, RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 862 | virtual void GenSuspendTest(int opt_flags); |
| 863 | virtual void GenSuspendTestAndBranch(int opt_flags, LIR* target); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 864 | |
Bill Buzbee | d61ba4b | 2014-01-13 21:44:01 +0000 | [diff] [blame] | 865 | // This will be overridden by the x86 implementation. |
| 866 | virtual void GenConstWide(RegLocation rl_dest, int64_t value); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 867 | virtual void GenArithOpInt(Instruction::Code opcode, RegLocation rl_dest, |
| 868 | RegLocation rl_src1, RegLocation rl_src2); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 869 | |
| 870 | // Shared by all targets - implemented in gen_invoke.cc. |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 871 | LIR* CallHelper(RegStorage r_tgt, QuickEntrypointEnum trampoline, bool safepoint_pc, |
Dave Allison | d6ed642 | 2014-04-09 23:36:15 +0000 | [diff] [blame] | 872 | bool use_link = true); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 873 | RegStorage CallHelperSetup(QuickEntrypointEnum trampoline); |
| 874 | |
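| | // Illustrative sketch (assumed pattern, not mandated by this header): a manual runtime call |
| | // can be built from the two helpers above, e.g. |
| | //   RegStorage r_tgt = CallHelperSetup(kQuickAllocObject);      // load trampoline address |
| | //   LoadValueDirectFixed(rl_src, TargetReg(kArg0, kNotWide));   // marshal the argument |
| | //   CallHelper(r_tgt, kQuickAllocObject, true /* safepoint_pc */); |
| | // The CallRuntimeHelper* wrappers below package the common argument shapes of this sequence. |
| | |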
| 875 | void CallRuntimeHelper(QuickEntrypointEnum trampoline, bool safepoint_pc); |
| 876 | void CallRuntimeHelperImm(QuickEntrypointEnum trampoline, int arg0, bool safepoint_pc); |
| 877 | void CallRuntimeHelperReg(QuickEntrypointEnum trampoline, RegStorage arg0, bool safepoint_pc); |
| 878 | void CallRuntimeHelperRegLocation(QuickEntrypointEnum trampoline, RegLocation arg0, |
Ian Rogers | 468532e | 2013-08-05 10:56:33 -0700 | [diff] [blame] | 879 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 880 | void CallRuntimeHelperImmImm(QuickEntrypointEnum trampoline, int arg0, int arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 881 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 882 | void CallRuntimeHelperImmRegLocation(QuickEntrypointEnum trampoline, int arg0, RegLocation arg1, |
| 883 | bool safepoint_pc); |
| 884 | void CallRuntimeHelperRegLocationImm(QuickEntrypointEnum trampoline, RegLocation arg0, int arg1, |
| 885 | bool safepoint_pc); |
| 886 | void CallRuntimeHelperImmReg(QuickEntrypointEnum trampoline, int arg0, RegStorage arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 887 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 888 | void CallRuntimeHelperRegImm(QuickEntrypointEnum trampoline, RegStorage arg0, int arg1, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 889 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 890 | void CallRuntimeHelperImmMethod(QuickEntrypointEnum trampoline, int arg0, bool safepoint_pc); |
| 891 | void CallRuntimeHelperRegMethod(QuickEntrypointEnum trampoline, RegStorage arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 892 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 893 | void CallRuntimeHelperRegMethodRegLocation(QuickEntrypointEnum trampoline, RegStorage arg0, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 894 | RegLocation arg2, bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 895 | void CallRuntimeHelperRegLocationRegLocation(QuickEntrypointEnum trampoline, RegLocation arg0, |
| 896 | RegLocation arg1, bool safepoint_pc); |
| 897 | void CallRuntimeHelperRegReg(QuickEntrypointEnum trampoline, RegStorage arg0, RegStorage arg1, |
| 898 | bool safepoint_pc); |
| 899 | void CallRuntimeHelperRegRegImm(QuickEntrypointEnum trampoline, RegStorage arg0, |
| 900 | RegStorage arg1, int arg2, bool safepoint_pc); |
| 901 | void CallRuntimeHelperImmMethodRegLocation(QuickEntrypointEnum trampoline, int arg0, |
| 902 | RegLocation arg2, bool safepoint_pc); |
| 903 | void CallRuntimeHelperImmMethodImm(QuickEntrypointEnum trampoline, int arg0, int arg2, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 904 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 905 | void CallRuntimeHelperImmRegLocationRegLocation(QuickEntrypointEnum trampoline, int arg0, |
| 906 | RegLocation arg1, RegLocation arg2, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 907 | bool safepoint_pc); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 908 | void CallRuntimeHelperRegLocationRegLocationRegLocation(QuickEntrypointEnum trampoline, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 909 | RegLocation arg0, RegLocation arg1, |
| 910 | RegLocation arg2, |
| 911 | bool safepoint_pc); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 912 | void GenInvoke(CallInfo* info); |
Vladimir Marko | 3bc8615 | 2014-03-13 14:11:28 +0000 | [diff] [blame] | 913 | void GenInvokeNoInline(CallInfo* info); |
Vladimir Marko | f4da675 | 2014-08-01 19:04:18 +0100 | [diff] [blame] | 914 | virtual NextCallInsn GetNextSDCallInsn(); |
| 915 | |
| 916 | /* |
| 917 | * @brief Generate the actual call insn based on the method info. |
| 918 | * @param method_info the lowering info for the method call. |
| 919 | * @returns Call instruction |
| 920 | */ |
| 921 | virtual LIR* GenCallInsn(const MirMethodLoweringInfo& method_info); |
| 922 | |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 923 | virtual void FlushIns(RegLocation* ArgLocs, RegLocation rl_method); |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 924 | virtual int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 925 | NextCallInsn next_call_insn, |
| 926 | const MethodReference& target_method, |
| 927 | uint32_t vtable_idx, |
| 928 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 929 | bool skip_this); |
Dmitry Petrochenko | 58994cd | 2014-05-17 01:02:18 +0700 | [diff] [blame] | 930 | virtual int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 931 | NextCallInsn next_call_insn, |
| 932 | const MethodReference& target_method, |
| 933 | uint32_t vtable_idx, |
| 934 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 935 | bool skip_this); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 936 | |
| 937 | /** |
| 938 | * @brief Used to determine the register location of destination. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 939 | * @details This is needed during generation of inline intrinsics because it finds the destination |
| 940 | * of the return, |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 941 | * either the physical register or the target of move-result. |
| 942 | * @param info Information about the invoke. |
| 943 | * @return Returns the destination location. |
| 944 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 945 | RegLocation InlineTarget(CallInfo* info); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 946 | |
| 947 | /** |
| 948 | * @brief Used to determine the wide register location of destination. |
| 949 | * @see InlineTarget |
| 950 | * @param info Information about the invoke. |
| 951 | * @return Returns the destination location. |
| 952 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 953 | RegLocation InlineTargetWide(CallInfo* info); |
| 954 | |
Mathieu Chartier | cd48f2d | 2014-09-09 13:51:09 -0700 | [diff] [blame] | 955 | bool GenInlinedReferenceGetReferent(CallInfo* info); |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 956 | virtual bool GenInlinedCharAt(CallInfo* info); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 957 | bool GenInlinedStringIsEmptyOrLength(CallInfo* info, bool is_empty); |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 958 | virtual bool GenInlinedReverseBits(CallInfo* info, OpSize size); |
Vladimir Marko | 6bdf1ff | 2013-10-29 17:40:46 +0000 | [diff] [blame] | 959 | bool GenInlinedReverseBytes(CallInfo* info, OpSize size); |
Martyn Capewell | 9a8a506 | 2014-08-07 11:31:48 +0100 | [diff] [blame] | 960 | virtual bool GenInlinedAbsInt(CallInfo* info); |
Serban Constantinescu | 169489b | 2014-06-11 16:43:35 +0100 | [diff] [blame] | 961 | virtual bool GenInlinedAbsLong(CallInfo* info); |
Vladimir Marko | 5030d3e | 2014-07-17 10:43:08 +0100 | [diff] [blame] | 962 | virtual bool GenInlinedAbsFloat(CallInfo* info) = 0; |
| 963 | virtual bool GenInlinedAbsDouble(CallInfo* info) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 964 | bool GenInlinedFloatCvt(CallInfo* info); |
| 965 | bool GenInlinedDoubleCvt(CallInfo* info); |
Serban Constantinescu | 2eba1fa | 2014-07-31 19:07:17 +0100 | [diff] [blame] | 966 | virtual bool GenInlinedCeil(CallInfo* info); |
| 967 | virtual bool GenInlinedFloor(CallInfo* info); |
| 968 | virtual bool GenInlinedRint(CallInfo* info); |
| 969 | virtual bool GenInlinedRound(CallInfo* info, bool is_double); |
DaniilSokolov | 70c4f06 | 2014-06-24 17:34:00 -0700 | [diff] [blame] | 970 | virtual bool GenInlinedArrayCopyCharArray(CallInfo* info); |
Mark Mendell | 4028a6c | 2014-02-19 20:06:20 -0800 | [diff] [blame] | 971 | virtual bool GenInlinedIndexOf(CallInfo* info, bool zero_based); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 972 | bool GenInlinedStringCompareTo(CallInfo* info); |
Alexei Zavjalov | 6bbf096 | 2014-07-15 02:19:41 +0700 | [diff] [blame] | 973 | virtual bool GenInlinedCurrentThread(CallInfo* info); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 974 | bool GenInlinedUnsafeGet(CallInfo* info, bool is_long, bool is_volatile); |
| 975 | bool GenInlinedUnsafePut(CallInfo* info, bool is_long, bool is_object, |
| 976 | bool is_volatile, bool is_ordered); |
Matteo Franchin | e45fb9e | 2014-05-06 10:10:30 +0100 | [diff] [blame] | 977 | virtual int LoadArgRegs(CallInfo* info, int call_state, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 978 | NextCallInsn next_call_insn, |
| 979 | const MethodReference& target_method, |
| 980 | uint32_t vtable_idx, |
| 981 | uintptr_t direct_code, uintptr_t direct_method, InvokeType type, |
| 982 | bool skip_this); |
| 983 | |
| 984 | // Shared by all targets - implemented in gen_loadstore.cc. |
| 985 | RegLocation LoadCurrMethod(); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 986 | void LoadCurrMethodDirect(RegStorage r_tgt); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 987 | virtual LIR* LoadConstant(RegStorage r_dest, int value); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 988 | // Natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 989 | virtual LIR* LoadWordDisp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 990 | return LoadBaseDisp(r_base, displacement, r_dest, kWord, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 991 | } |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 992 | // Load 8 bits, regardless of target. |
| 993 | virtual LIR* Load8Disp(RegStorage r_base, int displacement, RegStorage r_dest) { |
| 994 | return LoadBaseDisp(r_base, displacement, r_dest, kSignedByte, kNotVolatile); |
| 995 | } |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 996 | // Load 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 997 | virtual LIR* Load32Disp(RegStorage r_base, int displacement, RegStorage r_dest) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 998 | return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 999 | } |
| 1000 | // Load a reference at base + displacement and decompress into register. |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1001 | virtual LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest, |
| 1002 | VolatileKind is_volatile) { |
| 1003 | return LoadBaseDisp(r_base, displacement, r_dest, kReference, is_volatile); |
| 1004 | } |
| 1005 | // Load a reference at base + index and decompress into register. |
Matteo Franchin | 255e014 | 2014-07-04 13:50:41 +0100 | [diff] [blame] | 1006 | virtual LIR* LoadRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, |
| 1007 | int scale) { |
| 1008 | return LoadBaseIndexed(r_base, r_index, r_dest, scale, kReference); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1009 | } |
| 1010 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1011 | virtual RegLocation LoadValue(RegLocation rl_src, RegisterClass op_kind); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1012 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1013 | virtual RegLocation LoadValueWide(RegLocation rl_src, RegisterClass op_kind); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1014 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1015 | virtual void LoadValueDirect(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1016 | // Load Dalvik value with 32-bit memory storage. If compressed object reference, decompress. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1017 | virtual void LoadValueDirectFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1018 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1019 | virtual void LoadValueDirectWide(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1020 | // Load Dalvik value with 64-bit memory storage. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1021 | virtual void LoadValueDirectWideFixed(RegLocation rl_src, RegStorage r_dest); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1022 | // Store an item of natural word size. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1023 | virtual LIR* StoreWordDisp(RegStorage r_base, int displacement, RegStorage r_src) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1024 | return StoreBaseDisp(r_base, displacement, r_src, kWord, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1025 | } |
| 1026 | // Store an uncompressed reference into a compressed 32-bit container. |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1027 | virtual LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src, |
| 1028 | VolatileKind is_volatile) { |
| 1029 | return StoreBaseDisp(r_base, displacement, r_src, kReference, is_volatile); |
| 1030 | } |
| 1031 | // Store an uncompressed reference into a compressed 32-bit container by index. |
Matteo Franchin | 255e014 | 2014-07-04 13:50:41 +0100 | [diff] [blame] | 1032 | virtual LIR* StoreRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, |
| 1033 | int scale) { |
| 1034 | return StoreBaseIndexed(r_base, r_index, r_src, scale, kReference); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1035 | } |
| 1036 | // Store 32 bits, regardless of target. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1037 | virtual LIR* Store32Disp(RegStorage r_base, int displacement, RegStorage r_src) { |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1038 | return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile); |
buzbee | 695d13a | 2014-04-19 13:32:20 -0700 | [diff] [blame] | 1039 | } |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1040 | |
| 1041 | /** |
| 1042 | * @brief Used to do the final store in the destination as per bytecode semantics. |
| 1043 | * @param rl_dest The destination dalvik register location. |
| 1044 | * @param rl_src The source register location. Can be either physical register or dalvik register. |
| 1045 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1046 | virtual void StoreValue(RegLocation rl_dest, RegLocation rl_src); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1047 | |
| 1048 | /** |
| 1049 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1050 | * @see StoreValue |
| 1051 | * @param rl_dest The destination dalvik register location. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1052 | * @param rl_src The source register location. Can be either physical register or dalvik |
| 1053 | * register. |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1054 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1055 | virtual void StoreValueWide(RegLocation rl_dest, RegLocation rl_src); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1056 | |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1057 | /** |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1058 | * @brief Used to do the final store to a destination as per bytecode semantics. |
| 1059 | * @see StoreValue |
| 1060 | * @param rl_dest The destination dalvik register location. |
| 1061 | * @param rl_src The source register location. It must be kLocPhysReg. |
| 1062 | * |
| 1063 | * This is used for x86 two operand computations, where we have computed the correct |
| 1064 | * register value that now needs to be properly registered. This is used to avoid an |
| 1065 | * extra register copy that would result if StoreValue was called. |
| 1066 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1067 | virtual void StoreFinalValue(RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | feb2b4e | 2014-01-28 12:59:49 -0800 | [diff] [blame] | 1068 | |
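| | // Illustrative sketch (assumption): on x86, a two-operand add may compute the result directly |
| | // in the register already holding rl_result, e.g. |
| | //   OpRegReg(kOpAdd, rl_result.reg, rl_src2.reg); |
| | //   StoreFinalValue(rl_dest, rl_result);  // record the result without the copy StoreValue would emit |
| | |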
| 1069 | /** |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1070 | * @brief Used to do the final store in a wide destination as per bytecode semantics. |
| 1071 | * @see StoreValueWide |
| 1072 | * @param rl_dest The destination dalvik register location. |
| 1073 | * @param rl_src The source register location. It must be kLocPhysReg. |
| 1074 | * |
| 1075 | * This is used for x86 two operand computations, where we have computed the correct |
| 1076 | * register values that now need to be properly registered. This is used to avoid an |
| 1077 | * extra pair of register copies that would result if StoreValueWide was called. |
| 1078 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1079 | virtual void StoreFinalValueWide(RegLocation rl_dest, RegLocation rl_src); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1080 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1081 | // Shared by all targets - implemented in mir_to_lir.cc. |
| 1082 | void CompileDalvikInstruction(MIR* mir, BasicBlock* bb, LIR* label_list); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1083 | virtual void HandleExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1084 | bool MethodBlockCodeGen(BasicBlock* bb); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1085 | bool SpecialMIR2LIR(const InlineMethod& special); |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1086 | virtual void MethodMIR2LIR(); |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1087 | // Update LIR for verbose listings. |
| 1088 | void UpdateLIROffsets(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1089 | |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1090 | /* |
| 1091 | * @brief Load the address of the dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1092 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1093 | * @param type How the method will be invoked. |
| 1094 | * @param symbolic_reg Symbolic register that will contain the code address. |
| 1095 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1096 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1097 | void LoadCodeAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1098 | SpecialTargetRegister symbolic_reg); |
| 1099 | |
| 1100 | /* |
| 1101 | * @brief Load the Method* of a dex method into the register. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1102 | * @param target_method The MethodReference of the method to be invoked. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1103 | * @param type How the method will be invoked. |
| 1104 | * @param symbolic_reg Symbolic register that will contain the method address. |
| 1105 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1106 | */ |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1107 | virtual void LoadMethodAddress(const MethodReference& target_method, InvokeType type, |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1108 | SpecialTargetRegister symbolic_reg); |
| 1109 | |
| 1110 | /* |
| 1111 | * @brief Load the Class* of a Dex Class type into the register. |
Fred Shih | e7f82e2 | 2014-08-06 10:46:37 -0700 | [diff] [blame] | 1112 | * @param dex_file DexFile that contains the class type. |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1113 | * @param type_idx Index of the class type within the DexFile. |
| 1114 | * @param symbolic_reg Symbolic register that will contain the class address. |
| 1115 | * @note symbolic_reg will be passed to TargetReg to get the physical register. |
| 1116 | */ |
Fred Shih | e7f82e2 | 2014-08-06 10:46:37 -0700 | [diff] [blame] | 1117 | virtual void LoadClassType(const DexFile& dex_file, uint32_t type_idx, |
| 1118 | SpecialTargetRegister symbolic_reg); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1119 | |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1120 | // Routines that work for the generic case, but may be overridden by the target. |
| 1121 | /* |
| 1122 | * @brief Compare memory to immediate, and branch if condition true. |
| 1123 | * @param cond The condition code that when true will branch to the target. |
| 1124 | * @param temp_reg A temporary register that can be used if compare to memory is not |
| 1125 | * supported by the architecture. |
| 1126 | * @param base_reg The register holding the base address. |
| 1127 | * @param offset The offset from the base. |
| 1128 | * @param check_value The immediate to compare to. |
Dave Allison | 69dfe51 | 2014-07-11 17:11:58 +0000 | [diff] [blame] | 1129 | * @param target Branch target (or nullptr). |
| 1130 | * @param compare Output parameter that receives the comparison LIR (or nullptr). |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1131 | * @returns The branch instruction that was generated. |
| 1132 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1133 | virtual LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg, |
Dave Allison | 69dfe51 | 2014-07-11 17:11:58 +0000 | [diff] [blame] | 1134 | int offset, int check_value, LIR* target, LIR** compare); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1135 | |
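| | // Illustrative usage (hypothetical values): branch to 'target' when the 32-bit field at |
| | // [base_reg + offset] equals zero, with no interest in the generated compare LIR: |
| | //   OpCmpMemImmBranch(kCondEq, temp_reg, base_reg, offset, 0, target, nullptr); |
| | |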
| 1136 | // Required for target - codegen helpers. |
buzbee | 11b63d1 | 2013-08-27 07:34:17 -0700 | [diff] [blame] | 1137 | virtual bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1138 | RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Ian Rogers | e2143c0 | 2014-03-28 08:47:16 -0700 | [diff] [blame] | 1139 | virtual bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) = 0; |
Dave Allison | b373e09 | 2014-02-20 16:06:36 -0800 | [diff] [blame] | 1140 | virtual LIR* CheckSuspendUsingLoad() = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1141 | |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 1142 | virtual RegStorage LoadHelper(QuickEntrypointEnum trampoline) = 0; |
Andreas Gampe | 2f244e9 | 2014-05-08 03:35:25 -0700 | [diff] [blame] | 1143 | |
Vladimir Marko | 3bf7c60 | 2014-05-07 14:55:43 +0100 | [diff] [blame] | 1144 | virtual LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest, |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1145 | OpSize size, VolatileKind is_volatile) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1146 | virtual LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, |
| 1147 | int scale, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1148 | virtual LIR* LoadConstantNoClobber(RegStorage r_dest, int value) = 0; |
| 1149 | virtual LIR* LoadConstantWide(RegStorage r_dest, int64_t value) = 0; |
| 1150 | virtual LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src, |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1151 | OpSize size, VolatileKind is_volatile) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1152 | virtual LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, |
| 1153 | int scale, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1154 | virtual void MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1155 | |
| 1156 | // Required for target - register utilities. |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1157 | |
buzbee | b5860fb | 2014-06-21 15:31:01 -0700 | [diff] [blame] | 1158 | bool IsSameReg(RegStorage reg1, RegStorage reg2) { |
| 1159 | RegisterInfo* info1 = GetRegInfo(reg1); |
| 1160 | RegisterInfo* info2 = GetRegInfo(reg2); |
| 1161 | return (info1->Master() == info2->Master() && |
| 1162 | (info1->StorageMask() & info2->StorageMask()) != 0); |
| 1163 | } |
| 1164 | |
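| | // Illustrative note (assumption): IsSameReg() treats different views of the same physical |
| | // register as equal, e.g. a 64-bit solo and its 32-bit low view on a 64-bit backend share |
| | // a Master() and have overlapping StorageMask() bits. |
| | |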
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1165 | static constexpr bool IsWide(OpSize size) { |
| 1166 | return size == k64 || size == kDouble; |
| 1167 | } |
| 1168 | |
| 1169 | static constexpr bool IsRef(OpSize size) { |
| 1170 | return size == kReference; |
| 1171 | } |
| 1172 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1173 | /** |
| 1174 | * @brief Portable way of getting special registers from the backend. |
| 1175 | * @param reg Enumeration describing the purpose of the register. |
| 1176 | * @return Return the #RegStorage corresponding to the given purpose @p reg. |
| 1177 | * @note This function is currently allowed to return any suitable view of the registers |
| 1178 | * (e.g. this could be 64-bit solo or 32-bit solo for 64-bit backends). |
| 1179 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1180 | virtual RegStorage TargetReg(SpecialTargetRegister reg) = 0; |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1181 | |
| 1182 | /** |
| 1183 | * @brief Portable way of getting special registers from the backend. |
| 1184 | * @param reg Enumeration describing the purpose of the register. |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1185 | * @param wide_kind What kind of view of the special register is required. |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1186 | * @return Return the #RegStorage corresponding to the given purpose @p reg. |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1187 | * |
Matteo Franchin | ed7a0f2 | 2014-06-10 19:23:45 +0100 | [diff] [blame] | 1188 | * @note For a 32-bit system, wide (kWide) views only make sense for the argument registers and the |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1189 | * return. In that case, this function should return a pair where the first component of |
| 1190 | * the result will be the indicated special register. |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1191 | */ |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1192 | virtual RegStorage TargetReg(SpecialTargetRegister reg, WideKind wide_kind) { |
| 1193 | if (wide_kind == kWide) { |
| 1194 | DCHECK((kArg0 <= reg && reg < kArg7) || (kFArg0 <= reg && reg < kFArg7) || (kRet0 == reg)); |
| 1195 | COMPILE_ASSERT((kArg1 == kArg0 + 1) && (kArg2 == kArg1 + 1) && (kArg3 == kArg2 + 1) && |
| 1196 | (kArg4 == kArg3 + 1) && (kArg5 == kArg4 + 1) && (kArg6 == kArg5 + 1) && |
| 1197 | (kArg7 == kArg6 + 1), kargs_range_unexpected); |
| 1198 | COMPILE_ASSERT((kFArg1 == kFArg0 + 1) && (kFArg2 == kFArg1 + 1) && (kFArg3 == kFArg2 + 1) && |
| 1199 | (kFArg4 == kFArg3 + 1) && (kFArg5 == kFArg4 + 1) && (kFArg6 == kFArg5 + 1) && |
| 1200 | (kFArg7 == kFArg6 + 1), kfargs_range_unexpected); |
| 1201 | COMPILE_ASSERT(kRet1 == kRet0 + 1, kret_range_unexpected); |
| 1202 | return RegStorage::MakeRegPair(TargetReg(reg), |
| 1203 | TargetReg(static_cast<SpecialTargetRegister>(reg + 1))); |
| 1204 | } else { |
| 1205 | return TargetReg(reg); |
| 1206 | } |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1207 | } |
| 1208 | |
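| | // Illustrative example (follows the default implementation above): on a 32-bit backend, |
| | //   TargetReg(kArg0, kWide)     // yields the pair {kArg0, kArg1} |
| | //   TargetReg(kArg0, kNotWide)  // yields just kArg0 |
| | |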
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1209 | /** |
| 1210 | * @brief Portable way of getting a special register for storing a pointer. |
| 1211 | * @see TargetReg() |
| 1212 | */ |
| 1213 | virtual RegStorage TargetPtrReg(SpecialTargetRegister reg) { |
| 1214 | return TargetReg(reg); |
| 1215 | } |
| 1216 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1217 | // Get a reg storage corresponding to the wide & ref flags of the reg location. |
| 1218 | virtual RegStorage TargetReg(SpecialTargetRegister reg, RegLocation loc) { |
| 1219 | if (loc.ref) { |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1220 | return TargetReg(reg, kRef); |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1221 | } else { |
Andreas Gampe | ccc6026 | 2014-07-04 18:02:38 -0700 | [diff] [blame] | 1222 | return TargetReg(reg, loc.wide ? kWide : kNotWide); |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1223 | } |
| 1224 | } |
| 1225 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1226 | virtual RegStorage GetArgMappingToPhysicalReg(int arg_num) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1227 | virtual RegLocation GetReturnAlt() = 0; |
| 1228 | virtual RegLocation GetReturnWideAlt() = 0; |
| 1229 | virtual RegLocation LocCReturn() = 0; |
buzbee | a0cd2d7 | 2014-06-01 09:33:49 -0700 | [diff] [blame] | 1230 | virtual RegLocation LocCReturnRef() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1231 | virtual RegLocation LocCReturnDouble() = 0; |
| 1232 | virtual RegLocation LocCReturnFloat() = 0; |
| 1233 | virtual RegLocation LocCReturnWide() = 0; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1234 | virtual ResourceMask GetRegMaskCommon(const RegStorage& reg) const = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1235 | virtual void AdjustSpillMask() = 0; |
Vladimir Marko | 31c2aac | 2013-12-09 16:31:19 +0000 | [diff] [blame] | 1236 | virtual void ClobberCallerSave() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1237 | virtual void FreeCallTemps() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1238 | virtual void LockCallTemps() = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1239 | virtual void CompilerInitializeRegAlloc() = 0; |
| 1240 | |
| 1241 | // Required for target - miscellaneous. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1242 | virtual void AssembleLIR() = 0; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1243 | virtual void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) = 0; |
| 1244 | virtual void SetupTargetResourceMasks(LIR* lir, uint64_t flags, |
| 1245 | ResourceMask* use_mask, ResourceMask* def_mask) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1246 | virtual const char* GetTargetInstFmt(int opcode) = 0; |
| 1247 | virtual const char* GetTargetInstName(int opcode) = 0; |
| 1248 | virtual std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr) = 0; |
Andreas Gampe | af263df | 2014-07-11 16:40:54 -0700 | [diff] [blame] | 1249 | |
| 1250 | // Note: This may return kEncodeNone on architectures that do not expose a PC. The caller must |
| 1251 | // take care of this. |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1252 | virtual ResourceMask GetPCUseDefEncoding() const = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1253 | virtual uint64_t GetTargetInstFlags(int opcode) = 0; |
Ian Rogers | 5aa6e04 | 2014-06-13 16:38:24 -0700 | [diff] [blame] | 1254 | virtual size_t GetInsnSize(LIR* lir) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1255 | virtual bool IsUnconditionalBranch(LIR* lir) = 0; |
| 1256 | |
Vladimir Marko | 674744e | 2014-04-24 15:18:26 +0100 | [diff] [blame] | 1257 | // Get the register class for load/store of a field. |
| 1258 | virtual RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) = 0; |
| 1259 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1260 | // Required for target - Dalvik-level generators. |
| 1261 | virtual void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1262 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1263 | virtual void GenArithOpDouble(Instruction::Code opcode, |
| 1264 | RegLocation rl_dest, RegLocation rl_src1, |
| 1265 | RegLocation rl_src2) = 0; |
| 1266 | virtual void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, |
| 1267 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1268 | virtual void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, |
| 1269 | RegLocation rl_src1, RegLocation rl_src2) = 0; |
| 1270 | virtual void GenConversion(Instruction::Code opcode, RegLocation rl_dest, |
| 1271 | RegLocation rl_src) = 0; |
Vladimir Marko | 1c282e2 | 2013-11-21 14:49:47 +0000 | [diff] [blame] | 1272 | virtual bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1273 | |
| 1274 | /** |
| 1275 | * @brief Used to generate code for intrinsic java\.lang\.Math methods min and max. |
| 1276 | * @details This is also applicable for java\.lang\.StrictMath since it is a simple algorithm |
| 1277 | * that applies on integers. The generated code will write the smallest or largest value |
| 1278 | * directly into the destination register as specified by the invoke information. |
| 1279 | * @param info Information about the invoke. |
| 1280 | * @param is_min If true generates code that computes minimum. Otherwise computes maximum. |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 1281 | * @param is_long If true the value is Long. Otherwise the value is Int. |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1282 | * @return Returns true if successfully generated |
| 1283 | */ |
Serban Constantinescu | 23abec9 | 2014-07-02 16:13:38 +0100 | [diff] [blame] | 1284 | virtual bool GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long) = 0; |
| 1285 | virtual bool GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double); |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1286 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1287 | virtual bool GenInlinedSqrt(CallInfo* info) = 0; |
Vladimir Marko | e508a20 | 2013-11-04 15:24:22 +0000 | [diff] [blame] | 1288 | virtual bool GenInlinedPeek(CallInfo* info, OpSize size) = 0; |
| 1289 | virtual bool GenInlinedPoke(CallInfo* info, OpSize size) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1290 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1291 | bool is_div) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1292 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1293 | bool is_div) = 0; |
Mark Mendell | 2bf31e6 | 2014-01-23 12:13:40 -0800 | [diff] [blame] | 1294 | /* |
| 1295 | * @brief Generate an integer div or rem operation. |
| 1296 | * @param rl_dest Destination Location. |
| 1297 | * @param rl_src1 Numerator Location. |
| 1298 | * @param rl_src2 Divisor Location. |
| 1299 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1300 | * @param check_zero 'true' if an exception should be generated if the divisor is 0. |
| 1301 | */ |
| 1302 | virtual RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, |
| 1303 | RegLocation rl_src2, bool is_div, bool check_zero) = 0; |
| 1304 | /* |
| 1305 | * @brief Generate an integer div or rem operation by a literal. |
| 1306 | * @param rl_dest Destination Location. |
| 1307 | * @param rl_src1 Numerator Location. |
| 1308 | * @param lit Divisor. |
| 1309 | * @param is_div 'true' if this is a division, 'false' for a remainder. |
| 1310 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1311 | virtual RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, |
| 1312 | bool is_div) = 0; |
| 1313 | virtual void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2) = 0; |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1314 | |
| 1315 | /** |
| 1316 | * @brief Used for generating code that throws ArithmeticException if both registers are zero. |
Ian Rogers | dd7624d | 2014-03-14 17:43:00 -0700 | [diff] [blame] | 1317 | * @details This is used for generating DivideByZero checks when the divisor is held in two |
| 1318 | * separate registers. |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1319 | * @param reg The register holding the pair of 32-bit values. |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1320 | */ |
Mingyao Yang | e643a17 | 2014-04-08 11:02:52 -0700 | [diff] [blame] | 1321 | virtual void GenDivZeroCheckWide(RegStorage reg) = 0; |
Razvan A Lupusoru | 090dd44 | 2013-12-20 14:35:03 -0800 | [diff] [blame] | 1322 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1323 | virtual void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1324 | virtual void GenExitSequence() = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1325 | virtual void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1326 | virtual void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1327 | |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1328 | /* |
| 1329 | * @brief Handle Machine Specific MIR Extended opcodes. |
| 1330 | * @param bb The basic block in which the MIR is from. |
| 1331 | * @param mir The MIR whose opcode is not standard extended MIR. |
| 1332 | * @note Base class implementation will abort for unknown opcodes. |
| 1333 | */ |
| 1334 | virtual void GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir); |
| 1335 | |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1336 | /** |
| 1337 | * @brief Lowers the kMirOpSelect MIR into LIR. |
| 1338 | * @param bb The basic block in which the MIR is from. |
| 1339 | * @param mir The MIR whose opcode is kMirOpSelect. |
| 1340 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1341 | virtual void GenSelect(BasicBlock* bb, MIR* mir) = 0; |
Razvan A Lupusoru | e27b3bf | 2014-01-23 09:41:45 -0800 | [diff] [blame] | 1342 | |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1343 | /** |
Andreas Gampe | 90969af | 2014-07-15 23:02:11 -0700 | [diff] [blame] | 1344 | * @brief Generates code to select one of the given constants depending on the given condition code. |
Andreas Gampe | 90969af | 2014-07-15 23:02:11 -0700 | [diff] [blame] | 1345 | */ |
| 1346 | virtual void GenSelectConst32(RegStorage left_op, RegStorage right_op, ConditionCode code, |
| 1347 | int32_t true_val, int32_t false_val, RegStorage rs_dest, |
| 1348 | int dest_reg_class) = 0; |
| 1349 | |
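| | // Illustrative example (hypothetical registers): materialize (a == b) ? 1 : 0 into rs_dest: |
| | //   GenSelectConst32(rs_a, rs_b, kCondEq, 1, 0, rs_dest, kCoreReg); |
| | |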
| 1350 | /** |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1351 | * @brief Used to generate a memory barrier in an architecture specific way. |
| 1352 | * @details The last generated LIR will be considered for use as the barrier. Namely, |
| 1353 | * if the last LIR can be updated in a way that preserves the barrier semantics, |
| 1354 | * it will be used as such. Otherwise, a new LIR that provides those semantics |
| 1355 | * will be generated. |
| 1356 | * @param barrier_kind The kind of memory barrier to generate. |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1357 | * @return whether a new instruction was generated. |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1358 | */ |
Andreas Gampe | b14329f | 2014-05-15 11:16:06 -0700 | [diff] [blame] | 1359 | virtual bool GenMemBarrier(MemBarrierKind barrier_kind) = 0; |
Razvan A Lupusoru | 99ad723 | 2014-02-25 17:41:08 -0800 | [diff] [blame] | 1360 | |
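| | // Illustrative usage (assumed pattern; kAnyStore/kAnyAny are assumed MemBarrierKind values): |
| | // a volatile store is typically bracketed with barriers, e.g. |
| | //   GenMemBarrier(kAnyStore);   // ensure prior accesses complete before the store |
| | //   StoreBaseDisp(r_base, displacement, r_src, size, kNotVolatile); |
| | //   GenMemBarrier(kAnyAny);     // order the store against subsequent accesses |
| | |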
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1361 | virtual void GenMoveException(RegLocation rl_dest) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1362 | virtual void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit, |
| 1363 | int first_bit, int second_bit) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1364 | virtual void GenNegDouble(RegLocation rl_dest, RegLocation rl_src) = 0; |
| 1365 | virtual void GenNegFloat(RegLocation rl_dest, RegLocation rl_src) = 0; |
Andreas Gampe | 48971b3 | 2014-08-06 10:09:01 -0700 | [diff] [blame] | 1366 | |
| 1367 | // Create code for switch statements. Will decide between short and long versions below. |
| 1368 | void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); |
| 1369 | void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); |
| 1370 | |
| 1371 | // Potentially backend-specific versions of switch instructions for shorter switch statements. |
| 1372 | // The default implementation will create a chained compare-and-branch. |
| 1373 | virtual void GenSmallPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); |
| 1374 | virtual void GenSmallSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); |
| 1375 | // Backend-specific versions of switch instructions for longer switch statements. |
| 1376 | virtual void GenLargePackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
| 1377 | virtual void GenLargeSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src) = 0; |
| 1378 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1379 | virtual void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array, |
| 1380 | RegLocation rl_index, RegLocation rl_dest, int scale) = 0; |
| 1381 | virtual void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, |
Ian Rogers | a9a8254 | 2013-10-04 11:17:26 -0700 | [diff] [blame] | 1382 | RegLocation rl_index, RegLocation rl_src, int scale, |
| 1383 | bool card_mark) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1384 | virtual void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest, |
| 1385 | RegLocation rl_src1, RegLocation rl_shift) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1386 | |
| 1387 | // Required for target - single operation generators. |
| 1388 | virtual LIR* OpUnconditionalBranch(LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1389 | virtual LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target) = 0; |
| 1390 | virtual LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, |
| 1391 | LIR* target) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1392 | virtual LIR* OpCondBranch(ConditionCode cc, LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1393 | virtual LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target) = 0; |
| 1394 | virtual LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1395 | virtual LIR* OpIT(ConditionCode cond, const char* guide) = 0; |
Dave Allison | 3da67a5 | 2014-04-02 17:03:45 -0700 | [diff] [blame] | 1396 | virtual void OpEndIT(LIR* it) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1397 | virtual LIR* OpMem(OpKind op, RegStorage r_base, int disp) = 0; |
| 1398 | virtual LIR* OpPcRelLoad(RegStorage reg, LIR* target) = 0; |
| 1399 | virtual LIR* OpReg(OpKind op, RegStorage r_dest_src) = 0; |
buzbee | 7a11ab0 | 2014-04-28 20:02:38 -0700 | [diff] [blame] | 1400 | virtual void OpRegCopy(RegStorage r_dest, RegStorage r_src) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1401 | virtual LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src) = 0; |
| 1402 | virtual LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1403 | virtual LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1404 | |
| 1405 | /** |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1406 | * @brief Used to generate an LIR that does a load from mem to reg. |
| 1407 | * @param r_dest The destination physical register. |
| 1408 | * @param r_base The base physical register for memory operand. |
| 1409 | * @param offset The displacement for memory operand. |
| 1410 | * @param move_type Specification on the move desired (size, alignment, register kind). |
| 1411 | * @return Returns the generated move LIR. |
| 1412 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1413 | virtual LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, |
| 1414 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1415 | |
| 1416 | /** |
| 1417 | * @brief Used to generate an LIR that does a store from reg to mem. |
| 1418 | * @param r_base The base physical register for memory operand. |
| 1419 | * @param offset The displacement for memory operand. |
| 1420 | * @param r_src The source physical register. |
| 1421 | * @param move_type Specification on the move desired (size, alignment, |
| 1422 | * register kind). |
| 1423 | * @return Returns the generated move LIR. |
| 1424 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1425 | virtual LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, |
| 1426 | MoveType move_type) = 0; |
Razvan A Lupusoru | 2c498d1 | 2014-01-29 16:02:57 -0800 | [diff] [blame] | 1427 | |
| 1428 | /** |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1429 | * @brief Used for generating a conditional register to register operation. |
| 1430 | * @param op The opcode kind. |
| 1431 | * @param cc The condition code that when true will perform the opcode. |
| 1432 | * @param r_dest The destination physical register. |
| 1433 | * @param r_src The source physical register. |
| 1434 | * @return Returns the newly created LIR or null in case of creation failure. |
| 1435 | */ |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1436 | virtual LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src) = 0; |
Razvan A Lupusoru | bd288c2 | 2013-12-20 17:27:23 -0800 | [diff] [blame] | 1437 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1438 | virtual LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value) = 0; |
| 1439 | virtual LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, |
| 1440 | RegStorage r_src2) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1441 | virtual LIR* OpTestSuspend(LIR* target) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1442 | virtual LIR* OpVldm(RegStorage r_base, int count) = 0; |
| 1443 | virtual LIR* OpVstm(RegStorage r_base, int count) = 0; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1444 | virtual void OpRegCopyWide(RegStorage dest, RegStorage src) = 0; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1445 | virtual bool InexpensiveConstantInt(int32_t value) = 0; |
| 1446 | virtual bool InexpensiveConstantFloat(int32_t value) = 0; |
| 1447 | virtual bool InexpensiveConstantLong(int64_t value) = 0; |
| 1448 | virtual bool InexpensiveConstantDouble(int64_t value) = 0; |
Matteo Franchin | c763e35 | 2014-07-04 12:53:27 +0100 | [diff] [blame] | 1449 | virtual bool InexpensiveConstantInt(int32_t value, Instruction::Code opcode) { |
| 1450 | return InexpensiveConstantInt(value); |
| 1451 | } |
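| | // Note (descriptive, not from the original comments): the overload above is a default that |
| | // ignores the opcode and defers to the value-only check; targets may override it when the |
| | // set of cheap immediates depends on the instruction being generated. |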
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1452 | |
Ian Rogers | d9c4fc9 | 2013-10-01 19:45:43 -0700 | [diff] [blame] | 1453 | // May be optimized by targets. |
| 1454 | virtual void GenMonitorEnter(int opt_flags, RegLocation rl_src); |
| 1455 | virtual void GenMonitorExit(int opt_flags, RegLocation rl_src); |
| 1456 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1457 | // Temp workaround |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1458 | void Workaround7250540(RegLocation rl_dest, RegStorage zero_reg); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1459 | |
Andreas Gampe | 9843059 | 2014-07-27 19:44:50 -0700 | [diff] [blame] | 1460 | virtual LIR* InvokeTrampoline(OpKind op, RegStorage r_tgt, QuickEntrypointEnum trampoline) = 0; |
| 1461 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1462 | protected: |
| 1463 | Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena); |
| 1464 | |
| 1465 | CompilationUnit* GetCompilationUnit() { |
| 1466 | return cu_; |
| 1467 | } |
Mark Mendell | 4708dcd | 2014-01-22 09:05:18 -0800 | [diff] [blame] | 1468 | /* |
| 1469 | * @brief Returns the index of the lowest set bit in 'x'. |
| 1470 | * @param x Value to be examined. |
| 1471 | * @returns The bit number of the lowest bit set in the value. |
| 1472 | */ |
| 1473 | int32_t LowestSetBit(uint64_t x); |
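| | // Illustrative example: LowestSetBit(0x28) is 3, since 0x28 == 0b101000 and bit 3 is its lowest set bit. |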
| 1474 | /* |
| 1475 | * @brief Is this value a power of two? |
| 1476 | * @param x Value to be examined. |
| 1477 | * @returns 'true' if only 1 bit is set in the value. |
| 1478 | */ |
| 1479 | bool IsPowerOfTwo(uint64_t x); |
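| | // Illustrative example: IsPowerOfTwo(64) is true (one bit set), while IsPowerOfTwo(48) is false (0b110000). |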
| 1480 | /* |
| 1481 | * @brief Do these SRs overlap? |
| 1482 | * @param rl_op1 One RegLocation |
| 1483 | * @param rl_op2 The other RegLocation |
| 1484 | * @return 'true' if the VR pairs overlap |
| 1485 | * |
| 1486 | * Check to see if a result pair has a misaligned overlap with an operand pair. This |
| 1487 | * is not usual for dx to generate, but it is legal (for now). In a future rev of |
| 1488 | * dex, we'll want to make this case illegal. |
| 1489 | */ |
Alexei Zavjalov | d8c3e36 | 2014-10-08 15:51:59 +0700 | [diff] [blame] | 1490 | bool PartiallyIntersects(RegLocation rl_op1, RegLocation rl_op2); |
| 1491 | |
| 1492 | /* |
| 1493 | * @brief Do these SRs intersect? |
| 1494 | * @param rl_op1 One RegLocation |
| 1495 | * @param rl_op2 The other RegLocation |
| 1496 | * @return 'true' if the VR pairs intersect |
| 1497 | * |
| 1498 | * Check to see if a result pair has misaligned overlap or |
| 1499 | * full overlap with an operand pair. |
| 1500 | */ |
| 1501 | bool Intersects(RegLocation rl_op1, RegLocation rl_op2); |
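| | // Illustrative example: a wide result in the VR pair {v1, v2} and a wide operand in {v2, v3} |
| | // overlap in a misaligned way, so both PartiallyIntersects() and Intersects() return 'true'; |
| | // for identical pairs {v1, v2} and {v1, v2} only Intersects() returns 'true'. |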
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1502 | |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1503 | /* |
| 1504 | * @brief Force a location (in a register) into a temporary register |
| 1505 | * @param loc location of result |
| 1506 | * @returns updated location |
| 1507 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1508 | virtual RegLocation ForceTemp(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1509 | |
| 1510 | /* |
| 1511 | * @brief Force a wide location (in registers) into temporary registers |
| 1512 | * @param loc location of result |
| 1513 | * @returns updated location |
| 1514 | */ |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1515 | virtual RegLocation ForceTempWide(RegLocation loc); |
Mark Mendell | e02d48f | 2014-01-15 11:19:23 -0800 | [diff] [blame] | 1516 | |
Mark Mendell | df8ee2e | 2014-01-27 16:37:47 -0800 | [diff] [blame] | 1517 | virtual void GenInstanceofFinal(bool use_declaring_class, uint32_t type_idx, |
| 1518 | RegLocation rl_dest, RegLocation rl_src); |
| 1519 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1520 | void AddSlowPath(LIRSlowPath* slowpath); |
| 1521 | |
Serguei Katkov | 9ee4519 | 2014-07-17 14:39:03 +0700 | [diff] [blame] | 1522 | /* |
| 1523 | * |
| 1524 | * @brief Used to set up an instanceof check against a class. |
| 1525 | * @param needs_access_check 'true' if we must check the access. |
| 1526 | * @param type_known_final 'true' if the type is known to be a final class. |
| 1527 | * @param type_known_abstract 'true' if the type is known to be an abstract class. |
| 1528 | * @param use_declaring_class 'true' if the type can be loaded off the current Method*. |
| 1529 | * @param can_assume_type_is_in_dex_cache 'true' if the type is known to be in the cache. |
| 1530 | * @param type_idx Type index to use if use_declaring_class is 'false'. |
| 1531 | * @param rl_dest Result to be set to 0 or 1. |
| 1532 | * @param rl_src Object to be tested. |
| 1533 | */ |
| 1534 | void GenInstanceofCallingHelper(bool needs_access_check, bool type_known_final, |
| 1535 | bool type_known_abstract, bool use_declaring_class, |
| 1536 | bool can_assume_type_is_in_dex_cache, |
| 1537 | uint32_t type_idx, RegLocation rl_dest, |
| 1538 | RegLocation rl_src); |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1539 | /* |
Tong Shen | 547cdfd | 2014-08-05 01:54:19 -0700 | [diff] [blame] | 1540 | * @brief Generate the eh_frame FDE information if possible. |
| 1541 | * @returns pointer to vector containing FDE information, or NULL. |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1542 | */ |
Tong Shen | 547cdfd | 2014-08-05 01:54:19 -0700 | [diff] [blame] | 1543 | virtual std::vector<uint8_t>* ReturnFrameDescriptionEntry(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1544 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1545 | /** |
| 1546 | * @brief Used to insert marker that can be used to associate MIR with LIR. |
| 1547 | * @details Only inserts marker if verbosity is enabled. |
| 1548 | * @param mir The mir that is currently being generated. |
| 1549 | */ |
| 1550 | void GenPrintLabel(MIR* mir); |
| 1551 | |
| 1552 | /** |
| 1553 | * @brief Used to generate return sequence when there is no frame. |
| 1554 | * @details Assumes that the return registers have already been populated. |
| 1555 | */ |
| 1556 | virtual void GenSpecialExitSequence() = 0; |
| 1557 | |
| 1558 | /** |
| 1559 | * @brief Used to generate code for special methods that are known to be |
| 1560 | * small enough to work in frameless mode. |
| 1561 | * @param bb The basic block of the first MIR. |
| 1562 | * @param mir The first MIR of the special method. |
| 1563 | * @param special Information about the special method. |
| 1564 | * @return Returns whether or not this was handled successfully. Returns false |
| 1565 | * if caller should punt to normal MIR2LIR conversion. |
| 1566 | */ |
| 1567 | virtual bool GenSpecialCase(BasicBlock* bb, MIR* mir, const InlineMethod& special); |
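| | // Descriptive note: typical frameless patterns are covered by the helpers declared below, |
| | // i.e. simple getters (GenSpecialIGet), simple setters (GenSpecialIPut) and methods that |
| | // just return an argument (GenSpecialIdentity). |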
| 1568 | |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1569 | protected: |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1570 | void ClobberBody(RegisterInfo* p); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1571 | void SetCurrentDexPc(DexOffset dexpc) { |
| 1572 | current_dalvik_offset_ = dexpc; |
| 1573 | } |
| 1574 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1575 | /** |
| 1576 | * @brief Used to lock register if argument at in_position was passed that way. |
| 1577 | * @details Does nothing if the argument is passed via stack. |
| 1578 | * @param in_position The argument number whose register to lock. |
| 1579 | * @param wide Whether the argument is wide. |
| 1580 | */ |
| 1581 | void LockArg(int in_position, bool wide = false); |
| 1582 | |
| 1583 | /** |
| 1584 | * @brief Used to load VR argument to a physical register. |
| 1585 | * @details The load is only done if the argument is not already in a physical register. |
| 1586 | * LockArg must have been previously called. |
| 1587 | * @param in_position The argument number to load. |
| 1588 | * @param wide Whether the argument is 64-bit or not. |
| 1589 | * @return Returns the register (or register pair) for the loaded argument. |
| 1590 | */ |
Vladimir Marko | c93ac8b | 2014-05-13 17:53:49 +0100 | [diff] [blame] | 1591 | RegStorage LoadArg(int in_position, RegisterClass reg_class, bool wide = false); |
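| | // Sketch of the intended calling pattern (illustrative only; kCoreReg chosen as an example class): |
| | //   LockArg(0);                             // pin the register holding argument 0, if any |
| | //   RegStorage reg = LoadArg(0, kCoreReg);  // then materialize argument 0 in a core register |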
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1592 | |
| 1593 | /** |
| 1594 | * @brief Used to load a VR argument directly to a specified register location. |
| 1595 | * @param in_position The argument number to place in register. |
| 1596 | * @param rl_dest The register location where the argument is to be placed. |
| 1597 | */ |
| 1598 | void LoadArgDirect(int in_position, RegLocation rl_dest); |
| 1599 | |
| 1600 | /** |
| 1601 | * @brief Used to generate LIR for special getter method. |
| 1602 | * @param mir The mir that represents the iget. |
| 1603 | * @param special Information about the special getter method. |
| 1604 | * @return Returns whether LIR was successfully generated. |
| 1605 | */ |
| 1606 | bool GenSpecialIGet(MIR* mir, const InlineMethod& special); |
| 1607 | |
| 1608 | /** |
| 1609 | * @brief Used to generate LIR for special setter method. |
| 1610 | * @param mir The mir that represents the iput. |
| 1611 | * @param special Information about the special setter method. |
| 1612 | * @return Returns whether LIR was successfully generated. |
| 1613 | */ |
| 1614 | bool GenSpecialIPut(MIR* mir, const InlineMethod& special); |
| 1615 | |
| 1616 | /** |
| 1617 | * @brief Used to generate LIR for special return-args method. |
| 1618 | * @param mir The mir that represents the return of the argument. |
| 1619 | * @param special Information about the special return-args method. |
| 1620 | * @return Returns whether LIR was successfully generated. |
| 1621 | */ |
| 1622 | bool GenSpecialIdentity(MIR* mir, const InlineMethod& special); |
| 1623 | |
Mingyao Yang | 4289456 | 2014-04-07 12:42:16 -0700 | [diff] [blame] | 1624 | void AddDivZeroCheckSlowPath(LIR* branch); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1625 | |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1626 | // Copy arg0 and arg1 to kArg0 and kArg1 safely, possibly using |
| 1627 | // kArg2 as temp. |
Mark Mendell | e87f9b5 | 2014-04-30 14:13:18 -0400 | [diff] [blame] | 1628 | virtual void CopyToArgumentRegs(RegStorage arg0, RegStorage arg1); |
| 1629 | |
| 1630 | /** |
| 1631 | * @brief Load Constant into RegLocation |
| 1632 | * @param rl_dest Destination RegLocation |
| 1633 | * @param value Constant value |
| 1634 | */ |
| 1635 | virtual void GenConst(RegLocation rl_dest, int value); |
Mingyao Yang | 80365d9 | 2014-04-18 12:10:58 -0700 | [diff] [blame] | 1636 | |
Serguei Katkov | 59a42af | 2014-07-05 00:55:46 +0700 | [diff] [blame] | 1637 | /** |
| 1638 | * Returns true iff wide GPRs are just different views on the same physical register. |
| 1639 | */ |
| 1640 | virtual bool WideGPRsAreAliases() = 0; |
| 1641 | |
| 1642 | /** |
| 1643 | * Returns true iff wide FPRs are just different views on the same physical register. |
| 1644 | */ |
| 1645 | virtual bool WideFPRsAreAliases() = 0; |
| 1646 | |
| 1647 | |
Andreas Gampe | 4b537a8 | 2014-06-30 22:24:53 -0700 | [diff] [blame] | 1648 | enum class WidenessCheck { // private |
| 1649 | kIgnoreWide, |
| 1650 | kCheckWide, |
| 1651 | kCheckNotWide |
| 1652 | }; |
| 1653 | |
| 1654 | enum class RefCheck { // private |
| 1655 | kIgnoreRef, |
| 1656 | kCheckRef, |
| 1657 | kCheckNotRef |
| 1658 | }; |
| 1659 | |
| 1660 | enum class FPCheck { // private |
| 1661 | kIgnoreFP, |
| 1662 | kCheckFP, |
| 1663 | kCheckNotFP |
| 1664 | }; |
| 1665 | |
| 1666 | /** |
| 1667 | * Check whether a reg storage seems well-formed, that is, if the reg storage is valid, |
| 1668 | * that it has the expected form for the given flags. |
| 1669 | * Each flag selects whether the property is ignored, required, or required to be absent. |
| 1670 | */ |
| 1671 | void CheckRegStorageImpl(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp, bool fail, |
| 1672 | bool report) |
| 1673 | const; |
| 1674 | |
| 1675 | /** |
| 1676 | * Check whether a reg location seems well-formed, that is, if a reg storage is encoded, |
| 1677 | * that it has the expected size. |
| 1678 | */ |
| 1679 | void CheckRegLocationImpl(RegLocation rl, bool fail, bool report) const; |
| 1680 | |
| 1681 | // See CheckRegStorageImpl. Will print or fail depending on kFailOnSizeError and |
| 1682 | // kReportSizeError. |
| 1683 | void CheckRegStorage(RegStorage rs, WidenessCheck wide, RefCheck ref, FPCheck fp) const; |
| 1684 | // See CheckRegLocationImpl. |
| 1685 | void CheckRegLocation(RegLocation rl) const; |
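| | // Illustrative usage: to assert that rs holds a wide, non-reference, non-FP value: |
| | //   CheckRegStorage(rs, WidenessCheck::kCheckWide, RefCheck::kCheckNotRef, FPCheck::kCheckNotFP); |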
| 1686 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1687 | public: |
| 1688 | // TODO: add accessors for these. |
| 1689 | LIR* literal_list_; // Constants. |
| 1690 | LIR* method_literal_list_; // Method literals requiring patching. |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 1691 | LIR* class_literal_list_; // Class literals requiring patching. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1692 | LIR* code_literal_list_; // Code literals requiring patching. |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1693 | LIR* first_fixup_; // Doubly-linked list of LIR nodes requiring fixups. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1694 | |
| 1695 | protected: |
| 1696 | CompilationUnit* const cu_; |
| 1697 | MIRGraph* const mir_graph_; |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1698 | ArenaVector<SwitchTable*> switch_tables_; |
| 1699 | ArenaVector<FillArrayData*> fill_array_data_; |
| 1700 | ArenaVector<RegisterInfo*> tempreg_info_; |
| 1701 | ArenaVector<RegisterInfo*> reginfo_map_; |
| 1702 | ArenaVector<void*> pointer_storage_; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1703 | CodeOffset current_code_offset_; // Working byte offset of machine instructions. |
| 1704 | CodeOffset data_offset_; // starting offset of literal pool. |
| 1705 | size_t total_size_; // header + code size. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1706 | LIR* block_label_list_; |
| 1707 | PromotionMap* promotion_map_; |
| 1708 | /* |
| 1709 | * TODO: The code generation utilities don't have a built-in |
| 1710 | * mechanism to propagate the original Dalvik opcode address to the |
| 1711 | * associated generated instructions. For the trace compiler, this wasn't |
| 1712 | * necessary because the interpreter handled all throws and debugging |
| 1713 | * requests. For now we'll handle this by placing the Dalvik offset |
| 1714 | * in the CompilationUnit struct before codegen for each instruction. |
| 1715 | * The low-level LIR creation utilities will pull it from here. Rework this. |
| 1716 | */ |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1717 | DexOffset current_dalvik_offset_; |
| 1718 | size_t estimated_native_code_size_; // Just an estimate; used to reserve code_buffer_ size. |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1719 | std::unique_ptr<RegisterPool> reg_pool_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1720 | /* |
| 1721 | * Sanity checking for the register temp tracking. The same ssa |
| 1722 | * name should never be associated with more than one temp register |
| 1723 | * within the compilation of a single instruction. |
| 1724 | */ |
| 1725 | int live_sreg_; |
| 1726 | CodeBuffer code_buffer_; |
Yevgeny Rouban | e3ea838 | 2014-08-08 16:29:38 +0700 | [diff] [blame] | 1727 | // The source mapping table data (pc -> dex). More entries than in encoded_mapping_table_. |
| 1728 | SrcMap src_mapping_table_; |
Ian Rogers | 96faf5b | 2013-08-09 22:05:32 -0700 | [diff] [blame] | 1729 | // The encoded mapping table data (dex -> pc offset and pc offset -> dex) with a size prefix. |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 1730 | std::vector<uint8_t> encoded_mapping_table_; |
Vladimir Marko | 8081d2b | 2014-07-31 15:33:43 +0100 | [diff] [blame] | 1731 | ArenaVector<uint32_t> core_vmap_table_; |
| 1732 | ArenaVector<uint32_t> fp_vmap_table_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1733 | std::vector<uint8_t> native_gc_map_; |
Vladimir Marko | f4da675 | 2014-08-01 19:04:18 +0100 | [diff] [blame] | 1734 | ArenaVector<LinkerPatch> patches_; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1735 | int num_core_spills_; |
| 1736 | int num_fp_spills_; |
| 1737 | int frame_size_; |
| 1738 | unsigned int core_spill_mask_; |
| 1739 | unsigned int fp_spill_mask_; |
| 1740 | LIR* first_lir_insn_; |
| 1741 | LIR* last_lir_insn_; |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1742 | |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1743 | ArenaVector<LIRSlowPath*> slow_paths_; |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1744 | |
| 1745 | // The memory reference type for new LIRs. |
| 1746 | // NOTE: Passing this as an explicit parameter through all functions that directly or indirectly |
| 1747 | // invoke RawLIR() would clutter the code and reduce the readability. |
| 1748 | ResourceMask::ResourceBit mem_ref_type_; |
| 1749 | |
| 1750 | // Each resource mask now takes 16-bytes, so having both use/def masks directly in a LIR |
| 1751 | // would consume 32 bytes per LIR. Instead, the LIR now holds only pointers to the masks |
| 1752 | // (i.e. 8 bytes on 32-bit arch, 16 bytes on 64-bit arch) and we use ResourceMaskCache |
| 1753 | // to deduplicate the masks. |
| 1754 | ResourceMaskCache mask_cache_; |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1755 | |
| 1756 | private: |
| 1757 | static bool SizeMatchesTypeForEntrypoint(OpSize size, Primitive::Type type); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1758 | }; // Class Mir2Lir |
| 1759 | |
| 1760 | } // namespace art |
| 1761 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 1762 | #endif // ART_COMPILER_DEX_QUICK_MIR_TO_LIR_H_ |