/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_
#define ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_

#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "base/arena_allocator.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/enums.h"
#include "base/macros.h"
#include "managed_register.h"
#include "offsets.h"

namespace art {

class ArenaAllocator;
class DebugFrameOpCodeWriterForAssembler;
class InstructionSetFeatures;
class MemoryRegion;
class JNIMacroLabel;

enum class JNIMacroUnaryCondition {
  kZero,
  kNotZero
};

class ArgumentLocation {
 public:
  ArgumentLocation(ManagedRegister reg, size_t size)
      : reg_(reg), frame_offset_(0u), size_(size) {
    DCHECK(reg.IsRegister());
  }

  ArgumentLocation(FrameOffset frame_offset, size_t size)
      : reg_(ManagedRegister::NoRegister()), frame_offset_(frame_offset), size_(size) {}

  bool IsRegister() const {
    return reg_.IsRegister();
  }

  ManagedRegister GetRegister() const {
    DCHECK(IsRegister());
    return reg_;
  }

  FrameOffset GetFrameOffset() const {
    DCHECK(!IsRegister());
    return frame_offset_;
  }

  size_t GetSize() const {
    return size_;
  }

 private:
  ManagedRegister reg_;
  FrameOffset frame_offset_;
  size_t size_;
};

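// Example (illustrative only; `some_register` stands in for an architecture-specific
// ManagedRegister, and the offsets/sizes below are made up):
//
//   ArgumentLocation in_register(some_register, /*size=*/ 8u);  // argument held in a register
//   ArgumentLocation on_stack(FrameOffset(16), /*size=*/ 4u);   // argument spilled at SP+16
//
// MoveArguments() below consumes ArrayRef<>s of such locations.
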
template <PointerSize kPointerSize>
class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
 public:
  static std::unique_ptr<JNIMacroAssembler<kPointerSize>> Create(
      ArenaAllocator* allocator,
      InstructionSet instruction_set,
      const InstructionSetFeatures* instruction_set_features = nullptr);

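  // Typical lifecycle (illustrative sketch only; the pointer size, instruction set,
  // `allocator`, and `code_buffer` below are assumptions, not requirements of this interface):
  //
  //   std::unique_ptr<JNIMacroAssembler<PointerSize::k64>> jni_asm =
  //       JNIMacroAssembler<PointerSize::k64>::Create(allocator, InstructionSet::kArm64);
  //   ...                                    // emit BuildFrame(), loads/stores, Call(), ...
  //   jni_asm->FinalizeCode();
  //   MemoryRegion code(code_buffer, jni_asm->CodeSize());
  //   jni_asm->FinalizeInstructions(code);
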
  // Finalize the code; emit slow paths, fix up branches, add the literal pool, etc.
  virtual void FinalizeCode() = 0;

  // Size of generated code
  virtual size_t CodeSize() const = 0;

  // Copy instructions out of the assembly buffer into the given region of memory.
  virtual void FinalizeInstructions(const MemoryRegion& region) = 0;

  // Emit code that will create an activation on the stack
  virtual void BuildFrame(size_t frame_size,
                          ManagedRegister method_reg,
                          ArrayRef<const ManagedRegister> callee_save_regs) = 0;

  // Emit code that will remove an activation from the stack
  //
  // Argument `may_suspend` must be `true` if the compiled method may be
  // suspended during its execution (otherwise `false`, if it is impossible
  // to suspend during its execution).
  virtual void RemoveFrame(size_t frame_size,
                           ArrayRef<const ManagedRegister> callee_save_regs,
                           bool may_suspend) = 0;

  virtual void IncreaseFrameSize(size_t adjust) = 0;
  virtual void DecreaseFrameSize(size_t adjust) = 0;

  // Return the same core register but with the correct size if the architecture-specific
  // ManagedRegister has different representations for different sizes.
  virtual ManagedRegister CoreRegisterWithSize(ManagedRegister src, size_t size) = 0;

  // Store routines
  virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;

  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) = 0;

  virtual void StoreStackOffsetToThread(ThreadOffset<kPointerSize> thr_offs,
                                        FrameOffset fr_offs) = 0;

  virtual void StoreStackPointerToThread(ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void StoreSpanning(FrameOffset dest,
                             ManagedRegister src,
                             FrameOffset in_off) = 0;

  // Load routines
  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
  virtual void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) = 0;

  virtual void LoadFromThread(ManagedRegister dest,
                              ThreadOffset<kPointerSize> src,
                              size_t size) = 0;

  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
  // If `unpoison_reference` is true and reference poisoning is enabled, then we negate
  // (unpoison) the read reference.
  virtual void LoadRef(ManagedRegister dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;

  virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;

  // Copying routines

  // Move arguments from `srcs` locations to `dests` locations.
  //
  // References shall be spilled to `refs` frame offsets (kInvalidReferenceOffset indicates
  // a non-reference type) if they are in registers, and the corresponding `dests` shall be
  // filled with `jobject` replacements. If the first argument is a reference, it is
  // assumed to be `this` and cannot be null; all other reference arguments can be null.
  virtual void MoveArguments(ArrayRef<ArgumentLocation> dests,
                             ArrayRef<ArgumentLocation> srcs,
                             ArrayRef<FrameOffset> refs) = 0;

  virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;

  virtual void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void CopyRawPtrToThread(ThreadOffset<kPointerSize> thr_offs,
                                  FrameOffset fr_offs,
                                  ManagedRegister scratch) = 0;

  virtual void CopyRef(FrameOffset dest, FrameOffset src) = 0;
  virtual void CopyRef(FrameOffset dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void Copy(FrameOffset dest, FrameOffset src, size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    ManagedRegister src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest_base,
                    Offset dest_offset,
                    FrameOffset src,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    FrameOffset src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest,
                    Offset dest_offset,
                    ManagedRegister src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    Offset dest_offset,
                    FrameOffset src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void MemoryBarrier(ManagedRegister scratch) = 0;

  // Sign extension
  virtual void SignExtend(ManagedRegister mreg, size_t size) = 0;

  // Zero extension
  virtual void ZeroExtend(ManagedRegister mreg, size_t size) = 0;

  // Exploit fast access in managed code to Thread::Current()
  virtual void GetCurrentThread(ManagedRegister dest) = 0;
  virtual void GetCurrentThread(FrameOffset dest_offset) = 0;

  // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
  // stale reference that can be used to avoid loading the spilled value to
  // see if the value is null.
  virtual void CreateJObject(ManagedRegister out_reg,
                             FrameOffset spilled_reference_offset,
                             ManagedRegister in_reg,
                             bool null_allowed) = 0;

  // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`.
  virtual void CreateJObject(FrameOffset out_off,
                             FrameOffset spilled_reference_offset,
                             bool null_allowed) = 0;

  // Heap::VerifyObject on `src`. In some cases (such as a reference to `this`) we
  // know that `src` cannot be null.
  virtual void VerifyObject(ManagedRegister src, bool could_be_null) = 0;
  virtual void VerifyObject(FrameOffset src, bool could_be_null) = 0;

  // Jump to the address held at [base+offset] (used for tail calls).
  virtual void Jump(ManagedRegister base, Offset offset) = 0;

  // Call to the address held at [base+offset].
  virtual void Call(ManagedRegister base, Offset offset) = 0;
  virtual void Call(FrameOffset base, Offset offset) = 0;
  virtual void CallFromThread(ThreadOffset<kPointerSize> offset) = 0;

  // Generate the fast path for a transition to Native. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs`, which should be callee-save core registers
  // (already saved before this call), and must preserve all argument registers.
  virtual void TryToTransitionFromRunnableToNative(
      JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs) = 0;

  // Generate the fast path for a transition to Runnable. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs`, which should be core argument registers
  // not used as return registers, and it must preserve the `return_reg` if any.
  virtual void TryToTransitionFromNativeToRunnable(JNIMacroLabel* label,
                                                   ArrayRef<const ManagedRegister> scratch_regs,
                                                   ManagedRegister return_reg) = 0;

  // Generate a suspend check and branch to `label` if there is a pending suspend request.
  virtual void SuspendCheck(JNIMacroLabel* label) = 0;

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to `label` if it is.
  virtual void ExceptionPoll(JNIMacroLabel* label) = 0;
  // Deliver pending exception.
  virtual void DeliverPendingException() = 0;

  // Create a new label that can be used with Jump/Bind calls.
  virtual std::unique_ptr<JNIMacroLabel> CreateLabel() = 0;
  // Emit an unconditional jump to the label.
  virtual void Jump(JNIMacroLabel* label) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the GC marking flag.
  virtual void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the object's
  // mark bit.
  virtual void TestMarkBit(ManagedRegister ref,
                           JNIMacroLabel* label,
                           JNIMacroUnaryCondition cond) = 0;
  // Code at this offset will serve as the target for the Jump call.
  virtual void Bind(JNIMacroLabel* label) = 0;

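  // Typical label usage (illustrative sketch only; the label name below is made up):
  //
  //   std::unique_ptr<JNIMacroLabel> read_barrier_slow = jni_asm->CreateLabel();
  //   jni_asm->TestGcMarking(read_barrier_slow.get(), JNIMacroUnaryCondition::kNotZero);
  //   ...                                        // fast path, no read barrier needed
  //   jni_asm->Bind(read_barrier_slow.get());    // jump target for the marking case
  //
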
  virtual ~JNIMacroAssembler() {}

  /**
   * @brief Buffer of DWARF's Call Frame Information opcodes.
   * @details It is used by debuggers and other tools to unwind the call stack.
   */
  virtual DebugFrameOpCodeWriterForAssembler& cfi() = 0;

  void SetEmitRunTimeChecksInDebugMode(bool value) {
    emit_run_time_checks_in_debug_mode_ = value;
  }

  static constexpr FrameOffset kInvalidReferenceOffset = FrameOffset(0);

 protected:
  JNIMacroAssembler() {}

  // Should run-time checks be emitted in debug mode?
  bool emit_run_time_checks_in_debug_mode_ = false;
};

// A "Label" class used with the JNIMacroAssembler,
// allowing one to use branches (jumping from one place to another).
//
// This is just an interface, so every platform must provide
// its own implementation of it.
//
// It is only safe to use a label created
// via JNIMacroAssembler::CreateLabel with that same macro assembler.
class JNIMacroLabel {
 public:
  virtual ~JNIMacroLabel() = 0;

  const InstructionSet isa_;
 protected:
  explicit JNIMacroLabel(InstructionSet isa) : isa_(isa) {}
};

inline JNIMacroLabel::~JNIMacroLabel() {
  // Compulsory definition for a pure virtual destructor
  // to avoid linking errors.
}

template <typename T, PointerSize kPointerSize>
class JNIMacroAssemblerFwd : public JNIMacroAssembler<kPointerSize> {
 public:
  void FinalizeCode() override {
    asm_.FinalizeCode();
  }

  size_t CodeSize() const override {
    return asm_.CodeSize();
  }

  void FinalizeInstructions(const MemoryRegion& region) override {
    asm_.FinalizeInstructions(region);
  }

  DebugFrameOpCodeWriterForAssembler& cfi() override {
    return asm_.cfi();
  }

 protected:
  explicit JNIMacroAssemblerFwd(ArenaAllocator* allocator) : asm_(allocator) {}

  T asm_;
};

template <typename Self, typename PlatformLabel, InstructionSet kIsa>
class JNIMacroLabelCommon : public JNIMacroLabel {
 public:
  static Self* Cast(JNIMacroLabel* label) {
    CHECK(label != nullptr);
    CHECK_EQ(kIsa, label->isa_);

    return reinterpret_cast<Self*>(label);
  }

 protected:
  PlatformLabel* AsPlatformLabel() {
    return &label_;
  }

  JNIMacroLabelCommon() : JNIMacroLabel(kIsa) {
  }

  ~JNIMacroLabelCommon() override {}

 private:
  PlatformLabel label_;
};

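// A platform back end would typically derive its concrete label type from
// JNIMacroLabelCommon. The sketch below is illustrative only; the class and
// accessor names are hypothetical stand-ins, not declarations from this header:
//
//   class Arm64ExampleLabel final
//       : public JNIMacroLabelCommon<Arm64ExampleLabel,
//                                    vixl::aarch64::Label,
//                                    InstructionSet::kArm64> {
//    public:
//     vixl::aarch64::Label* AsArm64() { return AsPlatformLabel(); }
//   };
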
}  // namespace art

#endif  // ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_