/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_
#define ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_

#include <vector>

#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "base/arena_allocator.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/enums.h"
#include "base/macros.h"
#include "managed_register.h"
#include "offsets.h"

namespace art {

class ArenaAllocator;
class DebugFrameOpCodeWriterForAssembler;
class InstructionSetFeatures;
class MemoryRegion;
class JNIMacroLabel;

enum class JNIMacroUnaryCondition {
  kZero,
  kNotZero
};

class ArgumentLocation {
 public:
  ArgumentLocation(ManagedRegister reg, size_t size)
      : reg_(reg), frame_offset_(0u), size_(size) {
    DCHECK(reg.IsRegister());
  }

  ArgumentLocation(FrameOffset frame_offset, size_t size)
      : reg_(ManagedRegister::NoRegister()), frame_offset_(frame_offset), size_(size) {}

  bool IsRegister() const {
    return reg_.IsRegister();
  }

  ManagedRegister GetRegister() const {
    DCHECK(IsRegister());
    return reg_;
  }

  FrameOffset GetFrameOffset() const {
    DCHECK(!IsRegister());
    return frame_offset_;
  }

  size_t GetSize() const {
    return size_;
  }

 private:
  ManagedRegister reg_;
  FrameOffset frame_offset_;
  size_t size_;
};

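// Usage sketch (illustrative only, not part of the interface): an ArgumentLocation names
// either a register or a frame slot, never both. `some_managed_register` below is an
// assumed placeholder; real callers obtain a register from the architecture-specific
// ManagedRegister subclass.
//
//   ArgumentLocation stack_arg(FrameOffset(16), /*size=*/ 8u);      // argument spilled to the frame
//   ArgumentLocation reg_arg(some_managed_register, /*size=*/ 8u);  // argument held in a register
//   DCHECK(!stack_arg.IsRegister());
//   DCHECK(reg_arg.IsRegister());
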
template <PointerSize kPointerSize>
class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
 public:
  static std::unique_ptr<JNIMacroAssembler<kPointerSize>> Create(
      ArenaAllocator* allocator,
      InstructionSet instruction_set,
      const InstructionSetFeatures* instruction_set_features = nullptr);

  // Finalize the code; emit slow paths, fixup branches, add literal pool, etc.
  virtual void FinalizeCode() = 0;

  // Size of generated code.
  virtual size_t CodeSize() const = 0;

  // Copy instructions out of assembly buffer into the given region of memory.
  virtual void FinalizeInstructions(const MemoryRegion& region) = 0;

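  // Typical lifecycle, sketched from how a JNI stub compiler might drive this interface
  // (illustrative only; `allocator`, `isa`, and `features` are assumed to be provided by
  // the caller, and MemoryRegion requires including memory_region.h):
  //
  //   std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
  //       JNIMacroAssembler<kPointerSize>::Create(allocator, isa, features);
  //   /* ... emit the stub via BuildFrame(), Store(), Call(), RemoveFrame(), ... */
  //   jni_asm->FinalizeCode();
  //   std::vector<uint8_t> code(jni_asm->CodeSize());
  //   MemoryRegion region(code.data(), code.size());
  //   jni_asm->FinalizeInstructions(region);
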
  // Emit code that will create an activation on the stack.
  virtual void BuildFrame(size_t frame_size,
                          ManagedRegister method_reg,
                          ArrayRef<const ManagedRegister> callee_save_regs) = 0;

  // Emit code that will remove an activation from the stack.
  //
  // Argument `may_suspend` must be `true` if the compiled method may be
  // suspended during its execution, and `false` if suspension is impossible.
  virtual void RemoveFrame(size_t frame_size,
                           ArrayRef<const ManagedRegister> callee_save_regs,
                           bool may_suspend) = 0;

  virtual void IncreaseFrameSize(size_t adjust) = 0;
  virtual void DecreaseFrameSize(size_t adjust) = 0;

  // Return the same core register but with the correct size if the architecture-specific
  // ManagedRegister uses different representations for different sizes.
  virtual ManagedRegister CoreRegisterWithSize(ManagedRegister src, size_t size) = 0;

  // Store routines.
  virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;

  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) = 0;

  virtual void StoreStackOffsetToThread(ThreadOffset<kPointerSize> thr_offs,
                                        FrameOffset fr_offs) = 0;

  virtual void StoreStackPointerToThread(ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void StoreSpanning(FrameOffset dest,
                             ManagedRegister src,
                             FrameOffset in_off) = 0;

  // Load routines.
  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
  virtual void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) = 0;

  virtual void LoadFromThread(ManagedRegister dest,
                              ThreadOffset<kPointerSize> src,
                              size_t size) = 0;

  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
  // If unpoison_reference is true and kPoisonReference is true, then we negate the read reference.
  virtual void LoadRef(ManagedRegister dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;

  virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;

  // Copying routines.

  // Move arguments from `srcs` locations to `dests` locations.
  //
  // References shall be spilled to `refs` frame offsets (kInvalidReferenceOffset indicates
  // a non-reference type) if they are in registers, and the corresponding `dests` shall be
  // filled with `jobject` replacements. If the first argument is a reference, it is
  // assumed to be `this` and cannot be null; all other reference arguments can be null.
  virtual void MoveArguments(ArrayRef<ArgumentLocation> dests,
                             ArrayRef<ArgumentLocation> srcs,
                             ArrayRef<FrameOffset> refs) = 0;

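  // Minimal sketch of a MoveArguments() call for a single non-reference argument copied
  // from one stack slot to another (illustrative only; the offsets are made up, and real
  // callers mix register and stack locations as dictated by the calling conventions):
  //
  //   ArgumentLocation srcs[] = { ArgumentLocation(FrameOffset(8), /*size=*/ 4u) };
  //   ArgumentLocation dests[] = { ArgumentLocation(FrameOffset(0), /*size=*/ 4u) };
  //   FrameOffset refs[] = { JNIMacroAssembler<kPointerSize>::kInvalidReferenceOffset };
  //   jni_asm->MoveArguments(ArrayRef<ArgumentLocation>(dests),
  //                          ArrayRef<ArgumentLocation>(srcs),
  //                          ArrayRef<FrameOffset>(refs));
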
  virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;

  virtual void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void CopyRawPtrToThread(ThreadOffset<kPointerSize> thr_offs,
                                  FrameOffset fr_offs,
                                  ManagedRegister scratch) = 0;

  virtual void CopyRef(FrameOffset dest, FrameOffset src) = 0;
  virtual void CopyRef(FrameOffset dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void Copy(FrameOffset dest, FrameOffset src, size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    ManagedRegister src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest_base,
                    Offset dest_offset,
                    FrameOffset src,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    FrameOffset src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest,
                    Offset dest_offset,
                    ManagedRegister src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    Offset dest_offset,
                    FrameOffset src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void MemoryBarrier(ManagedRegister scratch) = 0;

  // Sign extension.
  virtual void SignExtend(ManagedRegister mreg, size_t size) = 0;

  // Zero extension.
  virtual void ZeroExtend(ManagedRegister mreg, size_t size) = 0;

  // Exploit fast access in managed code to Thread::Current().
  virtual void GetCurrentThread(ManagedRegister dest) = 0;
  virtual void GetCurrentThread(FrameOffset dest_offset) = 0;

  // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
  // stale reference that can be used to avoid loading the spilled value to
  // see if the value is null.
  virtual void CreateJObject(ManagedRegister out_reg,
                             FrameOffset spilled_reference_offset,
                             ManagedRegister in_reg,
                             bool null_allowed) = 0;

  // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`.
  virtual void CreateJObject(FrameOffset out_off,
                             FrameOffset spilled_reference_offset,
                             bool null_allowed) = 0;

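  // Minimal sketch (illustrative only; `arg_reg`, `ref_offset`, and `out_slot` are assumed
  // names): converting a reference already spilled at `ref_offset` into the `jobject`
  // passed to the native method, first into a register and then into a stack argument slot.
  //
  //   jni_asm->CreateJObject(arg_reg, ref_offset, arg_reg, /*null_allowed=*/ true);
  //   jni_asm->CreateJObject(out_slot, ref_offset, /*null_allowed=*/ true);
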
  // Heap::VerifyObject on src. In some cases (such as a reference to `this`) we
  // know that src cannot be null.
  virtual void VerifyObject(ManagedRegister src, bool could_be_null) = 0;
  virtual void VerifyObject(FrameOffset src, bool could_be_null) = 0;

  // Jump to address held at [base+offset] (used for tail calls).
  virtual void Jump(ManagedRegister base, Offset offset) = 0;

  // Call to address held at [base+offset].
  virtual void Call(ManagedRegister base, Offset offset) = 0;
  virtual void Call(FrameOffset base, Offset offset) = 0;
  virtual void CallFromThread(ThreadOffset<kPointerSize> offset) = 0;

  // Generate a fast path for transition to Native. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs`, which should be callee-save core registers
  // (already saved before this call), and it must preserve all argument registers.
  virtual void TryToTransitionFromRunnableToNative(
      JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs) = 0;

  // Generate a fast path for transition to Runnable. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs`, which should be core argument registers
  // not used as return registers, and it must preserve the `return_reg` if any.
  virtual void TryToTransitionFromNativeToRunnable(JNIMacroLabel* label,
                                                   ArrayRef<const ManagedRegister> scratch_regs,
                                                   ManagedRegister return_reg) = 0;

  // Generate a suspend check and branch to `label` if there is a pending suspend request.
  virtual void SuspendCheck(JNIMacroLabel* label) = 0;

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to the `label` if it is.
  virtual void ExceptionPoll(JNIMacroLabel* label) = 0;
  // Deliver pending exception.
  virtual void DeliverPendingException() = 0;

  // Create a new label that can be used with Jump/Bind calls.
  virtual std::unique_ptr<JNIMacroLabel> CreateLabel() = 0;
  // Emit an unconditional jump to the label.
  virtual void Jump(JNIMacroLabel* label) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the GC marking flag.
  virtual void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the object's mark bit.
  virtual void TestMarkBit(ManagedRegister ref,
                           JNIMacroLabel* label,
                           JNIMacroUnaryCondition cond) = 0;
  // Code at this offset will serve as the target for the Jump call.
  virtual void Bind(JNIMacroLabel* label) = 0;

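  // Sketch of the label API for a GC-marking check (illustrative only; the fast and slow
  // path bodies are elided and would be emitted with the other methods of this interface):
  //
  //   std::unique_ptr<JNIMacroLabel> slow_path = jni_asm->CreateLabel();
  //   std::unique_ptr<JNIMacroLabel> resume = jni_asm->CreateLabel();
  //   jni_asm->TestGcMarking(slow_path.get(), JNIMacroUnaryCondition::kNotZero);
  //   /* ... fast path ... */
  //   jni_asm->Jump(resume.get());
  //   jni_asm->Bind(slow_path.get());
  //   /* ... slow path ... */
  //   jni_asm->Bind(resume.get());
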
  virtual ~JNIMacroAssembler() {}

  /**
   * @brief Buffer of DWARF's Call Frame Information opcodes.
   * @details It is used by debuggers and other tools to unwind the call stack.
   */
  virtual DebugFrameOpCodeWriterForAssembler& cfi() = 0;

  void SetEmitRunTimeChecksInDebugMode(bool value) {
    emit_run_time_checks_in_debug_mode_ = value;
  }

  static constexpr FrameOffset kInvalidReferenceOffset = FrameOffset(0);

 protected:
  JNIMacroAssembler() {}

  // Should run-time checks be emitted in debug mode?
  bool emit_run_time_checks_in_debug_mode_ = false;
};

// A "Label" class used with the JNIMacroAssembler,
// allowing branches (jumps from one place in the generated code to another).
//
// This is just an interface, so every platform must provide
// its own implementation of it.
//
// It is only safe to use a label created
// via JNIMacroAssembler::CreateLabel with that same macro assembler.
class JNIMacroLabel {
 public:
  virtual ~JNIMacroLabel() = 0;

  const InstructionSet isa_;
 protected:
  explicit JNIMacroLabel(InstructionSet isa) : isa_(isa) {}
};

inline JNIMacroLabel::~JNIMacroLabel() {
  // Compulsory definition for a pure virtual destructor
  // to avoid linking errors.
}

template <typename T, PointerSize kPointerSize>
class JNIMacroAssemblerFwd : public JNIMacroAssembler<kPointerSize> {
 public:
  void FinalizeCode() override {
    asm_.FinalizeCode();
  }

  size_t CodeSize() const override {
    return asm_.CodeSize();
  }

  void FinalizeInstructions(const MemoryRegion& region) override {
    asm_.FinalizeInstructions(region);
  }

  DebugFrameOpCodeWriterForAssembler& cfi() override {
    return asm_.cfi();
  }

 protected:
  explicit JNIMacroAssemblerFwd(ArenaAllocator* allocator) : asm_(allocator) {}

  T asm_;
};

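// Sketch of how a platform backend is expected to plug into the forwarder above
// (illustrative only; `FooAssembler`, `FooJNIMacroAssembler`, and the 64-bit pointer size
// are assumed names, and a real backend also overrides the remaining pure virtual methods
// of JNIMacroAssembler):
//
//   class FooJNIMacroAssembler final
//       : public JNIMacroAssemblerFwd<FooAssembler, PointerSize::k64> {
//    public:
//     explicit FooJNIMacroAssembler(ArenaAllocator* allocator)
//         : JNIMacroAssemblerFwd<FooAssembler, PointerSize::k64>(allocator) {}
//     /* ... overrides of BuildFrame(), RemoveFrame(), Store(), Load(), etc. ... */
//   };
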
template <typename Self, typename PlatformLabel, InstructionSet kIsa>
class JNIMacroLabelCommon : public JNIMacroLabel {
 public:
  static Self* Cast(JNIMacroLabel* label) {
    CHECK(label != nullptr);
    CHECK_EQ(kIsa, label->isa_);

    return reinterpret_cast<Self*>(label);
  }

 protected:
  PlatformLabel* AsPlatformLabel() {
    return &label_;
  }

  JNIMacroLabelCommon() : JNIMacroLabel(kIsa) {
  }

  ~JNIMacroLabelCommon() override {}

 private:
  PlatformLabel label_;
};

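// Sketch of a matching platform label built on the CRTP helper above (illustrative only;
// `FooJNIMacroLabel`, `FooLabel`, and `InstructionSet::kFoo` are assumed names):
//
//   class FooJNIMacroLabel final
//       : public JNIMacroLabelCommon<FooJNIMacroLabel, FooLabel, InstructionSet::kFoo> {
//    public:
//     FooLabel* AsFoo() { return AsPlatformLabel(); }
//   };
//
//   // The Foo backend can then recover its own label type from the generic one:
//   FooLabel* impl = FooJNIMacroLabel::Cast(label)->AsFoo();
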
}  // namespace art

#endif  // ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_