blob: c8c713ae67297c8aaadd91d5bbd9e963c5e17c72 [file] [log] [blame]
Andreas Gampe3b165bc2016-08-01 22:07:04 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_
18#define ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_
19
20#include <vector>
21
Andreas Gampe57943812017-12-06 21:39:13 -080022#include <android-base/logging.h>
23
Andreas Gampe3b165bc2016-08-01 22:07:04 -070024#include "arch/instruction_set.h"
25#include "base/arena_allocator.h"
26#include "base/arena_object.h"
David Brazdild9c90372016-09-14 16:53:55 +010027#include "base/array_ref.h"
Andreas Gampe3b165bc2016-08-01 22:07:04 -070028#include "base/enums.h"
Andreas Gampe3b165bc2016-08-01 22:07:04 -070029#include "base/macros.h"
30#include "managed_register.h"
31#include "offsets.h"
Andreas Gampe3b165bc2016-08-01 22:07:04 -070032
33namespace art {
34
35class ArenaAllocator;
36class DebugFrameOpCodeWriterForAssembler;
37class InstructionSetFeatures;
38class MemoryRegion;
Igor Murashkinae7ff922016-10-06 14:59:19 -070039class JNIMacroLabel;
40
// Unary condition for the conditional-jump helpers (TestGcMarking / TestMarkBit):
// branch when the tested value is zero, or when it is non-zero.
enum class JNIMacroUnaryCondition {
  kZero,
  kNotZero
};
Andreas Gampe3b165bc2016-08-01 22:07:04 -070045
// Describes where a JNI call argument lives: either in a managed register or
// at an offset in the current stack frame. Consumed by
// JNIMacroAssembler::MoveArguments() when shuffling arguments between
// calling conventions.
class ArgumentLocation {
 public:
  // Register-based location; `reg` must be a valid register.
  ArgumentLocation(ManagedRegister reg, size_t size)
      : reg_(reg), frame_offset_(0u), size_(size) {
    DCHECK(reg.IsRegister());
  }

  // Stack-based location at `frame_offset`; the register slot is set to
  // NoRegister() so IsRegister() reports false.
  ArgumentLocation(FrameOffset frame_offset, size_t size)
      : reg_(ManagedRegister::NoRegister()), frame_offset_(frame_offset), size_(size) {}

  // True if the argument is held in a register, false if it is on the stack.
  bool IsRegister() const {
    return reg_.IsRegister();
  }

  // Returns the register; may be called only when IsRegister().
  ManagedRegister GetRegister() const {
    DCHECK(IsRegister());
    return reg_;
  }

  // Returns the frame offset; may be called only when !IsRegister().
  FrameOffset GetFrameOffset() const {
    DCHECK(!IsRegister());
    return frame_offset_;
  }

  // Size of the argument in bytes.
  size_t GetSize() const {
    return size_;
  }

 private:
  ManagedRegister reg_;       // NoRegister() when the argument is stack-based.
  FrameOffset frame_offset_;  // 0 (unused) when the argument is register-based.
  size_t size_;               // Argument size in bytes.
};
79
// Architecture-independent interface used by the JNI compiler to emit JNI stub
// code (frame setup/teardown, argument shuffling, thread-state transitions,
// exception polling). Concrete per-ISA implementations are obtained via Create().
template <PointerSize kPointerSize>
class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
 public:
  // Factory for the architecture-specific implementation matching
  // `instruction_set` (optionally specialized by `instruction_set_features`).
  static std::unique_ptr<JNIMacroAssembler<kPointerSize>> Create(
      ArenaAllocator* allocator,
      InstructionSet instruction_set,
      const InstructionSetFeatures* instruction_set_features = nullptr);

  // Finalize the code; emit slow paths, fixup branches, add literal pool, etc.
  virtual void FinalizeCode() = 0;

  // Size of generated code.
  virtual size_t CodeSize() const = 0;

  // Copy instructions out of assembly buffer into the given region of memory.
  virtual void FinalizeInstructions(const MemoryRegion& region) = 0;

  // Emit code that will create an activation on the stack.
  virtual void BuildFrame(size_t frame_size,
                          ManagedRegister method_reg,
                          ArrayRef<const ManagedRegister> callee_save_regs) = 0;

  // Emit code that will remove an activation from the stack.
  //
  // Argument `may_suspend` must be `true` if the compiled method may be
  // suspended during its execution (otherwise `false`, if it is impossible
  // to suspend during its execution).
  virtual void RemoveFrame(size_t frame_size,
                           ArrayRef<const ManagedRegister> callee_save_regs,
                           bool may_suspend) = 0;

  // Grow/shrink the current frame by `adjust` bytes.
  virtual void IncreaseFrameSize(size_t adjust) = 0;
  virtual void DecreaseFrameSize(size_t adjust) = 0;

  // Return the same core register but with correct size if the architecture-specific
  // ManagedRegister has different representation for different sizes.
  virtual ManagedRegister CoreRegisterWithSize(ManagedRegister src, size_t size) = 0;

  // Store routines.
  virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void Store(ManagedRegister base, MemberOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;

  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm) = 0;

  // Store the address SP+`fr_offs` into the thread-local slot `thr_offs`.
  virtual void StoreStackOffsetToThread(ThreadOffset<kPointerSize> thr_offs,
                                        FrameOffset fr_offs) = 0;

  // Stores stack pointer by tagging it if required so we can walk the stack. In debuggable runtimes
  // we use tag to tell if we are using JITed code or AOT code. In non-debuggable runtimes we never
  // use JITed code when AOT code is present. So checking for AOT code is sufficient to detect which
  // code is being executed. We avoid tagging in non-debuggable runtimes to reduce instructions.
  virtual void StoreStackPointerToThread(ThreadOffset<kPointerSize> thr_offs, bool tag_sp) = 0;

  virtual void StoreSpanning(FrameOffset dest,
                             ManagedRegister src,
                             FrameOffset in_off) = 0;

  // Load routines.
  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;
  virtual void Load(ManagedRegister dest, ManagedRegister base, MemberOffset offs, size_t size) = 0;

  virtual void LoadFromThread(ManagedRegister dest,
                              ThreadOffset<kPointerSize> src,
                              size_t size) = 0;

  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
  // If unpoison_reference is true and kPoisonReference is true, then we negate the read reference.
  virtual void LoadRef(ManagedRegister dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;

  virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;

  // Copying routines.

  // Move arguments from `srcs` locations to `dests` locations.
  //
  // References shall be spilled to `refs` frame offsets (kInvalidReferenceOffset indicates
  // a non-reference type) if they are in registers and corresponding `dests` shall be
  // filled with `jobject` replacements. If the first argument is a reference, it is
  // assumed to be `this` and cannot be null, all other reference arguments can be null.
  virtual void MoveArguments(ArrayRef<ArgumentLocation> dests,
                             ArrayRef<ArgumentLocation> srcs,
                             ArrayRef<FrameOffset> refs) = 0;

  virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;

  virtual void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void CopyRawPtrToThread(ThreadOffset<kPointerSize> thr_offs,
                                  FrameOffset fr_offs,
                                  ManagedRegister scratch) = 0;

  virtual void CopyRef(FrameOffset dest, FrameOffset src) = 0;
  virtual void CopyRef(FrameOffset dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  // Memory-to-memory copy of `size` bytes between frame slots / [base+offset]
  // locations; overloads differ only in the addressing of source/destination.
  virtual void Copy(FrameOffset dest, FrameOffset src, size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    ManagedRegister src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest_base,
                    Offset dest_offset,
                    FrameOffset src,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    FrameOffset src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest,
                    Offset dest_offset,
                    ManagedRegister src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    Offset dest_offset,
                    FrameOffset src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void MemoryBarrier(ManagedRegister scratch) = 0;

  // Sign extension.
  virtual void SignExtend(ManagedRegister mreg, size_t size) = 0;

  // Zero extension.
  virtual void ZeroExtend(ManagedRegister mreg, size_t size) = 0;

  // Exploit fast access in managed code to Thread::Current().
  virtual void GetCurrentThread(ManagedRegister dest) = 0;
  virtual void GetCurrentThread(FrameOffset dest_offset) = 0;

  // Set up `out_reg` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`. `in_reg` holds a possibly
  // stale reference that can be used to avoid loading the spilled value to
  // see if the value is null.
  virtual void CreateJObject(ManagedRegister out_reg,
                             FrameOffset spilled_reference_offset,
                             ManagedRegister in_reg,
                             bool null_allowed) = 0;

  // Set up `out_off` to hold a `jobject` (`StackReference<Object>*` to a spilled value),
  // or to be null if the value is null and `null_allowed`.
  virtual void CreateJObject(FrameOffset out_off,
                             FrameOffset spilled_reference_offset,
                             bool null_allowed) = 0;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src may not be null.
  virtual void VerifyObject(ManagedRegister src, bool could_be_null) = 0;
  virtual void VerifyObject(FrameOffset src, bool could_be_null) = 0;

  // Jump to address held at [base+offset] (used for tail calls).
  virtual void Jump(ManagedRegister base, Offset offset) = 0;

  // Call to address held at [base+offset].
  virtual void Call(ManagedRegister base, Offset offset) = 0;
  virtual void CallFromThread(ThreadOffset<kPointerSize> offset) = 0;

  // Generate fast-path for transition to Native. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs` which should be callee save core registers
  // (already saved before this call) and must preserve all argument registers.
  virtual void TryToTransitionFromRunnableToNative(
      JNIMacroLabel* label, ArrayRef<const ManagedRegister> scratch_regs) = 0;

  // Generate fast-path for transition to Runnable. Go to `label` if any thread flag is set.
  // The implementation can use `scratch_regs` which should be core argument registers
  // not used as return registers and it must preserve the `return_reg` if any.
  virtual void TryToTransitionFromNativeToRunnable(JNIMacroLabel* label,
                                                   ArrayRef<const ManagedRegister> scratch_regs,
                                                   ManagedRegister return_reg) = 0;

  // Generate suspend check and branch to `label` if there is a pending suspend request.
  virtual void SuspendCheck(JNIMacroLabel* label) = 0;

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to the `label` if it is.
  virtual void ExceptionPoll(JNIMacroLabel* label) = 0;
  // Deliver pending exception.
  virtual void DeliverPendingException() = 0;

  // Create a new label that can be used with Jump/Bind calls.
  virtual std::unique_ptr<JNIMacroLabel> CreateLabel() = 0;
  // Emit an unconditional jump to the label.
  virtual void Jump(JNIMacroLabel* label) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the GC marking flag.
  virtual void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to object's mark bit.
  virtual void TestMarkBit(ManagedRegister ref,
                           JNIMacroLabel* label,
                           JNIMacroUnaryCondition cond) = 0;
  // Code at this offset will serve as the target for the Jump call.
  virtual void Bind(JNIMacroLabel* label) = 0;

  virtual ~JNIMacroAssembler() {}

  /**
   * @brief Buffer of DWARF's Call Frame Information opcodes.
   * @details It is used by debuggers and other tools to unwind the call stack.
   */
  virtual DebugFrameOpCodeWriterForAssembler& cfi() = 0;

  // Enable or disable emission of run-time checks in debug mode.
  void SetEmitRunTimeChecksInDebugMode(bool value) {
    emit_run_time_checks_in_debug_mode_ = value;
  }

  // Sentinel used in MoveArguments() `refs` to mark a non-reference argument.
  static constexpr FrameOffset kInvalidReferenceOffset = FrameOffset(0);

 protected:
  JNIMacroAssembler() {}

  // Should run-time checks be emitted in debug mode?
  bool emit_run_time_checks_in_debug_mode_ = false;
};
312
// A "Label" class used with the JNIMacroAssembler
// allowing one to use branches (jumping from one place to another).
//
// This is just an interface, so every platform must provide
// its own implementation of it.
//
// It is only safe to use a label created
// via JNIMacroAssembler::CreateLabel with that same macro assembler.
class JNIMacroLabel {
 public:
  virtual ~JNIMacroLabel() = 0;

  // Instruction set the label was created for; checked by
  // JNIMacroLabelCommon::Cast before downcasting.
  const InstructionSet isa_;
 protected:
  explicit JNIMacroLabel(InstructionSet isa) : isa_(isa) {}
};

inline JNIMacroLabel::~JNIMacroLabel() {
  // Compulsory definition for a pure virtual destructor
  // to avoid linking errors.
}
334
// Helper base that implements the common, ISA-independent part of the
// JNIMacroAssembler interface by forwarding each call to a concrete platform
// assembler `T` held by value in `asm_`.
template <typename T, PointerSize kPointerSize>
class JNIMacroAssemblerFwd : public JNIMacroAssembler<kPointerSize> {
 public:
  // Forwards to the platform assembler's finalization.
  void FinalizeCode() override {
    asm_.FinalizeCode();
  }

  // Forwards to the platform assembler's code-size query.
  size_t CodeSize() const override {
    return asm_.CodeSize();
  }

  // Forwards the instruction copy-out to the platform assembler.
  void FinalizeInstructions(const MemoryRegion& region) override {
    asm_.FinalizeInstructions(region);
  }

  // Forwards to the platform assembler's CFI opcode buffer.
  DebugFrameOpCodeWriterForAssembler& cfi() override {
    return asm_.cfi();
  }

 protected:
  explicit JNIMacroAssemblerFwd(ArenaAllocator* allocator) : asm_(allocator) {}

  // The wrapped platform-specific assembler.
  T asm_;
};
359
// Shared base for platform-specific JNIMacroLabel implementations: stores the
// platform's own label type and provides a checked downcast from the generic
// JNIMacroLabel interface.
template <typename Self, typename PlatformLabel, InstructionSet kIsa>
class JNIMacroLabelCommon : public JNIMacroLabel {
 public:
  // Downcast `label` to the platform-specific `Self` type. CHECKs that the
  // label is non-null and was created for the same instruction set `kIsa`,
  // which makes the reinterpret_cast safe for correctly-created labels.
  static Self* Cast(JNIMacroLabel* label) {
    CHECK(label != nullptr);
    CHECK_EQ(kIsa, label->isa_);

    return reinterpret_cast<Self*>(label);
  }

 protected:
  // Gives subclasses access to the underlying platform label.
  PlatformLabel* AsPlatformLabel() {
    return &label_;
  }

  JNIMacroLabelCommon() : JNIMacroLabel(kIsa) {
  }

  ~JNIMacroLabelCommon() override {}

 private:
  // The platform assembler's concrete label object.
  PlatformLabel label_;
};
383
Andreas Gampe3b165bc2016-08-01 22:07:04 -0700384} // namespace art
385
386#endif // ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_