/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_

#include <iosfwd>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
#include "base/histogram.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "compilation_kind.h"
#include "jit_memory_region.h"

namespace art {

class ArtMethod;
template<class T> class Handle;
class LinearAlloc;
class InlineCache;
class IsMarkedVisitor;
class JitJniStubTestHelper;
class OatQuickMethodHeader;
struct ProfileMethodInfo;
class ProfilingInfo;
class Thread;

namespace gc {
namespace accounting {
template<size_t kAlignment> class MemoryRangeBitmap;
}  // namespace accounting
}  // namespace gc

namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
}  // namespace mirror

namespace jit {

class MarkCodeClosure;

// Type of bitmap used for tracking live functions in the JIT code cache for the purposes
// of garbage collecting code.
using CodeCacheBitmap = gc::accounting::MemoryRangeBitmap<kJitCodeAccountingBytes>;

// The state of profile-based compilation in the zygote.
// - kInProgress: JIT compilation is happening.
// - kDone: JIT compilation is finished, and the zygote is preparing to notify
//          the other processes.
// - kNotifiedOk: the zygote has notified the other processes, which can start
//                sharing the boot image method mappings.
// - kNotifiedFailure: the zygote has notified the other processes, but they
//                     cannot share the boot image method mappings due to
//                     unexpected errors.
enum class ZygoteCompilationState : uint8_t {
  kInProgress = 0,
  kDone = 1,
  kNotifiedOk = 2,
  kNotifiedFailure = 3,
};
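
// For illustration only: a sketch (not code found elsewhere in ART) of how a
// forked process is expected to react to this state, assuming `zygote_map`
// points at the shared ZygoteMap defined below:
//
//   if (zygote_map->CanMapBootImageMethods()) {
//     // Notification succeeded: boot image method mappings can be shared.
//   } else if (zygote_map->IsCompilationNotified()) {
//     // Notification happened but sharing failed: fall back to local JIT code.
//   }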

// Class abstraction over a map of ArtMethod -> compiled code, where the
// ArtMethods are compiled by the zygote, and the map acts as a communication
// channel between the zygote and the other processes.
// For the zygote process, this map is the only place it stores compiled
// code. JitCodeCache::method_code_map_ is empty.
//
// This map is writable only by the zygote, and readable by all children.
class ZygoteMap {
 public:
  struct Entry {
    ArtMethod* method;
    // Note that we currently only allocate code in the low 4GB, so we could
    // reserve just 4 bytes for the code pointer. For simplicity, and in case we
    // ever move to 64-bit code addresses, keep it void* for now.
    const void* code_ptr;
  };

  explicit ZygoteMap(JitMemoryRegion* region)
      : map_(), region_(region), compilation_state_(nullptr) {}

  // Initialize the data structure so it can hold `number_of_methods` mappings.
  // Note that the map is fixed size and never grows.
  void Initialize(uint32_t number_of_methods) REQUIRES(!Locks::jit_lock_);

  // Add the mapping method -> code.
  void Put(const void* code, ArtMethod* method) REQUIRES(Locks::jit_lock_);

  // Return the code pointer for the given method. If `pc` is not zero, check
  // that the pc falls into that code's range. Return null otherwise.
  const void* GetCodeFor(ArtMethod* method, uintptr_t pc = 0) const;

  // Return whether the map has associated code for the given method.
  bool ContainsMethod(ArtMethod* method) const {
    return GetCodeFor(method) != nullptr;
  }

  void SetCompilationState(ZygoteCompilationState state) {
    region_->WriteData(compilation_state_, state);
  }

  bool IsCompilationDoneButNotNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ == ZygoteCompilationState::kDone;
  }

  bool IsCompilationNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ > ZygoteCompilationState::kDone;
  }

  bool CanMapBootImageMethods() const {
    return compilation_state_ != nullptr &&
           *compilation_state_ == ZygoteCompilationState::kNotifiedOk;
  }

  ArrayRef<const Entry>::const_iterator cbegin() const {
    return map_.cbegin();
  }
  ArrayRef<const Entry>::iterator begin() {
    return map_.begin();
  }
  ArrayRef<const Entry>::const_iterator cend() const {
    return map_.cend();
  }
  ArrayRef<const Entry>::iterator end() {
    return map_.end();
  }

 private:
  // The map allocated with `region_`.
  ArrayRef<const Entry> map_;

  // The region in which the map is allocated.
  JitMemoryRegion* const region_;

  // The current state of compilation in the zygote. Starts as kInProgress,
  // and should end as kNotifiedOk or kNotifiedFailure.
  const ZygoteCompilationState* compilation_state_;

  DISALLOW_COPY_AND_ASSIGN(ZygoteMap);
};
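
// Illustrative use from a child process (a sketch under assumed context, not a
// quote of existing ART code):
//
//   const void* code = code_cache->GetZygoteMap()->GetCodeFor(method);
//   if (code != nullptr) {
//     // `method` was compiled by the zygote; `code` is shared across
//     // processes and can serve as the method's entrypoint.
//   }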

class JitCodeCache {
 public:
  static constexpr size_t kMaxCapacity = 64 * MB;
  // Put the default to a very low amount for debug builds to stress the code cache
  // collection.
  static constexpr size_t kInitialCapacity = kIsDebugBuild ? 8 * KB : 64 * KB;

  // By default, do not GC until reaching 256KB.
  static constexpr size_t kReservedCapacity = kInitialCapacity * 4;
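  // Worked out: on release builds this is 64 KB * 4 = 256 KB; on debug builds,
  // where kInitialCapacity is 8 KB, the threshold is only 32 KB.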

  // Create the code cache. On failure, returns null and passes an error
  // message in the out arg `error_msg`.
  static JitCodeCache* Create(bool used_only_for_profile_data,
                              bool rwx_memory_allowed,
                              bool is_zygote,
                              std::string* error_msg);
  ~JitCodeCache();

  bool NotifyCompilationOf(ArtMethod* method,
                           Thread* self,
                           CompilationKind compilation_kind,
                           bool prejit)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void NotifyMethodRedefined(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the compiler wants to use the
  // profiling info of `method` to drive optimizations,
  // and therefore ensure the returned profiling info object is not
  // collected.
  ProfilingInfo* NotifyCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompiling(ArtMethod* method, Thread* self, CompilationKind compilation_kind)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this pc.
  bool ContainsPc(const void* pc) const;

  // Return true if the code cache contains this pc in the private region (i.e. not from zygote).
  bool PrivateRegionContainsPc(const void* pc) const;

  // Return true if the code cache contains this method.
  bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Allocate a region for both code and data in the JIT code cache.
  // The reserved memory is left completely uninitialized.
  bool Reserve(Thread* self,
               JitMemoryRegion* region,
               size_t code_size,
               size_t stack_map_size,
               size_t number_of_roots,
               ArtMethod* method,
               /*out*/ArrayRef<const uint8_t>* reserved_code,
               /*out*/ArrayRef<const uint8_t>* reserved_data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Initialize code and data of previously allocated memory.
  //
  // `cha_single_implementation_list` needs to be registered via CHA (if it's
  // still valid), since the compiled code still needs to be invalidated if the
  // single-implementation assumptions are violated later. This needs to be done
  // even if `has_should_deoptimize_flag` is false, which can happen due to CHA
  // guard elimination.
  bool Commit(Thread* self,
              JitMemoryRegion* region,
              ArtMethod* method,
              ArrayRef<const uint8_t> reserved_code,  // Uninitialized destination.
              ArrayRef<const uint8_t> code,  // Compiler output (source).
              ArrayRef<const uint8_t> reserved_data,  // Uninitialized destination.
              const std::vector<Handle<mirror::Object>>& roots,
              ArrayRef<const uint8_t> stack_map,  // Compiler output (source).
              const std::vector<uint8_t>& debug_info,
              bool is_full_debug_info,
              CompilationKind compilation_kind,
              bool has_should_deoptimize_flag,
              const ArenaSet<ArtMethod*>& cha_single_implementation_list)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Free the previously allocated memory regions.
  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);
  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_);
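
  // Expected two-phase flow for publishing compiled code (a sketch with
  // hypothetical caller-side variables; the real callers live in the JIT
  // compiler, and error handling is elided):
  //
  //   ArrayRef<const uint8_t> reserved_code;
  //   ArrayRef<const uint8_t> reserved_data;
  //   if (!cache->Reserve(self, region, code_size, stack_map_size, num_roots,
  //                       method, &reserved_code, &reserved_data)) {
  //     return false;  // Not enough space; a collection may be triggered.
  //   }
  //   if (!cache->Commit(self, region, method, reserved_code, code,
  //                      reserved_data, roots, stack_map, debug_info,
  //                      is_full_debug_info, kind, has_should_deoptimize_flag,
  //                      cha_list)) {
  //     cache->Free(self, region, reserved_code.data(), reserved_data.data());
  //     return false;
  //   }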

  // Perform a collection on the code cache.
  void GarbageCollectCache(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Given the 'pc', try to find the JIT compiled code associated with it. 'method' may be null
  // when LookupMethodHeader is called from MarkCodeClosure::Run() in debug builds. Return null
  // if 'pc' is not in the code cache.
  OatQuickMethodHeader* LookupMethodHeader(uintptr_t pc, ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
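
  // Typical shape of a lookup during a stack walk (illustrative sketch only):
  //
  //   uintptr_t pc = ...;  // A return address observed in a managed frame.
  //   OatQuickMethodHeader* header = cache->LookupMethodHeader(pc, method);
  //   if (header != nullptr) {
  //     // `pc` points into JIT-compiled code; `header` describes that code.
  //   }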

  OatQuickMethodHeader* LookupOsrMethodHeader(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes method from the cache for testing purposes. The caller
  // must ensure that all threads are suspended and the method should
  // not be in any thread's stack.
  bool RemoveMethod(ArtMethod* method, bool release_memory)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Remove all methods in our cache that were allocated by 'alloc'.
  void RemoveMethodsIn(Thread* self, const LinearAlloc& alloc)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyInlineCacheInto(const InlineCache& ic, Handle<mirror::ObjectArray<mirror::Class>> array)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Create a 'ProfilingInfo' for 'method'.
  ProfilingInfo* AddProfilingInfo(Thread* self,
                                  ArtMethod* method,
                                  const std::vector<uint32_t>& entries)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool OwnsSpace(const void* mspace) const NO_THREAD_SAFETY_ANALYSIS {
    return private_region_.OwnsSpace(mspace) || shared_region_.OwnsSpace(mspace);
  }

  void* MoreCore(const void* mspace, intptr_t increment);

  // Adds to `methods` all profiled methods which are part of any of the given dex locations.
  void GetProfiledMethods(const std::set<std::string>& dex_base_locations,
                          std::vector<ProfileMethodInfo>& methods)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
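
  // For instance, a profile saver could collect samples like this (a sketch;
  // the variable names are made up for illustration):
  //
  //   std::vector<ProfileMethodInfo> methods;
  //   cache->GetProfiledMethods(tracked_dex_locations, methods);
  //   // `methods` now holds profiling data for code from those dex files.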

  void InvalidateAllCompiledCode()
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) REQUIRES(!Locks::jit_lock_);

  bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  void SweepRootTables(IsMarkedVisitor* visitor)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The GC needs to disallow the reading of inline caches when it processes them,
  // to avoid having a class being used while it is being deleted.
  void AllowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void DisallowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void BroadcastForInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
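
  // The expected pairing from the GC's perspective (an illustrative sketch):
  //
  //   cache->DisallowInlineCacheAccess();
  //   // ... process inline caches; readers block in
  //   // WaitUntilInlineCacheAccessible() in the meantime ...
  //   cache->AllowInlineCacheAccess();
  //   cache->BroadcastForInlineCacheAccess();  // Wake up blocked readers.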

  // Notify the code cache that the method at the pointer 'old_method' is being moved to the pointer
  // 'new_method' since it is being made obsolete.
  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
      REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  // Dynamically change whether we want to garbage collect code.
  void SetGarbageCollectCode(bool value) REQUIRES(!Locks::jit_lock_);

  bool GetGarbageCollectCode() REQUIRES(!Locks::jit_lock_);

  // Unsafe variant for debug checks.
  bool GetGarbageCollectCodeUnsafe() const NO_THREAD_SAFETY_ANALYSIS {
    return garbage_collect_code_;
  }

  ZygoteMap* GetZygoteMap() {
    return &zygote_map_;
  }

  // Fetch the code of a method that was JITted, but the JIT could not
  // update its entrypoint due to the resolution trampoline.
  const void* GetSavedEntryPointOfPreCompiledMethod(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PostForkChildAction(bool is_system_server, bool is_zygote);

  // Clear the entrypoints of JIT compiled methods that belong in the zygote space.
  // This is used for removing non-debuggable JIT code at the point we realize the runtime
  // is debuggable. Also clear the Precompiled flag from all methods so the non-debuggable code
  // doesn't come back.
  void TransitionToDebuggable() REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  JitMemoryRegion* GetCurrentRegion();
  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }
  bool CanAllocateProfilingInfo() {
    // If we don't have a private region, we cannot allocate a profiling info.
    // A shared region does not, in general, support GC objects, which a
    // profiling info can reference.
    JitMemoryRegion* region = GetCurrentRegion();
    return region->IsValid() && !IsSharedRegion(*region);
  }

  // Return whether the given `ptr` is in the zygote executable memory space.
  bool IsInZygoteExecSpace(const void* ptr) const {
    return shared_region_.IsInExecSpace(ptr);
  }

 private:
  JitCodeCache();

  ProfilingInfo* AddProfilingInfoInternal(Thread* self,
                                          ArtMethod* method,
                                          const std::vector<uint32_t>& entries)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Must be called with the mutator lock.
  // The non-mutator lock version should be used if possible. This method will release then
  // re-acquire the mutator lock.
  void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
      REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Return
  // whether the thread actually waited.
  bool WaitForPotentialCollectionToComplete(Thread* self)
      REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);

  // Remove CHA dependents and underlying allocations for entries in `method_headers`.
  void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(!Locks::cha_lock_);

  // Removes method from the cache. The caller must ensure that all threads
  // are suspended and the method should not be in any thread's stack.
  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Call the given callback for every compiled method in the code cache.
  void VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb)
      REQUIRES(Locks::jit_lock_);

  // Free code and data allocations for `code_ptr`.
  void FreeCodeAndData(const void* code_ptr)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Notify all waiting threads that a collection is done.
  void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);

  // Return whether the code cache's capacity is at its maximum.
  bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);

  // Return whether we should do a full collection given the current state of the cache.
  bool ShouldDoFullCollection()
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DoCollection(Thread* self, bool collect_profiling_info)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void RemoveUnmarkedCode(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkCompiledCodeOnThreadStacks(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CodeCacheBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  bool IsInZygoteDataSpace(const void* ptr) const {
    return shared_region_.IsInDataSpace(ptr);
  }

  bool IsWeakAccessEnabled(Thread* self) const;
  void WaitUntilInlineCacheAccessible(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Record that `method` is being compiled with the given kind.
  void AddMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Remove `method` from the list of methods being compiled with the given kind.
  void RemoveMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with the given kind.
  bool IsMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with any kind.
  bool IsMethodBeingCompiled(ArtMethod* method) REQUIRES(Locks::jit_lock_);

  class JniStubKey;
  class JniStubData;

  // Whether the GC allows accessing weaks in inline caches. Note that this
  // is not used by the concurrent collector, which uses
  // Thread::SetWeakRefAccessEnabled instead.
  Atomic<bool> is_weak_access_enabled_;

  // Condition to wait on for accessing inline caches.
  ConditionVariable inline_cache_cond_ GUARDED_BY(Locks::jit_lock_);

  // -------------- JIT memory regions ------------------------------------- //

  // Shared region, inherited from the zygote.
  JitMemoryRegion shared_region_;

  // Process's own region.
  JitMemoryRegion private_region_;

  // -------------- Global JIT maps --------------------------------------- //

  // Holds compiled code associated with the shorty for a JNI stub.
  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod.
  SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod. Used when pre-jitting
  // methods whose entrypoints have the resolution stub.
  SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds OSR compiled code associated with the ArtMethod.
  SafeMap<ArtMethod*, const void*> osr_code_map_ GUARDED_BY(Locks::jit_lock_);

  // ProfilingInfo objects we have allocated.
  std::vector<ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);

  // Methods we are currently compiling, one set for each kind of compilation.
  std::set<ArtMethod*> current_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_osr_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Methods that the zygote has compiled and can be shared across processes
  // forked from the zygote.
  ZygoteMap zygote_map_;

  // -------------- JIT GC related data structures ----------------------- //

  // Condition to wait on during collection.
  ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);

  // Whether there is a code cache collection in progress.
  bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);

  // Bitmap for collecting code and data.
  std::unique_ptr<CodeCacheBitmap> live_bitmap_;

  // Whether the last collection round increased the code cache.
  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);

  // Whether we can do garbage collection. Not 'const' as tests may override this.
  bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);

  // ---------------- JIT statistics -------------------------------------- //

  // Number of baseline compilations done throughout the lifetime of the JIT.
  size_t number_of_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of optimized compilations done throughout the lifetime of the JIT.
  size_t number_of_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of compilations for on-stack-replacement done throughout the lifetime of the JIT.
  size_t number_of_osr_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of code cache collections done throughout the lifetime of the JIT.
  size_t number_of_collections_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of stack map size statistics.
  Histogram<uint64_t> histogram_stack_map_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of code size statistics.
  Histogram<uint64_t> histogram_code_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of profiling info statistics.
  Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);

  friend class art::JitJniStubTestHelper;
  friend class ScopedCodeCacheWrite;
  friend class MarkCodeClosure;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};

}  // namespace jit
}  // namespace art

#endif  // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_