/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_

#include <iosfwd>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
#include "base/histogram.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "compilation_kind.h"
#include "jit_memory_region.h"
#include "profiling_info.h"

namespace art {

class ArtMethod;
template<class T> class Handle;
class LinearAlloc;
class InlineCache;
class IsMarkedVisitor;
class JitJniStubTestHelper;
class OatQuickMethodHeader;
struct ProfileMethodInfo;
class ProfilingInfo;
class Thread;

namespace gc {
namespace accounting {
template<size_t kAlignment> class MemoryRangeBitmap;
}  // namespace accounting
}  // namespace gc

namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
}  // namespace mirror

namespace jit {

class MarkCodeClosure;

// Type of bitmap used for tracking live functions in the JIT code cache for the purposes
// of garbage collecting code.
using CodeCacheBitmap = gc::accounting::MemoryRangeBitmap<kJitCodeAccountingBytes>;

// The state of profile-based compilation in the zygote.
// - kInProgress: JIT compilation is happening
// - kDone: JIT compilation is finished, and the zygote is preparing to notify
//          the other processes.
// - kNotifiedOk: the zygote has notified the other processes, which can start
//                sharing the boot image method mappings.
// - kNotifiedFailure: the zygote has notified the other processes, but they
//                     cannot share the boot image method mappings due to
//                     unexpected errors.
enum class ZygoteCompilationState : uint8_t {
  kInProgress = 0,
  kDone = 1,
  kNotifiedOk = 2,
  kNotifiedFailure = 3,
};
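
// Expected progression (a sketch inferred from the state descriptions above):
//   kInProgress -> kDone -> (kNotifiedOk | kNotifiedFailure)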

// Class abstraction over a map of ArtMethod -> compiled code, where the
// ArtMethods are compiled by the zygote, and the map acts as a communication
// channel between the zygote and the other processes.
// For the zygote process, this map is the only place it stores compiled
// code. JitCodeCache::method_code_map_ is empty.
//
// This map is writable only by the zygote, and readable by all children.
class ZygoteMap {
 public:
  struct Entry {
    ArtMethod* method;
    // Note we currently only allocate code in the low 4GB, so we could just reserve 4 bytes
    // for the code pointer. For simplicity, and in case we move to 64-bit
    // addresses for code, just keep it void* for now.
    const void* code_ptr;
  };

  explicit ZygoteMap(JitMemoryRegion* region)
      : map_(), region_(region), compilation_state_(nullptr) {}

  // Initialize the data structure so it can hold `number_of_methods` mappings.
  // Note that the map is fixed size and never grows.
  void Initialize(uint32_t number_of_methods) REQUIRES(!Locks::jit_lock_);

  // Add the mapping method -> code.
  void Put(const void* code, ArtMethod* method) REQUIRES(Locks::jit_lock_);

  // Return the code pointer for the given method. If pc is not zero, check that
  // the pc falls into that code range. Return null otherwise.
  const void* GetCodeFor(ArtMethod* method, uintptr_t pc = 0) const;

  // Return whether the map has associated code for the given method.
  bool ContainsMethod(ArtMethod* method) const {
    return GetCodeFor(method) != nullptr;
  }
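
  // Illustrative sketch (hypothetical caller, not part of this header): the
  // zygote publishes a mapping under the JIT lock, and any process can then
  // query it without holding the lock:
  //
  //   {
  //     MutexLock mu(Thread::Current(), *Locks::jit_lock_);
  //     zygote_map->Put(code_ptr, method);  // Writer side: zygote only.
  //   }
  //   const void* code = zygote_map->GetCodeFor(method);  // Reader side.
  //   if (code != nullptr) { /* `method` has shared, zygote-compiled code. */ }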

  void SetCompilationState(ZygoteCompilationState state) {
    region_->WriteData(compilation_state_, state);
  }

  bool IsCompilationDoneButNotNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ == ZygoteCompilationState::kDone;
  }

  bool IsCompilationNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ > ZygoteCompilationState::kDone;
  }

  bool CanMapBootImageMethods() const {
    return compilation_state_ != nullptr &&
           *compilation_state_ == ZygoteCompilationState::kNotifiedOk;
  }
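
  // A sketch of the intended handshake, as assumed from the comments on
  // ZygoteCompilationState (illustrative only, not part of this header):
  //
  //   // In the zygote, once compilation finishes:
  //   zygote_map->SetCompilationState(ZygoteCompilationState::kDone);
  //   // ... notify the other processes, then record the outcome:
  //   zygote_map->SetCompilationState(ok ? ZygoteCompilationState::kNotifiedOk
  //                                      : ZygoteCompilationState::kNotifiedFailure);
  //
  //   // In a child process:
  //   if (zygote_map->CanMapBootImageMethods()) {
  //     // Safe to start sharing the boot image method mappings.
  //   }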

  ArrayRef<const Entry>::const_iterator cbegin() const {
    return map_.cbegin();
  }
  ArrayRef<const Entry>::iterator begin() {
    return map_.begin();
  }
  ArrayRef<const Entry>::const_iterator cend() const {
    return map_.cend();
  }
  ArrayRef<const Entry>::iterator end() {
    return map_.end();
  }
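
  // Example (sketch): the iterators above make the map usable in range-based
  // for loops, e.g. to inspect which methods have published code:
  //
  //   for (const ZygoteMap::Entry& entry : *zygote_map) {
  //     if (entry.method != nullptr) { /* entry.code_ptr is its compiled code. */ }
  //   }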

 private:
  // The map allocated with `region_`.
  ArrayRef<const Entry> map_;

  // The region in which the map is allocated.
  JitMemoryRegion* const region_;

  // The current state of compilation in the zygote. Starts with kInProgress,
  // and should end with kNotifiedOk or kNotifiedFailure.
  const ZygoteCompilationState* compilation_state_;

  DISALLOW_COPY_AND_ASSIGN(ZygoteMap);
};

class JitCodeCache {
 public:
  static constexpr size_t kMaxCapacity = 64 * MB;
  // Use a very low initial capacity for debug builds to stress the code cache
  // collection.
  static constexpr size_t kInitialCapacity = kIsDebugBuild ? 8 * KB : 64 * KB;

  // By default, do not GC until reaching four times the initial capacity
  // (256KB on release builds).
  static constexpr size_t kReservedCapacity = kInitialCapacity * 4;

  // Create the code cache. On failure, return null and set the error message
  // in the out arg error_msg.
  static JitCodeCache* Create(bool used_only_for_profile_data,
                              bool rwx_memory_allowed,
                              bool is_zygote,
                              std::string* error_msg);
  ~JitCodeCache();

  bool NotifyCompilationOf(ArtMethod* method,
                           Thread* self,
                           CompilationKind compilation_kind,
                           bool prejit)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void NotifyMethodRedefined(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the compiler wants to use the
  // profiling info of `method` to drive optimizations,
  // and therefore ensure the returned profiling info object is not
  // collected.
  ProfilingInfo* NotifyCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompiling(ArtMethod* method, Thread* self, CompilationKind compilation_kind)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this pc.
  bool ContainsPc(const void* pc) const;

  // Return true if the code cache contains this pc in the private region (i.e. not from zygote).
  bool PrivateRegionContainsPc(const void* pc) const;

  // Return true if the code cache contains this method.
  bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Allocate a region for both code and data in the JIT code cache.
  // The reserved memory is left completely uninitialized.
  bool Reserve(Thread* self,
               JitMemoryRegion* region,
               size_t code_size,
               size_t stack_map_size,
               size_t number_of_roots,
               ArtMethod* method,
               /*out*/ArrayRef<const uint8_t>* reserved_code,
               /*out*/ArrayRef<const uint8_t>* reserved_data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Initialize code and data of previously allocated memory.
  //
  // `cha_single_implementation_list` needs to be registered via CHA (if it's
  // still valid), since the compiled code still needs to be invalidated if the
  // single-implementation assumptions are violated later. This needs to be done
  // even if `has_should_deoptimize_flag` is false, which can happen due to CHA
  // guard elimination.
  bool Commit(Thread* self,
              JitMemoryRegion* region,
              ArtMethod* method,
              ArrayRef<const uint8_t> reserved_code,  // Uninitialized destination.
              ArrayRef<const uint8_t> code,  // Compiler output (source).
              ArrayRef<const uint8_t> reserved_data,  // Uninitialized destination.
              const std::vector<Handle<mirror::Object>>& roots,
              ArrayRef<const uint8_t> stack_map,  // Compiler output (source).
              const std::vector<uint8_t>& debug_info,
              bool is_full_debug_info,
              CompilationKind compilation_kind,
              bool has_should_deoptimize_flag,
              const ArenaSet<ArtMethod*>& cha_single_implementation_list)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

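  // A sketch of the intended two-phase use of Reserve()/Commit() by a caller
  // (illustrative only; local names such as `compiled_code` and `cha_list`
  // are assumptions, not part of this API):
  //
  //   ArrayRef<const uint8_t> reserved_code;
  //   ArrayRef<const uint8_t> reserved_data;
  //   if (!code_cache->Reserve(self, region, code_size, stack_map_size,
  //                            roots.size(), method,
  //                            /*out*/ &reserved_code, /*out*/ &reserved_data)) {
  //     return false;  // The cache is full and collection freed no space.
  //   }
  //   if (!code_cache->Commit(self, region, method, reserved_code, compiled_code,
  //                           reserved_data, roots, stack_map, debug_info,
  //                           is_full_debug_info, compilation_kind,
  //                           has_should_deoptimize_flag, cha_list)) {
  //     // On failure, give the reservation back.
  //     code_cache->Free(self, region, reserved_code.data(), reserved_data.data());
  //     return false;
  //   }
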
  // Free the previously allocated memory regions.
  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);
  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_);

  // Perform a collection on the code cache.
  void GarbageCollectCache(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Given the 'pc', try to find the JIT compiled code associated with it. 'method' may be null
  // when LookupMethodHeader is called from MarkCodeClosure::Run() in debug builds. Return null
  // if 'pc' is not in the code cache.
  OatQuickMethodHeader* LookupMethodHeader(uintptr_t pc, ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  OatQuickMethodHeader* LookupOsrMethodHeader(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes method from the cache for testing purposes. The caller
  // must ensure that all threads are suspended and the method should
  // not be in any thread's stack.
  bool RemoveMethod(ArtMethod* method, bool release_memory)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Remove all methods in our cache that were allocated by 'alloc'.
  void RemoveMethodsIn(Thread* self, const LinearAlloc& alloc)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyInlineCacheInto(const InlineCache& ic,
                           /*out*/StackHandleScope<InlineCache::kIndividualCacheSize>* classes)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Create a 'ProfilingInfo' for 'method'.
  ProfilingInfo* AddProfilingInfo(Thread* self,
                                  ArtMethod* method,
                                  const std::vector<uint32_t>& entries)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool OwnsSpace(const void* mspace) const NO_THREAD_SAFETY_ANALYSIS {
    return private_region_.OwnsSpace(mspace) || shared_region_.OwnsSpace(mspace);
  }

  void* MoreCore(const void* mspace, intptr_t increment);
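
  // MoreCore() is the growth callback for the dlmalloc mspaces backing the JIT
  // regions. A sketch of the C glue (an assumption, mirroring how ART hooks
  // dlmalloc elsewhere; the exact hook lives with JitMemoryRegion):
  //
  //   extern "C" void* art_heap_morecore(void* mspace, intptr_t increment) {
  //     return Runtime::Current()->GetJit()->GetCodeCache()->MoreCore(mspace,
  //                                                                   increment);
  //   }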

  // Adds to `methods` all profiled methods which are part of any of the given dex locations.
  void GetProfiledMethods(const std::set<std::string>& dex_base_locations,
                          std::vector<ProfileMethodInfo>& methods)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateAllCompiledCode()
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) REQUIRES(!Locks::jit_lock_);

  bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  void SweepRootTables(IsMarkedVisitor* visitor)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The GC needs to disallow the reading of inline caches when it processes them,
  // to avoid a class being used while it is being deleted.
  void AllowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void DisallowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void BroadcastForInlineCacheAccess() REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the method at the pointer 'old_method' is being moved to the pointer
  // 'new_method' since it is being made obsolete.
  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
      REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  // Dynamically change whether we want to garbage collect code.
  void SetGarbageCollectCode(bool value) REQUIRES(!Locks::jit_lock_);

  bool GetGarbageCollectCode() REQUIRES(!Locks::jit_lock_);

  // Unsafe variant for debug checks.
  bool GetGarbageCollectCodeUnsafe() const NO_THREAD_SAFETY_ANALYSIS {
    return garbage_collect_code_;
  }
  ZygoteMap* GetZygoteMap() {
    return &zygote_map_;
  }

  // Fetch the code of a method that was JITted, but whose entrypoint the JIT
  // could not update because it still had the resolution trampoline.
  const void* GetSavedEntryPointOfPreCompiledMethod(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PostForkChildAction(bool is_system_server, bool is_zygote);

  // Clear the entrypoints of JIT compiled methods that belong in the zygote space.
  // This is used for removing non-debuggable JIT code at the point we realize the runtime
  // is debuggable. Also clear the Precompiled flag from all methods so the non-debuggable code
  // doesn't come back.
  void TransitionToDebuggable() REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  JitMemoryRegion* GetCurrentRegion();
  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }
  bool CanAllocateProfilingInfo() {
    // If we don't have a private region, we cannot allocate a profiling info.
    // A shared region does not, in general, support GC objects, which a
    // profiling info can reference.
    JitMemoryRegion* region = GetCurrentRegion();
    return region->IsValid() && !IsSharedRegion(*region);
  }

  // Return whether the given `ptr` is in the zygote executable memory space.
  bool IsInZygoteExecSpace(const void* ptr) const {
    return shared_region_.IsInExecSpace(ptr);
  }

 private:
  JitCodeCache();

  ProfilingInfo* AddProfilingInfoInternal(Thread* self,
                                          ArtMethod* method,
                                          const std::vector<uint32_t>& entries)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Must be called with the mutator lock.
  // The non-mutator lock version should be used if possible. This method will release then
  // re-acquire the mutator lock.
  void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
      REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Return
  // whether the thread actually waited.
  bool WaitForPotentialCollectionToComplete(Thread* self)
      REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);

  // Remove CHA dependents and underlying allocations for entries in `method_headers`.
  void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(!Locks::cha_lock_);

  // Removes method from the cache. The caller must ensure that all threads
  // are suspended and the method should not be in any thread's stack.
  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Call given callback for every compiled method in the code cache.
  void VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb)
      REQUIRES(Locks::jit_lock_);

  // Free code and data allocations for `code_ptr`.
  void FreeCodeAndData(const void* code_ptr)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Notify all waiting threads that a collection is done.
  void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);

  // Return whether the code cache's capacity is at its maximum.
  bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);

  // Return whether we should do a full collection given the current state of the cache.
  bool ShouldDoFullCollection()
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DoCollection(Thread* self, bool collect_profiling_info)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void RemoveUnmarkedCode(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkCompiledCodeOnThreadStacks(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CodeCacheBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  bool IsInZygoteDataSpace(const void* ptr) const {
    return shared_region_.IsInDataSpace(ptr);
  }

  bool IsWeakAccessEnabled(Thread* self) const;
  void WaitUntilInlineCacheAccessible(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Record that `method` is being compiled with the given kind.
  void AddMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Remove `method` from the list of methods being compiled with the given kind.
  void RemoveMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with the given kind.
  bool IsMethodBeingCompiled(ArtMethod* method, CompilationKind compilation_kind)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with any kind.
  bool IsMethodBeingCompiled(ArtMethod* method) REQUIRES(Locks::jit_lock_);

  class JniStubKey;
  class JniStubData;

  // Whether the GC allows accessing weaks in inline caches. Note that this
  // is not used by the concurrent collector, which uses
  // Thread::SetWeakRefAccessEnabled instead.
  Atomic<bool> is_weak_access_enabled_;

  // Condition to wait on for accessing inline caches.
  ConditionVariable inline_cache_cond_ GUARDED_BY(Locks::jit_lock_);

  // -------------- JIT memory regions ------------------------------------- //

  // Shared region, inherited from the zygote.
  JitMemoryRegion shared_region_;

  // Process's own region.
  JitMemoryRegion private_region_;

  // -------------- Global JIT maps --------------------------------------- //

  // Holds compiled code associated with the shorty for a JNI stub.
  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod.
  SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod. Used when pre-jitting
  // methods whose entrypoints have the resolution stub.
  SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds OSR compiled code associated with the ArtMethod.
  SafeMap<ArtMethod*, const void*> osr_code_map_ GUARDED_BY(Locks::jit_lock_);

  // ProfilingInfo objects we have allocated.
  SafeMap<ArtMethod*, ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);

  // Methods we are currently compiling, one set for each kind of compilation.
  std::set<ArtMethod*> current_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_osr_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Methods that the zygote has compiled and can be shared across processes
  // forked from the zygote.
  ZygoteMap zygote_map_;

  // -------------- JIT GC related data structures ----------------------- //

  // Condition to wait on during collection.
  ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);

  // Whether there is a code cache collection in progress.
  bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);

  // Bitmap for collecting code and data.
  std::unique_ptr<CodeCacheBitmap> live_bitmap_;

  // Whether the last collection round increased the code cache.
  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);

  // Whether we can do garbage collection. Not 'const' as tests may override this.
  bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);

  // ---------------- JIT statistics -------------------------------------- //

  // Number of baseline compilations done throughout the lifetime of the JIT.
  size_t number_of_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of optimized compilations done throughout the lifetime of the JIT.
  size_t number_of_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of compilations for on-stack-replacement done throughout the lifetime of the JIT.
  size_t number_of_osr_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of code cache collections done throughout the lifetime of the JIT.
  size_t number_of_collections_ GUARDED_BY(Locks::jit_lock_);

  // Histograms for keeping track of stack map size statistics.
  Histogram<uint64_t> histogram_stack_map_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histograms for keeping track of code size statistics.
  Histogram<uint64_t> histogram_code_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histograms for keeping track of profiling info statistics.
  Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);

  friend class art::JitJniStubTestHelper;
  friend class ScopedCodeCacheWrite;
  friend class MarkCodeClosure;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};

}  // namespace jit
}  // namespace art

#endif  // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_