/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_

#include <iosfwd>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
#include "base/histogram.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "jit_memory_region.h"

namespace art {

class ArtMethod;
template<class T> class Handle;
class LinearAlloc;
class InlineCache;
class IsMarkedVisitor;
class JitJniStubTestHelper;
class OatQuickMethodHeader;
struct ProfileMethodInfo;
class ProfilingInfo;
class Thread;

namespace gc {
namespace accounting {
template<size_t kAlignment> class MemoryRangeBitmap;
}  // namespace accounting
}  // namespace gc

namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
}  // namespace mirror

namespace jit {

class MarkCodeClosure;

// Type of bitmap used for tracking live functions in the JIT code cache for the purposes
// of garbage collecting code.
using CodeCacheBitmap = gc::accounting::MemoryRangeBitmap<kJitCodeAccountingBytes>;

// The state of profile-based compilation in the zygote.
// - kInProgress: JIT compilation is happening.
// - kDone: JIT compilation is finished, and the zygote is preparing to notify
//          the other processes.
// - kNotifiedOk: the zygote has notified the other processes, which can start
//                sharing the boot image method mappings.
// - kNotifiedFailure: the zygote has notified the other processes, but they
//                     cannot share the boot image method mappings due to
//                     unexpected errors.
enum class ZygoteCompilationState : uint8_t {
  kInProgress = 0,
  kDone = 1,
  kNotifiedOk = 2,
  kNotifiedFailure = 3,
};
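
// An illustrative sketch of the intended transitions (assumed, not mandated by
// this header):
//
//   map->SetCompilationState(ZygoteCompilationState::kDone);
//   ... notify the forked processes ...
//   map->SetCompilationState(success ? ZygoteCompilationState::kNotifiedOk
//                                    : ZygoteCompilationState::kNotifiedFailure);
//
// where `map` (a ZygoteMap*, see below) and `success` are hypothetical names.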

// Class abstraction over a map of ArtMethod -> compiled code, where the
// ArtMethods are compiled by the zygote, and the map acts as a communication
// channel between the zygote and the other processes.
// For the zygote process, this map is the only place it stores compiled
// code; JitCodeCache::method_code_map_ is empty.
//
// This map is writable only by the zygote, and readable by all children.
class ZygoteMap {
 public:
  struct Entry {
    ArtMethod* method;
    // Note: we currently only allocate code in the low 4GB, so we could reserve just
    // four bytes for the code pointer. For simplicity, and in case we move to 64-bit
    // addresses for code, keep it void* for now.
    const void* code_ptr;
  };

  explicit ZygoteMap(JitMemoryRegion* region)
      : map_(), region_(region), compilation_state_(nullptr) {}

  // Initialize the data structure so it can hold `number_of_methods` mappings.
  // Note that the map is fixed size and never grows.
  void Initialize(uint32_t number_of_methods) REQUIRES(!Locks::jit_lock_);

  // Add the mapping method -> code.
  void Put(const void* code, ArtMethod* method) REQUIRES(Locks::jit_lock_);

  // Return the code pointer for the given method. If `pc` is not zero, also check
  // that `pc` falls into that code's range, and return null if it does not.
  const void* GetCodeFor(ArtMethod* method, uintptr_t pc = 0) const;

  // Return whether the map has associated code for the given method.
  bool ContainsMethod(ArtMethod* method) const {
    return GetCodeFor(method) != nullptr;
  }
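
  // Lookup sketch (illustrative only), e.g. in a process forked from the
  // zygote; `zygote_map` is a hypothetical ZygoteMap*:
  //
  //   const void* code = zygote_map->GetCodeFor(method);
  //   if (code != nullptr) {
  //     // `method` was compiled by the zygote; `code` points at its code.
  //   }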

  void SetCompilationState(ZygoteCompilationState state) {
    region_->WriteData(compilation_state_, state);
  }

  bool IsCompilationDoneButNotNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ == ZygoteCompilationState::kDone;
  }

  bool IsCompilationNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ > ZygoteCompilationState::kDone;
  }

  bool CanMapBootImageMethods() const {
    return compilation_state_ != nullptr &&
           *compilation_state_ == ZygoteCompilationState::kNotifiedOk;
  }

  ArrayRef<const Entry>::const_iterator cbegin() const {
    return map_.cbegin();
  }
  ArrayRef<const Entry>::iterator begin() {
    return map_.begin();
  }
  ArrayRef<const Entry>::const_iterator cend() const {
    return map_.cend();
  }
  ArrayRef<const Entry>::iterator end() {
    return map_.end();
  }
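
  // With begin()/end() available, the entries can be walked with a range-based
  // for loop (sketch; `zygote_map` is a hypothetical non-const ZygoteMap&):
  //
  //   for (const ZygoteMap::Entry& entry : zygote_map) {
  //     // Use entry.method and entry.code_ptr.
  //   }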

 private:
  // The map allocated with `region_`.
  ArrayRef<const Entry> map_;

  // The region in which the map is allocated.
  JitMemoryRegion* const region_;

  // The current state of compilation in the zygote. Starts with kInProgress,
  // and should end with kNotifiedOk or kNotifiedFailure.
  const ZygoteCompilationState* compilation_state_;

  DISALLOW_COPY_AND_ASSIGN(ZygoteMap);
};

class JitCodeCache {
 public:
  static constexpr size_t kMaxCapacity = 64 * MB;
  // Put the default to a very low amount for debug builds to stress the code cache
  // collection.
  static constexpr size_t kInitialCapacity = kIsDebugBuild ? 8 * KB : 64 * KB;

  // By default, do not GC until reaching 256KB.
  static constexpr size_t kReservedCapacity = kInitialCapacity * 4;

  // Create the code cache. On failure, returns null and an error message is
  // passed in the out arg `error_msg`.
  static JitCodeCache* Create(bool used_only_for_profile_data,
                              bool rwx_memory_allowed,
                              bool is_zygote,
                              std::string* error_msg);
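  //
  // A minimal creation sketch (illustrative; the flag values are assumptions):
  //
  //   std::string error_msg;
  //   std::unique_ptr<JitCodeCache> cache(JitCodeCache::Create(
  //       /*used_only_for_profile_data=*/ false,
  //       /*rwx_memory_allowed=*/ true,
  //       /*is_zygote=*/ false,
  //       &error_msg));
  //   if (cache == nullptr) {
  //     LOG(ERROR) << "Could not create the JIT code cache: " << error_msg;
  //   }
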
  ~JitCodeCache();

  bool NotifyCompilationOf(ArtMethod* method,
                           Thread* self,
                           bool osr,
                           bool prejit,
                           bool baseline,
                           JitMemoryRegion* region)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void NotifyMethodRedefined(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the compiler wants to use the
  // profiling info of `method` to drive optimizations,
  // and therefore ensure the returned profiling info object is not
  // collected.
  ProfilingInfo* NotifyCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompiling(ArtMethod* method, Thread* self, bool osr, bool baseline)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

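  // The compiler-use notifications above are expected to be paired; a rough
  // sketch (illustrative only; `code_cache` is a hypothetical JitCodeCache*):
  //
  //   ProfilingInfo* info = code_cache->NotifyCompilerUse(method, self);
  //   if (info != nullptr) {
  //     // Read inline caches etc.; `info` is kept alive until DoneCompilerUse.
  //   }
  //   code_cache->DoneCompilerUse(method, self);
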
  // Return true if the code cache contains this pc.
  bool ContainsPc(const void* pc) const;

  // Return true if the code cache contains this pc in the private region (i.e. not from zygote).
  bool PrivateRegionContainsPc(const void* pc) const;

  // Returns true if either the method's entrypoint is JIT compiled code or it is the
  // instrumentation entrypoint and we can jump to jit code for this method. For testing use only.
  bool WillExecuteJitCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this method.
  bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Allocate a region for both code and data in the JIT code cache.
  // The reserved memory is left completely uninitialized.
  bool Reserve(Thread* self,
               JitMemoryRegion* region,
               size_t code_size,
               size_t stack_map_size,
               size_t number_of_roots,
               ArtMethod* method,
               /*out*/ArrayRef<const uint8_t>* reserved_code,
               /*out*/ArrayRef<const uint8_t>* reserved_data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Initialize code and data of previously allocated memory.
  //
  // `cha_single_implementation_list` needs to be registered via CHA (if it's
  // still valid), since the compiled code still needs to be invalidated if the
  // single-implementation assumptions are violated later. This needs to be done
  // even if `has_should_deoptimize_flag` is false, which can happen due to CHA
  // guard elimination.
  bool Commit(Thread* self,
              JitMemoryRegion* region,
              ArtMethod* method,
              ArrayRef<const uint8_t> reserved_code,  // Uninitialized destination.
              ArrayRef<const uint8_t> code,  // Compiler output (source).
              ArrayRef<const uint8_t> reserved_data,  // Uninitialized destination.
              const std::vector<Handle<mirror::Object>>& roots,
              ArrayRef<const uint8_t> stack_map,  // Compiler output (source).
              const std::vector<uint8_t>& debug_info,
              bool is_full_debug_info,
              bool osr,
              bool has_should_deoptimize_flag,
              const ArenaSet<ArtMethod*>& cha_single_implementation_list)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Free the previously allocated memory regions.
  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);
  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_);
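  //
  // Putting Reserve/Commit/Free together, a compiler thread would proceed
  // roughly as below (sketch only; error handling and variable names are
  // assumptions, not the actual JIT driver code):
  //
  //   ArrayRef<const uint8_t> reserved_code;
  //   ArrayRef<const uint8_t> reserved_data;
  //   if (!code_cache->Reserve(self, region, code_size, stack_map_size,
  //                            number_of_roots, method,
  //                            &reserved_code, &reserved_data)) {
  //     return false;  // No space, even after a possible collection.
  //   }
  //   if (!code_cache->Commit(self, region, method, reserved_code, code,
  //                           reserved_data, roots, stack_map, debug_info,
  //                           is_full_debug_info, osr, has_should_deoptimize_flag,
  //                           cha_single_implementation_list)) {
  //     code_cache->Free(self, region, reserved_code.data(), reserved_data.data());
  //     return false;
  //   }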

  // Perform a collection on the code cache.
  void GarbageCollectCache(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Given the 'pc', try to find the JIT compiled code associated with it.
  // Return null if 'pc' is not in the code cache. 'method' is passed for a
  // sanity check.
  OatQuickMethodHeader* LookupMethodHeader(uintptr_t pc, ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
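  //
  // Sketch (illustrative): mapping a pc observed on a thread's stack back to
  // JIT-compiled code; `code_cache` is a hypothetical JitCodeCache*:
  //
  //   OatQuickMethodHeader* header = code_cache->LookupMethodHeader(pc, method);
  //   if (header != nullptr) {
  //     // `pc` is inside the JIT-compiled code of `method`.
  //   }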

  OatQuickMethodHeader* LookupOsrMethodHeader(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes a method from the cache for testing purposes. The caller
  // must ensure that all threads are suspended and that the method is
  // not on any thread's stack.
  bool RemoveMethod(ArtMethod* method, bool release_memory)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Remove all methods in our cache that were allocated by 'alloc'.
  void RemoveMethodsIn(Thread* self, const LinearAlloc& alloc)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyInlineCacheInto(const InlineCache& ic, Handle<mirror::ObjectArray<mirror::Class>> array)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Create a 'ProfilingInfo' for 'method'. If 'retry_allocation' is true,
  // will collect and retry if the first allocation is unsuccessful.
  ProfilingInfo* AddProfilingInfo(Thread* self,
                                  ArtMethod* method,
                                  const std::vector<uint32_t>& entries,
                                  bool retry_allocation)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool OwnsSpace(const void* mspace) const NO_THREAD_SAFETY_ANALYSIS {
    return private_region_.OwnsSpace(mspace) || shared_region_.OwnsSpace(mspace);
  }

  void* MoreCore(const void* mspace, intptr_t increment);

  // Adds to `methods` all profiled methods which are part of any of the given dex locations.
  void GetProfiledMethods(const std::set<std::string>& dex_base_locations,
                          std::vector<ProfileMethodInfo>& methods)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
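  //
  // Sketch (illustrative) of how a caller, e.g. a profile saver, might use
  // this; `code_cache` and `dex_base_locations` are assumed names:
  //
  //   std::vector<ProfileMethodInfo> methods;
  //   code_cache->GetProfiledMethods(dex_base_locations, methods);
  //   // `methods` now describes the profiled methods to persist.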

  void InvalidateAllCompiledCode()
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) REQUIRES(!Locks::jit_lock_);

  bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  void SweepRootTables(IsMarkedVisitor* visitor)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The GC needs to disallow the reading of inline caches when it processes them,
  // to avoid a class being used while it is being deleted.
  void AllowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void DisallowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void BroadcastForInlineCacheAccess() REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the method at the pointer 'old_method' is being moved to the
  // pointer 'new_method' since it is being made obsolete.
  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
      REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  // Dynamically change whether we want to garbage collect code.
  void SetGarbageCollectCode(bool value) REQUIRES(!Locks::jit_lock_);

  bool GetGarbageCollectCode() REQUIRES(!Locks::jit_lock_);

  // Unsafe variant for debug checks.
  bool GetGarbageCollectCodeUnsafe() const NO_THREAD_SAFETY_ANALYSIS {
    return garbage_collect_code_;
  }

  ZygoteMap* GetZygoteMap() {
    return &zygote_map_;
  }

  // If JIT-GC has been disabled (and instrumentation has been enabled), this will return
  // the JIT-compiled entrypoint for this method. Otherwise it will return null.
  const void* FindCompiledCodeForInstrumentation(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Fetch the code of a method that was JITted, but whose entrypoint the JIT
  // could not update due to the resolution trampoline.
  const void* GetSavedEntryPointOfPreCompiledMethod(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PostForkChildAction(bool is_system_server, bool is_zygote);

  // Clear the entrypoints of JIT compiled methods that belong to the zygote space.
  // This is used for removing non-debuggable JIT code at the point we realize the runtime
  // is debuggable. Also clears the Precompiled flag from all methods so the non-debuggable
  // code doesn't come back.
  void TransitionToDebuggable() REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  JitMemoryRegion* GetCurrentRegion();
  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }
  bool CanAllocateProfilingInfo() {
    // If we don't have a private region, we cannot allocate a profiling info.
    // A shared region does not, in general, support GC objects, which a
    // profiling info can reference.
    JitMemoryRegion* region = GetCurrentRegion();
    return region->IsValid() && !IsSharedRegion(*region);
  }
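
  // Illustrative guard (sketch) before allocating profiling data:
  //
  //   if (code_cache->CanAllocateProfilingInfo()) {
  //     code_cache->AddProfilingInfo(self, method, entries, /*retry_allocation=*/ false);
  //   }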

  // Return whether the given `ptr` is in the zygote executable memory space.
  bool IsInZygoteExecSpace(const void* ptr) const {
    return shared_region_.IsInExecSpace(ptr);
  }

 private:
  JitCodeCache();

  ProfilingInfo* AddProfilingInfoInternal(Thread* self,
                                          ArtMethod* method,
                                          const std::vector<uint32_t>& entries)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Must be called with the mutator
  // lock. The non-mutator-lock version should be used if possible. This method will release
  // and then re-acquire the mutator lock.
  void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
      REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Return
  // whether the thread actually waited.
  bool WaitForPotentialCollectionToComplete(Thread* self)
      REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);

  // Remove CHA dependents and underlying allocations for entries in `method_headers`.
  void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(!Locks::cha_lock_);

  // Removes a method from the cache. The caller must ensure that all threads
  // are suspended and that the method is not on any thread's stack.
  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Call the given callback for every compiled method in the code cache.
  void VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb)
      REQUIRES(Locks::jit_lock_);

  // Free code and data allocations for `code_ptr`.
  void FreeCodeAndData(const void* code_ptr)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Notify all waiting threads that a collection is done.
  void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);

  // Return whether the code cache's capacity is at its maximum.
  bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);

  // Return whether we should do a full collection given the current state of the cache.
  bool ShouldDoFullCollection()
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DoCollection(Thread* self, bool collect_profiling_info)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void RemoveUnmarkedCode(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkCompiledCodeOnThreadStacks(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CodeCacheBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  bool IsInZygoteDataSpace(const void* ptr) const {
    return shared_region_.IsInDataSpace(ptr);
  }

  bool IsWeakAccessEnabled(Thread* self) const;
  void WaitUntilInlineCacheAccessible(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Record that `method` is being compiled with the given mode.
  // TODO: introduce an enum for the mode.
  void AddMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Remove `method` from the list of methods being compiled with the given mode.
  void RemoveMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with the given mode.
  bool IsMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled in any mode.
  bool IsMethodBeingCompiled(ArtMethod* method) REQUIRES(Locks::jit_lock_);

  class JniStubKey;
  class JniStubData;

  // Whether the GC allows accessing weaks in inline caches. Note that this
  // is not used by the concurrent collector, which uses
  // Thread::SetWeakRefAccessEnabled instead.
  Atomic<bool> is_weak_access_enabled_;

  // Condition to wait on for accessing inline caches.
  ConditionVariable inline_cache_cond_ GUARDED_BY(Locks::jit_lock_);

  // -------------- JIT memory regions ------------------------------------- //

  // Shared region, inherited from the zygote.
  JitMemoryRegion shared_region_;

  // Process's own region.
  JitMemoryRegion private_region_;

  // -------------- Global JIT maps --------------------------------------- //

  // Holds compiled code associated with the shorty for a JNI stub.
  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod.
  SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod. Used when pre-jitting
  // methods whose entrypoints have the resolution stub.
  SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds OSR compiled code associated with the ArtMethod.
  SafeMap<ArtMethod*, const void*> osr_code_map_ GUARDED_BY(Locks::jit_lock_);

  // ProfilingInfo objects we have allocated.
  std::vector<ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);

  // Methods we are currently compiling, one set for each kind of compilation.
  std::set<ArtMethod*> current_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_osr_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Methods that the zygote has compiled and can be shared across processes
  // forked from the zygote.
  ZygoteMap zygote_map_;

  // -------------- JIT GC related data structures ----------------------- //

  // Condition to wait on during collection.
  ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);

  // Whether there is a code cache collection in progress.
  bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);

  // Bitmap for collecting code and data.
  std::unique_ptr<CodeCacheBitmap> live_bitmap_;

  // Whether the last collection round increased the code cache.
  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);

  // Whether we can do garbage collection. Not 'const' as tests may override this.
  bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);

  // ---------------- JIT statistics -------------------------------------- //

  // Number of compilations done throughout the lifetime of the JIT.
  size_t number_of_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of compilations for on-stack-replacement done throughout the lifetime of the JIT.
  size_t number_of_osr_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of code cache collections done throughout the lifetime of the JIT.
  size_t number_of_collections_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of stack map size statistics.
  Histogram<uint64_t> histogram_stack_map_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of code size statistics.
  Histogram<uint64_t> histogram_code_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of profiling info statistics.
  Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);

  friend class art::JitJniStubTestHelper;
  friend class ScopedCodeCacheWrite;
  friend class MarkCodeClosure;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};

}  // namespace jit
}  // namespace art

#endif  // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_