/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_

#include <iosfwd>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
#include "base/histogram.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "jit_memory_region.h"

namespace art {

class ArtMethod;
template<class T> class Handle;
class LinearAlloc;
class InlineCache;
class IsMarkedVisitor;
class JitJniStubTestHelper;
class OatQuickMethodHeader;
struct ProfileMethodInfo;
class ProfilingInfo;
class Thread;

namespace gc {
namespace accounting {
template<size_t kAlignment> class MemoryRangeBitmap;
}  // namespace accounting
}  // namespace gc

namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
}  // namespace mirror

namespace jit {

class MarkCodeClosure;

// Type of bitmap used for tracking live functions in the JIT code cache for the purposes
// of garbage collecting code.
using CodeCacheBitmap = gc::accounting::MemoryRangeBitmap<kJitCodeAccountingBytes>;

// The state of profile-based compilation in the zygote.
// - kInProgress:      JIT compilation is happening.
// - kDone:            JIT compilation is finished, and the zygote is preparing to notify
//                     the other processes.
// - kNotifiedOk:      the zygote has notified the other processes, which can start
//                     sharing the boot image method mappings.
// - kNotifiedFailure: the zygote has notified the other processes, but they
//                     cannot share the boot image method mappings due to
//                     unexpected errors.
enum class ZygoteCompilationState : uint8_t {
  kInProgress = 0,
  kDone = 1,
  kNotifiedOk = 2,
  kNotifiedFailure = 3,
};
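
// Illustrative progression of these states on the zygote side (a sketch based only on
// the ZygoteMap API declared below; names such as `notification_succeeded` are made up
// for the example):
//
//   zygote_map->SetCompilationState(ZygoteCompilationState::kDone);
//   // ... notify the other processes that the mappings are ready ...
//   zygote_map->SetCompilationState(notification_succeeded
//                                       ? ZygoteCompilationState::kNotifiedOk
//                                       : ZygoteCompilationState::kNotifiedFailure);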

// Class abstraction over a map of ArtMethod -> compiled code, where the
// ArtMethods are compiled by the zygote, and the map acts as a communication
// channel between the zygote and the other processes.
// For the zygote process, this map is the only place it puts compiled code;
// JitCodeCache.method_code_map_ is empty.
//
// This map is writable only by the zygote, and readable by all children.
class ZygoteMap {
 public:
  struct Entry {
    ArtMethod* method;
    // Note we currently only allocate code in the low 4GiB, so we could just reserve 4 bytes
    // for the code pointer. For simplicity, and in case we move to 64-bit
    // addresses for code, keep it void* for now.
    const void* code_ptr;
  };

  explicit ZygoteMap(JitMemoryRegion* region)
      : map_(), region_(region), compilation_state_(nullptr) {}

  // Initialize the data structure so it can hold `number_of_methods` mappings.
  // Note that the map is fixed size and never grows.
  void Initialize(uint32_t number_of_methods) REQUIRES(!Locks::jit_lock_);

  // Add the mapping method -> code.
  void Put(const void* code, ArtMethod* method) REQUIRES(Locks::jit_lock_);

  // Return the code pointer for the given method. If pc is not zero, check that
  // the pc falls into that code range. Return null otherwise.
  const void* GetCodeFor(ArtMethod* method, uintptr_t pc = 0) const;

  // Return whether the map has associated code for the given method.
  bool ContainsMethod(ArtMethod* method) const {
    return GetCodeFor(method) != nullptr;
  }

  void SetCompilationState(ZygoteCompilationState state) {
    region_->WriteData(compilation_state_, state);
  }

  bool IsCompilationDoneButNotNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ == ZygoteCompilationState::kDone;
  }

  bool IsCompilationNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ > ZygoteCompilationState::kDone;
  }

  bool CanMapBootImageMethods() const {
    return compilation_state_ != nullptr &&
           *compilation_state_ == ZygoteCompilationState::kNotifiedOk;
  }

  ArrayRef<const Entry>::const_iterator cbegin() const {
    return map_.cbegin();
  }
  ArrayRef<const Entry>::iterator begin() {
    return map_.begin();
  }
  ArrayRef<const Entry>::const_iterator cend() const {
    return map_.cend();
  }
  ArrayRef<const Entry>::iterator end() {
    return map_.end();
  }

 private:
  // The map allocated with `region_`.
  ArrayRef<const Entry> map_;

  // The region in which the map is allocated.
  JitMemoryRegion* const region_;

  // The current state of compilation in the zygote. Starts with kInProgress,
  // and should end with kNotifiedOk or kNotifiedFailure.
  const ZygoteCompilationState* compilation_state_;

  DISALLOW_COPY_AND_ASSIGN(ZygoteMap);
};
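
// How a forked process might consult the map (an illustrative sketch using the API
// above; the actual lookups happen inside the JIT when it resolves entrypoints):
//
//   const ZygoteMap* map = code_cache->GetZygoteMap();
//   if (map->CanMapBootImageMethods() && map->ContainsMethod(method)) {
//     const void* code = map->GetCodeFor(method);
//     // Use `code` as the method's shared, zygote-compiled entrypoint.
//   }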

class JitCodeCache {
 public:
  static constexpr size_t kMaxCapacity = 64 * MB;
  // Set the default to a very low amount for debug builds to stress the code cache
  // collection.
  static constexpr size_t kInitialCapacity = kIsDebugBuild ? 8 * KB : 64 * KB;

  // By default, do not GC until reaching 256KB.
  static constexpr size_t kReservedCapacity = kInitialCapacity * 4;

  // Create the code cache. On failure, returns null and an error message is passed
  // in the out arg `error_msg`.
  static JitCodeCache* Create(bool used_only_for_profile_data,
                              bool rwx_memory_allowed,
                              bool is_zygote,
                              std::string* error_msg);
  ~JitCodeCache();

  bool NotifyCompilationOf(ArtMethod* method,
                           Thread* self,
                           bool osr,
                           bool prejit,
                           bool baseline,
                           JitMemoryRegion* region)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void NotifyMethodRedefined(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the compiler wants to use the
  // profiling info of `method` to drive optimizations,
  // and therefore ensure the returned profiling info object is not
  // collected.
  ProfilingInfo* NotifyCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompiling(ArtMethod* method, Thread* self, bool osr, bool baseline)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this pc.
  bool ContainsPc(const void* pc) const;

  // Return true if the code cache contains this pc in the private region (i.e. not from zygote).
  bool PrivateRegionContainsPc(const void* pc) const;

  // Returns true if either the method's entrypoint is JIT compiled code or it is the
  // instrumentation entrypoint and we can jump to jit code for this method. For testing use only.
  bool WillExecuteJitCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this method.
  bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Allocate a region for both code and data in the JIT code cache.
  // The reserved memory is left completely uninitialized.
  bool Reserve(Thread* self,
               JitMemoryRegion* region,
               size_t code_size,
               size_t stack_map_size,
               size_t number_of_roots,
               ArtMethod* method,
               /*out*/ArrayRef<const uint8_t>* reserved_code,
               /*out*/ArrayRef<const uint8_t>* reserved_data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Initialize code and data of previously allocated memory.
  //
  // `cha_single_implementation_list` needs to be registered via CHA (if it's
  // still valid), since the compiled code still needs to be invalidated if the
  // single-implementation assumptions are violated later. This needs to be done
  // even if `has_should_deoptimize_flag` is false, which can happen due to CHA
  // guard elimination.
  bool Commit(Thread* self,
              JitMemoryRegion* region,
              ArtMethod* method,
              ArrayRef<const uint8_t> reserved_code,  // Uninitialized destination.
              ArrayRef<const uint8_t> code,  // Compiler output (source).
              ArrayRef<const uint8_t> reserved_data,  // Uninitialized destination.
              const std::vector<Handle<mirror::Object>>& roots,
              ArrayRef<const uint8_t> stack_map,  // Compiler output (source).
              const std::vector<uint8_t>& debug_info,
              bool is_full_debug_info,
              bool osr,
              bool has_should_deoptimize_flag,
              const ArenaSet<ArtMethod*>& cha_single_implementation_list)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Free the previously allocated memory regions.
  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);
  void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_);
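
  // A typical allocation flow pairing the three calls above (an illustrative sketch;
  // the real caller is the JIT compiler, which also prepares `roots`, `stack_map`,
  // `debug_info` and the CHA dependency list):
  //
  //   ArrayRef<const uint8_t> reserved_code;
  //   ArrayRef<const uint8_t> reserved_data;
  //   if (!code_cache->Reserve(self, region, code.size(), stack_map.size(), roots.size(),
  //                            method, &reserved_code, &reserved_data)) {
  //     return false;  // Out of space in the code cache.
  //   }
  //   if (!code_cache->Commit(self, region, method, reserved_code, code, reserved_data, roots,
  //                           stack_map, debug_info, is_full_debug_info, osr,
  //                           has_should_deoptimize_flag, cha_single_implementation_list)) {
  //     code_cache->Free(self, region, reserved_code.data(), reserved_data.data());
  //     return false;
  //   }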

  // Perform a collection on the code cache.
  void GarbageCollectCache(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Given the 'pc', try to find the JIT compiled code associated with it.
  // Return null if 'pc' is not in the code cache. 'method' is passed for
  // a sanity check.
  OatQuickMethodHeader* LookupMethodHeader(uintptr_t pc, ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  OatQuickMethodHeader* LookupOsrMethodHeader(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes method from the cache for testing purposes. The caller
  // must ensure that all threads are suspended and that the method is
  // not in any thread's stack.
  bool RemoveMethod(ArtMethod* method, bool release_memory)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Remove all methods in our cache that were allocated by 'alloc'.
  void RemoveMethodsIn(Thread* self, const LinearAlloc& alloc)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyInlineCacheInto(const InlineCache& ic, Handle<mirror::ObjectArray<mirror::Class>> array)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Create a 'ProfilingInfo' for 'method'. If 'retry_allocation' is true,
  // this will collect and retry if the first allocation is unsuccessful.
  ProfilingInfo* AddProfilingInfo(Thread* self,
                                  ArtMethod* method,
                                  const std::vector<uint32_t>& entries,
                                  bool retry_allocation)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool OwnsSpace(const void* mspace) const NO_THREAD_SAFETY_ANALYSIS {
    return private_region_.OwnsSpace(mspace) || shared_region_.OwnsSpace(mspace);
  }

  void* MoreCore(const void* mspace, intptr_t increment);

  // Adds to `methods` all profiled methods which are part of any of the given dex locations.
  void GetProfiledMethods(const std::set<std::string>& dex_base_locations,
                          std::vector<ProfileMethodInfo>& methods)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateAllCompiledCode()
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) REQUIRES(!Locks::jit_lock_);

  bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  void SweepRootTables(IsMarkedVisitor* visitor)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The GC needs to disallow the reading of inline caches when it processes them,
  // to avoid having a class used while it is being deleted.
  void AllowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void DisallowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void BroadcastForInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
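
  // A GC pause might bracket its processing of inline caches roughly like this
  // (an illustrative sketch only; the exact call sites live in the runtime's
  // system-weak handling, and the concurrent collector uses a different mechanism):
  //
  //   code_cache->DisallowInlineCacheAccess();      // Readers of inline caches now wait.
  //   // ... visit and clear dead classes referenced by the inline caches ...
  //   code_cache->AllowInlineCacheAccess();         // Re-enable access.
  //   code_cache->BroadcastForInlineCacheAccess();  // Wake any thread that was waiting.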

  // Notify the code cache that the method at the pointer 'old_method' is being moved to the pointer
  // 'new_method' since it is being made obsolete.
  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
      REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  // Dynamically change whether we want to garbage collect code.
  void SetGarbageCollectCode(bool value) REQUIRES(!Locks::jit_lock_);

  bool GetGarbageCollectCode() REQUIRES(!Locks::jit_lock_);

  // Unsafe variant for debug checks.
  bool GetGarbageCollectCodeUnsafe() const NO_THREAD_SAFETY_ANALYSIS {
    return garbage_collect_code_;
  }
  ZygoteMap* GetZygoteMap() {
    return &zygote_map_;
  }

  // If JIT GC has been disabled (and instrumentation has been enabled), this will return the
  // jit-compiled entrypoint for this method. Otherwise it will return null.
  const void* FindCompiledCodeForInstrumentation(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Fetch the code of a method that was JITted, but whose entrypoint the JIT could not
  // update due to the resolution trampoline.
  const void* GetSavedEntryPointOfPreCompiledMethod(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PostForkChildAction(bool is_system_server, bool is_zygote);

  // Clear the entrypoints of JIT compiled methods that belong in the zygote space.
  // This is used for removing non-debuggable JIT code at the point we realize the runtime
  // is debuggable. Also clear the Precompiled flag from all methods so the non-debuggable code
  // doesn't come back.
  void TransitionToDebuggable() REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  JitMemoryRegion* GetCurrentRegion();
  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }
  bool CanAllocateProfilingInfo() {
    // If we don't have a private region, we cannot allocate a profiling info.
    // A shared region does not, in general, support GC objects, which a profiling info
    // can reference.
    JitMemoryRegion* region = GetCurrentRegion();
    return region->IsValid() && !IsSharedRegion(*region);
  }

  // Return whether the given `ptr` is in the zygote executable memory space.
  bool IsInZygoteExecSpace(const void* ptr) const {
    return shared_region_.IsInExecSpace(ptr);
  }

 private:
  JitCodeCache();

  ProfilingInfo* AddProfilingInfoInternal(Thread* self,
                                          ArtMethod* method,
                                          const std::vector<uint32_t>& entries)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Must be called with the mutator lock.
  // The non-mutator-lock version should be used if possible. This method will release and then
  // re-acquire the mutator lock.
  void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
      REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Return
  // whether the thread actually waited.
  bool WaitForPotentialCollectionToComplete(Thread* self)
      REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);

  // Remove CHA dependents and underlying allocations for entries in `method_headers`.
  void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(!Locks::cha_lock_);

  // Removes a method from the cache. The caller must ensure that all threads
  // are suspended and that the method is not in any thread's stack.
  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Call the given callback for every compiled method in the code cache.
  void VisitAllMethods(const std::function<void(const void*, ArtMethod*)>& cb)
      REQUIRES(Locks::jit_lock_);

  // Free code and data allocations for `code_ptr`.
  void FreeCodeAndData(const void* code_ptr)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Notify all waiting threads that a collection is done.
  void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);

  // Return whether the code cache's capacity is at its maximum.
  bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);

  // Return whether we should do a full collection given the current state of the cache.
  bool ShouldDoFullCollection()
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DoCollection(Thread* self, bool collect_profiling_info)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void RemoveUnmarkedCode(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkCompiledCodeOnThreadStacks(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CodeCacheBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  bool IsInZygoteDataSpace(const void* ptr) const {
    return shared_region_.IsInDataSpace(ptr);
  }

  bool IsWeakAccessEnabled(Thread* self) const;
  void WaitUntilInlineCacheAccessible(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Record that `method` is being compiled with the given mode.
  // TODO: introduce an enum for the mode.
  void AddMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Remove `method` from the list of methods being compiled with the given mode.
  void RemoveMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled with the given mode.
  bool IsMethodBeingCompiled(ArtMethod* method, bool osr, bool baseline)
      REQUIRES(Locks::jit_lock_);

  // Return whether `method` is being compiled in any mode.
  bool IsMethodBeingCompiled(ArtMethod* method) REQUIRES(Locks::jit_lock_);

  class JniStubKey;
  class JniStubData;

  // Whether the GC allows accessing weaks in inline caches. Note that this
  // is not used by the concurrent collector, which uses
  // Thread::SetWeakRefAccessEnabled instead.
  Atomic<bool> is_weak_access_enabled_;

  // Condition to wait on for accessing inline caches.
  ConditionVariable inline_cache_cond_ GUARDED_BY(Locks::jit_lock_);

  // -------------- JIT memory regions ------------------------------------- //

  // Shared region, inherited from the zygote.
  JitMemoryRegion shared_region_;

  // Process's own region.
  JitMemoryRegion private_region_;

  // -------------- Global JIT maps --------------------------------------- //

  // Holds compiled code associated with the shorty for a JNI stub.
  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod.
  SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod. Used when pre-jitting
  // methods whose entrypoints have the resolution stub.
  SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds OSR compiled code associated with the ArtMethod.
  SafeMap<ArtMethod*, const void*> osr_code_map_ GUARDED_BY(Locks::jit_lock_);

  // ProfilingInfo objects we have allocated.
  std::vector<ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);

  // Methods we are currently compiling, one set for each kind of compilation.
  std::set<ArtMethod*> current_optimized_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_osr_compilations_ GUARDED_BY(Locks::jit_lock_);
  std::set<ArtMethod*> current_baseline_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Methods that the zygote has compiled and can be shared across processes
  // forked from the zygote.
  ZygoteMap zygote_map_;

  // -------------- JIT GC related data structures ----------------------- //

  // Condition to wait on during collection.
  ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);

  // Whether there is a code cache collection in progress.
  bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);

  // Bitmap for collecting code and data.
  std::unique_ptr<CodeCacheBitmap> live_bitmap_;

  // Whether the last collection round increased the code cache.
  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);

  // Whether we can do garbage collection. Not 'const' as tests may override this.
  bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);

  // ---------------- JIT statistics -------------------------------------- //

  // Number of compilations done throughout the lifetime of the JIT.
  size_t number_of_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of compilations for on-stack-replacement done throughout the lifetime of the JIT.
  size_t number_of_osr_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of code cache collections done throughout the lifetime of the JIT.
  size_t number_of_collections_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of stack map size statistics.
  Histogram<uint64_t> histogram_stack_map_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of code size statistics.
  Histogram<uint64_t> histogram_code_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of profiling info statistics.
  Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);

  friend class art::JitJniStubTestHelper;
  friend class ScopedCodeCacheWrite;
  friend class MarkCodeClosure;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};

}  // namespace jit
}  // namespace art

#endif  // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_