blob: a2f451347a6d299134dc531545de133903d9ce2e [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070016
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070017#include "class_linker.h"
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070018
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070019#include <unistd.h>
20
Alex Lighteb7c1442015-08-31 13:17:42 -070021#include <algorithm>
Brian Carlstromdbc05252011-09-09 01:59:59 -070022#include <deque>
Vladimir Markobf121912019-06-04 13:49:05 +010023#include <forward_list>
Ian Rogerscf7f1912014-10-22 22:06:39 -070024#include <iostream>
Vladimir Marko21300532017-01-24 18:06:55 +000025#include <map>
Ian Rogers700a4022014-05-19 16:49:03 -070026#include <memory>
Fred Shih381e4ca2014-08-25 17:24:27 -070027#include <queue>
Ian Rogers0cfe1fb2011-08-26 03:29:44 -070028#include <string>
Andreas Gampe9f3928f2019-02-04 11:19:31 -080029#include <string_view>
Alex Lighteb7c1442015-08-31 13:17:42 -070030#include <tuple>
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070031#include <utility>
Elliott Hughes90a33692011-08-30 13:27:07 -070032#include <vector>
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070033
Andreas Gampe46ee31b2016-12-14 10:11:49 -080034#include "android-base/stringprintf.h"
35
Mathieu Chartierc7853442015-03-27 14:35:38 -070036#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070037#include "art_method-inl.h"
Vladimir Markobf121912019-06-04 13:49:05 +010038#include "barrier.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070039#include "base/arena_allocator.h"
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +000040#include "base/arena_bit_vector.h"
Elliott Hughes1aa246d2012-12-13 09:29:36 -080041#include "base/casts.h"
Andreas Gampe19f54162019-05-14 16:16:28 -070042#include "base/file_utils.h"
Vladimir Marko782fb712020-12-23 12:47:31 +000043#include "base/hash_map.h"
44#include "base/hash_set.h"
David Sehr67bf42e2018-02-26 16:43:04 -080045#include "base/leb128.h"
Elliott Hughes07ed66b2012-12-12 18:34:25 -080046#include "base/logging.h"
Lokesh Gidra606bd942022-05-23 19:00:09 +000047#include "base/mem_map_arena_pool.h"
Eric Holk74584e62021-02-18 14:39:17 -080048#include "base/metrics/metrics.h"
Vladimir Markobf121912019-06-04 13:49:05 +010049#include "base/mutex-inl.h"
David Sehrc431b9d2018-03-02 12:01:51 -080050#include "base/os.h"
51#include "base/quasi_atomic.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070052#include "base/scoped_arena_containers.h"
Narayan Kamathd1c606f2014-06-09 16:50:19 +010053#include "base/scoped_flock.h"
Elliott Hughes1aa246d2012-12-13 09:29:36 -080054#include "base/stl_util.h"
Vladimir Markob9c29f62019-03-20 14:22:51 +000055#include "base/string_view_cpp20.h"
Mathieu Chartier32ce2ad2016-03-04 14:58:03 -080056#include "base/systrace.h"
Vladimir Marko80afd022015-05-19 18:08:00 +010057#include "base/time_utils.h"
Elliott Hughes76160052012-12-12 16:31:20 -080058#include "base/unix_file/fd_file.h"
David Sehrc431b9d2018-03-02 12:01:51 -080059#include "base/utils.h"
Andreas Gampeb9aec2c2015-04-23 22:23:47 -070060#include "base/value_object.h"
Mingyao Yang063fc772016-08-02 11:02:54 -070061#include "cha.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080062#include "class_linker-inl.h"
Calin Juravle57d0acc2017-07-11 17:41:30 -070063#include "class_loader_utils.h"
Vladimir Marko5868ada2020-05-12 11:50:34 +010064#include "class_root-inl.h"
Mathieu Chartiere4275c02015-08-06 15:34:15 -070065#include "class_table-inl.h"
Vladimir Marko2b5eaa22013-12-13 13:59:30 +000066#include "compiler_callbacks.h"
Vladimir Marko606adb32018-04-05 14:49:24 +010067#include "debug_print.h"
Elliott Hughes4740cdf2011-12-07 14:07:12 -080068#include "debugger.h"
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -070069#include "dex/class_accessor-inl.h"
David Sehrb2ec9f52018-02-21 13:20:31 -080070#include "dex/descriptors_names.h"
David Sehr9e734c72018-01-04 17:56:19 -080071#include "dex/dex_file-inl.h"
72#include "dex/dex_file_exception_helpers.h"
73#include "dex/dex_file_loader.h"
Andreas Gampead1aa632019-01-02 10:30:54 -080074#include "dex/signature-inl.h"
David Sehr0225f8e2018-01-31 08:52:24 +000075#include "dex/utf.h"
Vladimir Marko5115a4d2019-10-17 14:56:47 +010076#include "entrypoints/entrypoint_utils-inl.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070077#include "entrypoints/runtime_asm_entrypoints.h"
Alex Light705ad492015-09-21 11:36:30 -070078#include "experimental_flags.h"
Ian Rogers1d54e732013-05-02 21:10:01 -070079#include "gc/accounting/card_table-inl.h"
Mathieu Chartier03c1dd92016-03-07 16:13:54 -080080#include "gc/accounting/heap_bitmap-inl.h"
Chang Xingba17dbd2017-06-28 21:27:56 +000081#include "gc/accounting/space_bitmap-inl.h"
Andreas Gampe1c158a02017-07-13 17:26:19 -070082#include "gc/heap-visit-objects-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -070083#include "gc/heap.h"
Mathieu Chartier1b1e31f2016-05-19 10:13:04 -070084#include "gc/scoped_gc_critical_section.h"
Ian Rogers1d54e732013-05-02 21:10:01 -070085#include "gc/space/image_space.h"
Vladimir Marko8d6768d2017-03-14 10:13:21 +000086#include "gc/space/space-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -070087#include "gc_root-inl.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -070088#include "handle_scope-inl.h"
Andreas Gampeaa120012018-03-28 16:23:24 -070089#include "hidden_api.h"
Mathieu Chartier4a26f172016-01-26 14:26:18 -080090#include "image-inl.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070091#include "imt_conflict_table.h"
92#include "imtable-inl.h"
Mathieu Chartier74ccee62018-10-10 10:30:29 -070093#include "intern_table-inl.h"
Ian Rogers64b6d142012-10-29 16:34:15 -070094#include "interpreter/interpreter.h"
Nicolas Geoffray0315efa2020-06-26 11:42:39 +010095#include "interpreter/mterp/nterp.h"
David Srbeckyfb3de3d2018-01-29 16:11:49 +000096#include "jit/debugger_interface.h"
Mathieu Chartiere5f13e52015-02-24 09:37:21 -080097#include "jit/jit.h"
98#include "jit/jit_code_cache.h"
Vladimir Markoa3ad0cd2018-05-04 10:06:38 +010099#include "jni/java_vm_ext.h"
100#include "jni/jni_internal.h"
Lokesh Gidra606bd942022-05-23 19:00:09 +0000101#include "linear_alloc-inl.h"
Andreas Gampe8e0f0432018-10-24 13:38:03 -0700102#include "mirror/array-alloc-inl.h"
103#include "mirror/array-inl.h"
Orion Hodsonc069a302017-01-18 09:23:12 +0000104#include "mirror/call_site.h"
Andreas Gampe70f5fd02018-10-24 19:58:37 -0700105#include "mirror/class-alloc-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800106#include "mirror/class-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700107#include "mirror/class.h"
Alex Lightd6251582016-10-31 11:12:30 -0700108#include "mirror/class_ext.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800109#include "mirror/class_loader.h"
Ian Rogers39ebcb82013-05-30 16:57:23 -0700110#include "mirror/dex_cache-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700111#include "mirror/dex_cache.h"
Narayan Kamath000e1882016-10-24 17:14:25 +0100112#include "mirror/emulated_stack_frame.h"
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700113#include "mirror/field.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800114#include "mirror/iftable-inl.h"
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700115#include "mirror/method.h"
Narayan Kamathafa48272016-08-03 12:46:58 +0100116#include "mirror/method_handle_impl.h"
Orion Hodsonc069a302017-01-18 09:23:12 +0000117#include "mirror/method_handles_lookup.h"
Steven Morelande431e272017-07-18 16:53:49 -0700118#include "mirror/method_type.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800119#include "mirror/object-inl.h"
Chang Xingba17dbd2017-06-28 21:27:56 +0000120#include "mirror/object-refvisitor-inl.h"
Alex Lighta9bbc082019-11-14 14:51:41 -0800121#include "mirror/object.h"
Andreas Gampe52ecb652018-10-24 15:18:21 -0700122#include "mirror/object_array-alloc-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700123#include "mirror/object_array-inl.h"
Alex Light133987d2020-03-26 19:22:12 +0000124#include "mirror/object_array.h"
Chris Wailes0c61be42018-09-26 17:27:34 -0700125#include "mirror/object_reference.h"
126#include "mirror/object_reference-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800127#include "mirror/proxy.h"
Fred Shih4ee7a662014-07-11 09:59:27 -0700128#include "mirror/reference-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800129#include "mirror/stack_trace_element.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700130#include "mirror/string-inl.h"
Andreas Gampe501c3b02019-04-17 21:54:27 +0000131#include "mirror/throwable.h"
Orion Hodson005ac512017-10-24 15:43:43 +0100132#include "mirror/var_handle.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700133#include "native/dalvik_system_DexFile.h"
Andreas Gampe373a9b52017-10-18 09:01:57 -0700134#include "nativehelper/scoped_local_ref.h"
Nicolas Geoffrayc39af942021-01-25 08:43:57 +0000135#include "nterp_helpers.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700136#include "oat.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700137#include "oat_file-inl.h"
Steven Morelande431e272017-07-18 16:53:49 -0700138#include "oat_file.h"
Mathieu Chartiere58991b2015-10-13 07:59:34 -0700139#include "oat_file_assistant.h"
140#include "oat_file_manager.h"
141#include "object_lock.h"
David Sehr82d046e2018-04-23 08:14:19 -0700142#include "profile/profile_compilation_info.h"
Brian Carlstrom1f870082011-08-23 16:02:11 -0700143#include "runtime.h"
Andreas Gampeac30fa22017-01-18 21:02:36 -0800144#include "runtime_callbacks.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -0700145#include "scoped_thread_state_change-inl.h"
Ian Rogers7b078e82014-09-10 14:44:24 -0700146#include "thread-inl.h"
Alex Light133987d2020-03-26 19:22:12 +0000147#include "thread.h"
Mingyao Yang063fc772016-08-02 11:02:54 -0700148#include "thread_list.h"
Mathieu Chartier7778b882015-10-05 16:41:10 -0700149#include "trace.h"
Vladimir Markob68bb7a2020-03-17 10:55:25 +0000150#include "transaction.h"
Nicolas Geoffray6df45112021-02-07 21:51:58 +0000151#include "vdex_file.h"
Andreas Gampea43ba3d2019-03-13 15:49:20 -0700152#include "verifier/class_verifier.h"
Nicolas Geoffray80789962021-04-30 16:50:39 +0100153#include "verifier/verifier_deps.h"
Elliott Hugheseac76672012-05-24 21:56:51 -0700154#include "well_known_classes.h"
Carl Shapiro0e5d75d2011-07-06 18:28:37 -0700155
156namespace art {
157
Andreas Gampe46ee31b2016-12-14 10:11:49 -0800158using android::base::StringPrintf;
159
// Debug-build-only consistency-check flags; both compile to `false` in
// release builds so the checks are dead-code-eliminated.
static constexpr bool kCheckImageObjects = kIsDebugBuild;
static constexpr bool kVerifyArtMethodDeclaringClasses = kIsDebugBuild;
Mathieu Chartierc7853442015-03-27 14:35:38 -0700162
// Throws a java.lang.NoClassDefFoundError on the current thread, formatting
// the detail message printf-style. The forward declaration carries the
// `__format__` attribute so the compiler checks the format string against
// the varargs at every call site.
static void ThrowNoClassDefFoundError(const char* fmt, ...)
    __attribute__((__format__(__printf__, 1, 2)))
    REQUIRES_SHARED(Locks::mutator_lock_);
static void ThrowNoClassDefFoundError(const char* fmt, ...) {
  va_list args;
  va_start(args, fmt);
  Thread* self = Thread::Current();
  self->ThrowNewExceptionV("Ljava/lang/NoClassDefFoundError;", fmt, args);
  va_end(args);
}
173
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100174static ObjPtr<mirror::Object> GetErroneousStateError(ObjPtr<mirror::Class> c)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700175 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lightd6251582016-10-31 11:12:30 -0700176 ObjPtr<mirror::ClassExt> ext(c->GetExtData());
177 if (ext == nullptr) {
178 return nullptr;
179 } else {
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100180 return ext->GetErroneousStateError();
Alex Lightd6251582016-10-31 11:12:30 -0700181 }
182}
183
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100184static bool IsVerifyError(ObjPtr<mirror::Object> obj)
Alex Lightd6251582016-10-31 11:12:30 -0700185 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100186 // This is slow, but we only use it for rethrowing an error, and for DCHECK.
187 return obj->GetClass()->DescriptorEquals("Ljava/lang/VerifyError;");
188}
189
190// Helper for ThrowEarlierClassFailure. Throws the stored error.
191static void HandleEarlierErroneousStateError(Thread* self,
192 ClassLinker* class_linker,
193 ObjPtr<mirror::Class> c)
194 REQUIRES_SHARED(Locks::mutator_lock_) {
195 ObjPtr<mirror::Object> obj = GetErroneousStateError(c);
Andreas Gampe99babb62015-11-02 16:20:00 -0800196 DCHECK(obj != nullptr);
197 self->AssertNoPendingException();
Nicolas Geoffray4dc65892021-07-05 17:43:35 +0100198 DCHECK(!obj->IsClass());
199 ObjPtr<mirror::Class> throwable_class = GetClassRoot<mirror::Throwable>(class_linker);
200 ObjPtr<mirror::Class> error_class = obj->GetClass();
201 CHECK(throwable_class->IsAssignableFrom(error_class));
202 self->SetException(obj->AsThrowable());
Andreas Gampe99babb62015-11-02 16:20:00 -0800203 self->AssertPendingException();
204}
205
Nicolas Geoffray66934ef2021-07-07 14:56:23 +0100206static void UpdateClassAfterVerification(Handle<mirror::Class> klass,
207 PointerSize pointer_size,
208 verifier::FailureKind failure_kind)
Andreas Gampe5b20b352018-10-11 19:03:20 -0700209 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray00391822019-12-10 10:17:23 +0000210 Runtime* runtime = Runtime::Current();
211 ClassLinker* class_linker = runtime->GetClassLinker();
Nicolas Geoffray2990b882021-11-30 15:48:49 +0000212 if (klass->IsVerified() && (failure_kind == verifier::FailureKind::kNoFailure)) {
Andreas Gampe5b20b352018-10-11 19:03:20 -0700213 klass->SetSkipAccessChecksFlagOnAllMethods(pointer_size);
Nicolas Geoffray66934ef2021-07-07 14:56:23 +0100214 }
215
216 // Now that the class has passed verification, try to set nterp entrypoints
217 // to methods that currently use the switch interpreter.
218 if (interpreter::CanRuntimeUseNterp()) {
219 for (ArtMethod& m : klass->GetMethods(pointer_size)) {
Nicolas Geoffrayc8a694d2022-01-17 17:12:38 +0000220 if (class_linker->IsQuickToInterpreterBridge(m.GetEntryPointFromQuickCompiledCode())) {
221 runtime->GetInstrumentation()->InitializeMethodsCode(&m, /*aot_code=*/nullptr);
222 }
Nicolas Geoffray00391822019-12-10 10:17:23 +0000223 }
Andreas Gampe5b20b352018-10-11 19:03:20 -0700224 }
225}
226
// Callback responsible for making a batch of classes visibly initialized
// after all threads have called it from a checkpoint, ensuring visibility.
class ClassLinker::VisiblyInitializedCallback final
    : public Closure, public IntrusiveForwardListNode<VisiblyInitializedCallback> {
 public:
  explicit VisiblyInitializedCallback(ClassLinker* class_linker)
      : class_linker_(class_linker),
        num_classes_(0u),
        thread_visibility_counter_(0),
        barriers_() {
    std::fill_n(classes_, kMaxClasses, nullptr);
  }

  bool IsEmpty() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == 0u;
  }

  bool IsFull() const {
    DCHECK_LE(num_classes_, kMaxClasses);
    return num_classes_ == kMaxClasses;
  }

  // Adds an initialized (but not yet visibly initialized) class to the batch.
  // The class is held via a weak global ref so it can still be unloaded/GCed
  // while the batch is pending.
  void AddClass(Thread* self, ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kInitialized);
    DCHECK(!IsFull());
    classes_[num_classes_] = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, klass);
    ++num_classes_;
  }

  // Registers a barrier that shall be `Pass()`ed when this callback completes.
  void AddBarrier(Barrier* barrier) {
    barriers_.push_front(barrier);
  }

  std::forward_list<Barrier*> GetAndClearBarriers() {
    std::forward_list<Barrier*> result;
    result.swap(barriers_);
    result.reverse();  // Return barriers in insertion order.
    return result;
  }

  // Runs a checkpoint on all threads; once every thread has executed `Run()`,
  // the batched classes are marked visibly initialized.
  void MakeVisible(Thread* self) {
    DCHECK_EQ(thread_visibility_counter_.load(std::memory_order_relaxed), 0);
    size_t count = Runtime::Current()->GetThreadList()->RunCheckpoint(this);
    AdjustThreadVisibilityCounter(self, count);
  }

  // Checkpoint body, executed once per thread.
  void Run(Thread* self) override {
    AdjustThreadVisibilityCounter(self, -1);
  }

 private:
  void AdjustThreadVisibilityCounter(Thread* self, ssize_t adjustment) {
    ssize_t old = thread_visibility_counter_.fetch_add(adjustment, std::memory_order_relaxed);
    if (old + adjustment == 0) {
      // All threads passed the checkpoint. Mark classes as visibly initialized.
      {
        ScopedObjectAccess soa(self);
        StackHandleScope<1u> hs(self);
        MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
        JavaVMExt* vm = self->GetJniEnv()->GetVm();
        for (size_t i = 0, num = num_classes_; i != num; ++i) {
          klass.Assign(ObjPtr<mirror::Class>::DownCast(self->DecodeJObject(classes_[i])));
          vm->DeleteWeakGlobalRef(self, classes_[i]);
          // The weak ref may have been cleared if the class was unloaded.
          if (klass != nullptr) {
            mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
            class_linker_->FixupStaticTrampolines(self, klass.Get());
          }
        }
        num_classes_ = 0u;
      }
      class_linker_->VisiblyInitializedCallbackDone(self, this);
    }
  }

  // Making classes initialized in bigger batches helps with app startup for
  // apps that initialize a lot of classes by running fewer checkpoints.
  // (On the other hand, bigger batches make class initialization checks more
  // likely to take a slow path but that is mitigated by making partially
  // filled buffers visibly initialized if we take the slow path many times.
  // See `Thread::kMakeVisiblyInitializedCounterTriggerCount`.)
  static constexpr size_t kMaxClasses = 48;

  ClassLinker* const class_linker_;
  size_t num_classes_;
  jweak classes_[kMaxClasses];

  // The thread visibility counter starts at 0 and it is incremented by the number of
  // threads that need to run this callback (by the thread that requests the callback
  // to be run) and decremented once for each `Run()` execution. When it reaches 0,
  // whether after the increment or after a decrement, we know that `Run()` was executed
  // for all threads and therefore we can mark the classes as visibly initialized.
  std::atomic<ssize_t> thread_visibility_counter_;

  // List of barriers to `Pass()` for threads that wait for the callback to complete.
  std::forward_list<Barrier*> barriers_;
};
324
// Flushes the pending batch of initialized classes (if any) so they become
// visibly initialized; if `wait` is true, blocks until all currently running
// visibility callbacks (including the newly flushed one) have completed.
void ClassLinker::MakeInitializedClassesVisiblyInitialized(Thread* self, bool wait) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    return;  // Nothing to do. Thanks to the x86 memory model classes skip the initialized status.
  }
  std::optional<Barrier> maybe_barrier;  // Avoid constructing the Barrier for `wait == false`.
  if (wait) {
    // Waiting would deadlock if the checkpoint threads need the mutator lock.
    Locks::mutator_lock_->AssertNotHeld(self);
    maybe_barrier.emplace(0);
  }
  int wait_count = 0;
  VisiblyInitializedCallback* callback = nullptr;
  {
    MutexLock lock(self, visibly_initialized_callback_lock_);
    // Detach a non-empty pending callback and move it to the running list;
    // ownership transfers to the intrusive list until the callback completes.
    if (visibly_initialized_callback_ != nullptr && !visibly_initialized_callback_->IsEmpty()) {
      callback = visibly_initialized_callback_.release();
      running_visibly_initialized_callbacks_.push_front(*callback);
    }
    if (wait) {
      DCHECK(maybe_barrier.has_value());
      Barrier* barrier = std::addressof(*maybe_barrier);
      // Register the barrier with every in-flight callback so each one
      // signals us on completion.
      for (VisiblyInitializedCallback& cb : running_visibly_initialized_callbacks_) {
        cb.AddBarrier(barrier);
        ++wait_count;
      }
    }
  }
  // Start the checkpoint outside the lock; it may suspend.
  if (callback != nullptr) {
    callback->MakeVisible(self);
  }
  if (wait_count != 0) {
    DCHECK(maybe_barrier.has_value());
    maybe_barrier->Increment(self, wait_count);
  }
}
359
// Completion handler for a VisiblyInitializedCallback: signals waiters,
// unlinks the callback from the running list, and either caches it for
// reuse as the next pending callback or destroys it.
void ClassLinker::VisiblyInitializedCallbackDone(Thread* self,
                                                 VisiblyInitializedCallback* callback) {
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Pass the barriers if requested.
  for (Barrier* barrier : callback->GetAndClearBarriers()) {
    barrier->Pass(self);
  }
  // Remove the callback from the list of running callbacks.
  // (Intrusive forward_list erase requires tracking the predecessor.)
  auto before = running_visibly_initialized_callbacks_.before_begin();
  auto it = running_visibly_initialized_callbacks_.begin();
  DCHECK(it != running_visibly_initialized_callbacks_.end());
  while (std::addressof(*it) != callback) {
    before = it;
    ++it;
    DCHECK(it != running_visibly_initialized_callbacks_.end());
  }
  running_visibly_initialized_callbacks_.erase_after(before);
  // Reuse or destroy the callback object.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(callback);
  } else {
    delete callback;
  }
}
384
// Synchronously pushes `klass` all the way to the visibly-initialized state:
// marks it initialized, runs the visibility checkpoint, and waits (suspended,
// so we do not block the checkpoint) for all callbacks to finish.
void ClassLinker::ForceClassInitialized(Thread* self, Handle<mirror::Class> klass) {
  ClassLinker::VisiblyInitializedCallback* cb = MarkClassInitialized(self, klass);
  if (cb != nullptr) {
    cb->MakeVisible(self);
  }
  ScopedThreadSuspension sts(self, ThreadState::kSuspended);
  MakeInitializedClassesVisiblyInitialized(self, /*wait=*/true);
}
393
// Marks `klass` initialized and batches it for the visibly-initialized
// transition. Returns a full callback that the caller must `MakeVisible()`,
// or null when no checkpoint is needed yet (batch not full, or the class
// could be marked visibly initialized immediately).
ClassLinker::VisiblyInitializedCallback* ClassLinker::MarkClassInitialized(
    Thread* self, Handle<mirror::Class> klass) {
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // Thanks to the x86 memory model, we do not need any memory fences and
    // we can immediately mark the class as visibly initialized.
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    // Transactions are single-threaded, so we can mark the class as visibly initialized.
    // (Otherwise we'd need to track the callback's entry in the transaction for rollback.)
    mirror::Class::SetStatus(klass, ClassStatus::kVisiblyInitialized, self);
    FixupStaticTrampolines(self, klass.Get());
    return nullptr;
  }
  mirror::Class::SetStatus(klass, ClassStatus::kInitialized, self);
  MutexLock lock(self, visibly_initialized_callback_lock_);
  // Lazily (re)create the pending batch.
  if (visibly_initialized_callback_ == nullptr) {
    visibly_initialized_callback_.reset(new VisiblyInitializedCallback(this));
  }
  DCHECK(!visibly_initialized_callback_->IsFull());
  visibly_initialized_callback_->AddClass(self, klass.Get());

  if (visibly_initialized_callback_->IsFull()) {
    // Hand the full batch to the caller to run the checkpoint; ownership
    // moves to the running-callbacks list until completion.
    VisiblyInitializedCallback* callback = visibly_initialized_callback_.release();
    running_visibly_initialized_callbacks_.push_front(*callback);
    return callback;
  } else {
    return nullptr;
  }
}
426
// Registers `native_method` as the implementation of the native `method`,
// giving runtime callbacks a chance to substitute the pointer. For
// @CriticalNative methods the JNI entrypoint is only installed once the
// declaring class is visibly initialized; otherwise it is parked in
// `critical_native_code_with_clinit_check_` to be installed later.
// Returns the (possibly substituted) native method pointer.
const void* ClassLinker::RegisterNative(
    Thread* self, ArtMethod* method, const void* native_method) {
  CHECK(method->IsNative()) << method->PrettyMethod();
  CHECK(native_method != nullptr) << method->PrettyMethod();
  void* new_native_method = nullptr;
  Runtime* runtime = Runtime::Current();
  runtime->GetRuntimeCallbacks()->RegisterNativeMethod(method,
                                                       native_method,
                                                       /*out*/&new_native_method);
  if (method->IsCriticalNative()) {
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Remove old registered method if any.
    auto it = critical_native_code_with_clinit_check_.find(method);
    if (it != critical_native_code_with_clinit_check_.end()) {
      critical_native_code_with_clinit_check_.erase(it);
    }
    // To ensure correct memory visibility, we need the class to be visibly
    // initialized before we can set the JNI entrypoint.
    if (method->GetDeclaringClass()->IsVisiblyInitialized()) {
      method->SetEntryPointFromJni(new_native_method);
    } else {
      critical_native_code_with_clinit_check_.emplace(method, new_native_method);
    }
  } else {
    method->SetEntryPointFromJni(new_native_method);
  }
  return new_native_method;
}
455
456void ClassLinker::UnregisterNative(Thread* self, ArtMethod* method) {
457 CHECK(method->IsNative()) << method->PrettyMethod();
458 // Restore stub to lookup native pointer via dlsym.
459 if (method->IsCriticalNative()) {
460 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
461 auto it = critical_native_code_with_clinit_check_.find(method);
462 if (it != critical_native_code_with_clinit_check_.end()) {
463 critical_native_code_with_clinit_check_.erase(it);
464 }
465 method->SetEntryPointFromJni(GetJniDlsymLookupCriticalStub());
466 } else {
467 method->SetEntryPointFromJni(GetJniDlsymLookupStub());
468 }
469}
470
471const void* ClassLinker::GetRegisteredNative(Thread* self, ArtMethod* method) {
472 if (method->IsCriticalNative()) {
473 MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
474 auto it = critical_native_code_with_clinit_check_.find(method);
475 if (it != critical_native_code_with_clinit_check_.end()) {
476 return it->second;
477 }
478 const void* native_code = method->GetEntryPointFromJni();
479 return IsJniDlsymLookupCriticalStub(native_code) ? nullptr : native_code;
480 } else {
481 const void* native_code = method->GetEntryPointFromJni();
482 return IsJniDlsymLookupStub(native_code) ? nullptr : native_code;
483 }
484}
485
// Throws the appropriate exception for a class that already failed
// verification/initialization on an earlier attempt: the stored error is
// rethrown where the spec requires it, otherwise a NoClassDefFoundError
// (optionally wrapping the stored error as cause) is raised.
void ClassLinker::ThrowEarlierClassFailure(ObjPtr<mirror::Class> c,
                                           bool wrap_in_no_class_def,
                                           bool log) {
  // The class failed to initialize on a previous attempt, so we want to throw
  // a NoClassDefFoundError (v2 2.17.5). The exception to this rule is if we
  // failed in verification, in which case v2 5.4.1 says we need to re-throw
  // the previous error.
  Runtime* const runtime = Runtime::Current();
  if (!runtime->IsAotCompiler()) {  // Give info if this occurs at runtime.
    std::string extra;
    ObjPtr<mirror::Object> verify_error = GetErroneousStateError(c);
    if (verify_error != nullptr) {
      DCHECK(!verify_error->IsClass());
      extra = verify_error->AsThrowable()->Dump();
    }
    if (log) {
      LOG(INFO) << "Rejecting re-init on previously-failed class " << c->PrettyClass()
                << ": " << extra;
    }
  }

  CHECK(c->IsErroneous()) << c->PrettyClass() << " " << c->GetStatus();
  Thread* self = Thread::Current();
  if (runtime->IsAotCompiler()) {
    // At compile time, accurate errors and NCDFE are disabled to speed compilation.
    ObjPtr<mirror::Throwable> pre_allocated = runtime->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
  } else {
    ObjPtr<mirror::Object> erroneous_state_error = GetErroneousStateError(c);
    if (erroneous_state_error != nullptr) {
      // Rethrow stored error.
      HandleEarlierErroneousStateError(self, this, c);
    }
    // TODO This might be wrong if we hit an OOME while allocating the ClassExt. In that case we
    // might have meant to go down the earlier if statement with the original error but it got
    // swallowed by the OOM so we end up here.
    if (erroneous_state_error == nullptr ||
        (wrap_in_no_class_def && !IsVerifyError(erroneous_state_error))) {
      // If there isn't a recorded earlier error, or this is a repeat throw from initialization,
      // the top-level exception must be a NoClassDefFoundError. The potentially already pending
      // exception will be a cause.
      self->ThrowNewWrappedException("Ljava/lang/NoClassDefFoundError;",
                                     c->PrettyDescriptor().c_str());
    }
  }
}
532
Brian Carlstromb23eab12014-10-08 17:55:21 -0700533static void VlogClassInitializationFailure(Handle<mirror::Class> klass)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700534 REQUIRES_SHARED(Locks::mutator_lock_) {
Brian Carlstromb23eab12014-10-08 17:55:21 -0700535 if (VLOG_IS_ON(class_linker)) {
536 std::string temp;
537 LOG(INFO) << "Failed to initialize class " << klass->GetDescriptor(&temp) << " from "
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000538 << klass->GetLocation() << "\n" << Thread::Current()->GetException()->Dump();
Brian Carlstromb23eab12014-10-08 17:55:21 -0700539 }
540}
541
// Wraps the thread's pending exception in an ExceptionInInitializerError as
// required for failed <clinit> execution, leaving Error subclasses as-is,
// and vlogs the failure.
static void WrapExceptionInInitializer(Handle<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  Thread* self = Thread::Current();

  ObjPtr<mirror::Throwable> cause = self->GetException();
  CHECK(cause != nullptr);

  // Boot classpath classes should not fail initialization. This is a consistency debug check.
  // This cannot in general be guaranteed, but in all likelihood leads to breakage down the line.
  if (klass->GetClassLoader() == nullptr && !Runtime::Current()->IsAotCompiler()) {
    std::string tmp;
    // We want to LOG(FATAL) on debug builds since this really shouldn't be happening but we need to
    // make sure to only do it if we don't have AsyncExceptions being thrown around since those
    // could have caused the error.
    bool known_impossible = kIsDebugBuild && !Runtime::Current()->AreAsyncExceptionsThrown();
    LOG(known_impossible ? FATAL : WARNING) << klass->GetDescriptor(&tmp)
                                            << " failed initialization: "
                                            << self->GetException()->Dump();
  }

  // We only wrap non-Error exceptions; an Error can just be used as-is.
  if (!cause->IsError()) {
    self->ThrowNewWrappedException("Ljava/lang/ExceptionInInitializerError;", nullptr);
  }
  VlogClassInitializationFailure(klass);
}
568
// Construct a ClassLinker with no classes loaded yet. `intern_table` (owned by the
// Runtime) must be non-null. Trampoline entrypoints start null; they are filled in
// either from the boot image or in InitWithoutImage(). The member-initializer order
// below must match the member declaration order in class_linker.h.
ClassLinker::ClassLinker(InternTable* intern_table, bool fast_class_not_found_exceptions)
    : boot_class_table_(new ClassTable()),
      failed_dex_cache_class_lookups_(0),
      class_roots_(nullptr),
      find_array_class_cache_next_victim_(0),
      init_done_(false),
      log_new_roots_(false),
      intern_table_(intern_table),
      fast_class_not_found_exceptions_(fast_class_not_found_exceptions),
      jni_dlsym_lookup_trampoline_(nullptr),
      jni_dlsym_lookup_critical_trampoline_(nullptr),
      quick_resolution_trampoline_(nullptr),
      quick_imt_conflict_trampoline_(nullptr),
      quick_generic_jni_trampoline_(nullptr),
      quick_to_interpreter_bridge_trampoline_(nullptr),
      nterp_trampoline_(nullptr),
      image_pointer_size_(kRuntimePointerSize),
      visibly_initialized_callback_lock_("visibly initialized callback lock"),
      visibly_initialized_callback_(nullptr),
      critical_native_code_with_clinit_check_lock_("critical native code with clinit check lock"),
      critical_native_code_with_clinit_check_(),
      cha_(Runtime::Current()->IsAotCompiler() ? nullptr : new ClassHierarchyAnalysis()) {
  // For CHA disabled during Aot, see b/34193647.

  CHECK(intern_table_ != nullptr);
  // Keep the fixed-size array-class cache in sync with its declared capacity, then
  // clear it so lookups start with all-null GC roots.
  static_assert(kFindArrayCacheSize == arraysize(find_array_class_cache_),
                "Array cache size wrong.");
  std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
}
Brian Carlstroma663ea52011-08-19 23:33:41 -0700598
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800599void ClassLinker::CheckSystemClass(Thread* self, Handle<mirror::Class> c1, const char* descriptor) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -0700600 ObjPtr<mirror::Class> c2 = FindSystemClass(self, descriptor);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800601 if (c2 == nullptr) {
602 LOG(FATAL) << "Could not find class " << descriptor;
603 UNREACHABLE();
604 }
605 if (c1.Get() != c2) {
606 std::ostringstream os1, os2;
607 c1->DumpClass(os1, mirror::Class::kDumpClassFullDetail);
608 c2->DumpClass(os2, mirror::Class::kDumpClassFullDetail);
609 LOG(FATAL) << "InitWithoutImage: Class mismatch for " << descriptor
610 << ". This is most likely the result of a broken build. Make sure that "
611 << "libcore and art projects match.\n\n"
612 << os1.str() << "\n\n" << os2.str();
613 UNREACHABLE();
614 }
615}
616
Vladimir Marko78f62d82022-01-10 16:25:19 +0000617ObjPtr<mirror::IfTable> AllocIfTable(Thread* self,
618 size_t ifcount,
619 ObjPtr<mirror::Class> iftable_class)
620 REQUIRES_SHARED(Locks::mutator_lock_) {
621 DCHECK(iftable_class->IsArrayClass());
622 DCHECK(iftable_class->GetComponentType()->IsObjectClass());
623 return ObjPtr<mirror::IfTable>::DownCast(ObjPtr<mirror::ObjectArray<mirror::Object>>(
624 mirror::IfTable::Alloc(self, iftable_class, ifcount * mirror::IfTable::kMax)));
625}
626
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800627bool ClassLinker::InitWithoutImage(std::vector<std::unique_ptr<const DexFile>> boot_class_path,
628 std::string* error_msg) {
Brian Carlstroma004aa92012-02-08 18:05:09 -0800629 VLOG(startup) << "ClassLinker::Init";
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -0700630
Mathieu Chartiere401d142015-04-22 13:56:20 -0700631 Thread* const self = Thread::Current();
632 Runtime* const runtime = Runtime::Current();
633 gc::Heap* const heap = runtime->GetHeap();
634
Jeff Haodcdc85b2015-12-04 14:06:18 -0800635 CHECK(!heap->HasBootImageSpace()) << "Runtime has image. We should use it.";
Elliott Hughesd8ddfd52011-08-15 14:32:53 -0700636 CHECK(!init_done_);
Brian Carlstrom578bbdc2011-07-21 14:07:47 -0700637
Mathieu Chartiere401d142015-04-22 13:56:20 -0700638 // Use the pointer size from the runtime since we are probably creating the image.
639 image_pointer_size_ = InstructionSetPointerSize(runtime->GetInstructionSet());
640
Elliott Hughes30646832011-10-13 16:59:46 -0700641 // java_lang_Class comes first, it's needed for AllocClass
Mathieu Chartier590fee92013-09-13 13:46:47 -0700642 // The GC can't handle an object with a null class since we can't get the size of this object.
Mathieu Chartier1d27b342014-01-28 12:51:09 -0800643 heap->IncrementDisableMovingGC(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700644 StackHandleScope<64> hs(self); // 64 is picked arbitrarily.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700645 auto class_class_size = mirror::Class::ClassClassSize(image_pointer_size_);
Mathieu Chartierd7a7f2f2018-09-07 11:57:18 -0700646 // Allocate the object as non-movable so that there are no cases where Object::IsClass returns
647 // the incorrect result when comparing to-space vs from-space.
Vladimir Markod7e9bbf2019-03-28 13:18:57 +0000648 Handle<mirror::Class> java_lang_Class(hs.NewHandle(ObjPtr<mirror::Class>::DownCast(
Vladimir Marko991cd5c2019-05-30 14:23:39 +0100649 heap->AllocNonMovableObject(self, nullptr, class_class_size, VoidFunctor()))));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800650 CHECK(java_lang_Class != nullptr);
Vladimir Marko317892b2018-05-31 11:11:32 +0100651 java_lang_Class->SetClassFlags(mirror::kClassFlagClass);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700652 java_lang_Class->SetClass(java_lang_Class.Get());
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -0700653 if (kUseBakerReadBarrier) {
654 java_lang_Class->AssertReadBarrierState();
Hiroshi Yamauchi9d04a202014-01-31 13:35:49 -0800655 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700656 java_lang_Class->SetClassSize(class_class_size);
Hiroshi Yamauchif0edfc32014-09-25 11:46:46 -0700657 java_lang_Class->SetPrimitiveType(Primitive::kPrimNot);
Mathieu Chartier1d27b342014-01-28 12:51:09 -0800658 heap->DecrementDisableMovingGC(self);
Mathieu Chartier28357fa2016-10-18 16:27:40 -0700659 // AllocClass(ObjPtr<mirror::Class>) can now be used
Brian Carlstroma0808032011-07-18 00:39:23 -0700660
Elliott Hughes418d20f2011-09-22 14:00:39 -0700661 // Class[] is used for reflection support.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700662 auto class_array_class_size = mirror::ObjectArray<mirror::Class>::ClassSize(image_pointer_size_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700663 Handle<mirror::Class> class_array_class(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700664 AllocClass(self, java_lang_Class.Get(), class_array_class_size)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700665 class_array_class->SetComponentType(java_lang_Class.Get());
Elliott Hughes418d20f2011-09-22 14:00:39 -0700666
Ian Rogers23435d02012-09-24 11:23:12 -0700667 // java_lang_Object comes next so that object_array_class can be created.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700668 Handle<mirror::Class> java_lang_Object(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700669 AllocClass(self, java_lang_Class.Get(), mirror::Object::ClassSize(image_pointer_size_))));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800670 CHECK(java_lang_Object != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700671 // backfill Object as the super class of Class.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700672 java_lang_Class->SetSuperClass(java_lang_Object.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000673 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kLoaded, self);
Brian Carlstroma0808032011-07-18 00:39:23 -0700674
Mathieu Chartier673ed3d2015-08-28 14:56:43 -0700675 java_lang_Object->SetObjectSize(sizeof(mirror::Object));
Hiroshi Yamauchi04302db2015-11-11 23:45:34 -0800676 // Allocate in non-movable so that it's possible to check if a JNI weak global ref has been
677 // cleared without triggering the read barrier and unintentionally mark the sentinel alive.
Vladimir Marko991cd5c2019-05-30 14:23:39 +0100678 runtime->SetSentinel(heap->AllocNonMovableObject(self,
679 java_lang_Object.Get(),
680 java_lang_Object->GetObjectSize(),
681 VoidFunctor()));
Mathieu Chartier673ed3d2015-08-28 14:56:43 -0700682
Igor Murashkin86083f72017-10-27 10:59:04 -0700683 // Initialize the SubtypeCheck bitstring for java.lang.Object and java.lang.Class.
Vladimir Marko305c38b2018-02-14 11:50:07 +0000684 if (kBitstringSubtypeCheckEnabled) {
Igor Murashkin86083f72017-10-27 10:59:04 -0700685 // It might seem the lock here is unnecessary, however all the SubtypeCheck
686 // functions are annotated to require locks all the way down.
687 //
688 // We take the lock here to avoid using NO_THREAD_SAFETY_ANALYSIS.
689 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
Vladimir Marko38b8b252018-01-02 19:07:06 +0000690 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Object.Get());
691 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(java_lang_Class.Get());
Igor Murashkin86083f72017-10-27 10:59:04 -0700692 }
693
Ian Rogers23435d02012-09-24 11:23:12 -0700694 // Object[] next to hold class roots.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700695 Handle<mirror::Class> object_array_class(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700696 AllocClass(self, java_lang_Class.Get(),
697 mirror::ObjectArray<mirror::Object>::ClassSize(image_pointer_size_))));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700698 object_array_class->SetComponentType(java_lang_Object.Get());
Brian Carlstroma0808032011-07-18 00:39:23 -0700699
Roland Levillain0e840272018-08-23 19:55:30 +0100700 // Setup java.lang.String.
701 //
702 // We make this class non-movable for the unlikely case where it were to be
703 // moved by a sticky-bit (minor) collection when using the Generational
704 // Concurrent Copying (CC) collector, potentially creating a stale reference
705 // in the `klass_` field of one of its instances allocated in the Large-Object
706 // Space (LOS) -- see the comment about the dirty card scanning logic in
707 // art::gc::collector::ConcurrentCopying::MarkingPhase.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700708 Handle<mirror::Class> java_lang_String(hs.NewHandle(
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700709 AllocClass</* kMovable= */ false>(
Roland Levillain0e840272018-08-23 19:55:30 +0100710 self, java_lang_Class.Get(), mirror::String::ClassSize(image_pointer_size_))));
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700711 java_lang_String->SetStringClass();
Vladimir Marko2c64a832018-01-04 11:31:56 +0000712 mirror::Class::SetStatus(java_lang_String, ClassStatus::kResolved, self);
Jesse Wilson14150742011-07-29 19:04:44 -0400713
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700714 // Setup java.lang.ref.Reference.
Fred Shih4ee7a662014-07-11 09:59:27 -0700715 Handle<mirror::Class> java_lang_ref_Reference(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700716 AllocClass(self, java_lang_Class.Get(), mirror::Reference::ClassSize(image_pointer_size_))));
Fred Shih4ee7a662014-07-11 09:59:27 -0700717 java_lang_ref_Reference->SetObjectSize(mirror::Reference::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000718 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kResolved, self);
Fred Shih4ee7a662014-07-11 09:59:27 -0700719
Ian Rogers23435d02012-09-24 11:23:12 -0700720 // Create storage for root classes, save away our work so far (requires descriptors).
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700721 class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100722 mirror::ObjectArray<mirror::Class>::Alloc(self,
723 object_array_class.Get(),
724 static_cast<int32_t>(ClassRoot::kMax)));
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700725 CHECK(!class_roots_.IsNull());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100726 SetClassRoot(ClassRoot::kJavaLangClass, java_lang_Class.Get());
727 SetClassRoot(ClassRoot::kJavaLangObject, java_lang_Object.Get());
728 SetClassRoot(ClassRoot::kClassArrayClass, class_array_class.Get());
729 SetClassRoot(ClassRoot::kObjectArrayClass, object_array_class.Get());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100730 SetClassRoot(ClassRoot::kJavaLangString, java_lang_String.Get());
731 SetClassRoot(ClassRoot::kJavaLangRefReference, java_lang_ref_Reference.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700732
Mathieu Chartier6beced42016-11-15 15:51:31 -0800733 // Fill in the empty iftable. Needs to be done after the kObjectArrayClass root is set.
Vladimir Marko78f62d82022-01-10 16:25:19 +0000734 java_lang_Object->SetIfTable(AllocIfTable(self, 0, object_array_class.Get()));
Mathieu Chartier6beced42016-11-15 15:51:31 -0800735
Vladimir Marko02610552018-06-04 14:38:00 +0100736 // Create array interface entries to populate once we can load system classes.
Vladimir Marko78f62d82022-01-10 16:25:19 +0000737 object_array_class->SetIfTable(AllocIfTable(self, 2, object_array_class.Get()));
Vladimir Marko02610552018-06-04 14:38:00 +0100738 DCHECK_EQ(GetArrayIfTable(), object_array_class->GetIfTable());
739
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700740 // Setup the primitive type classes.
Vladimir Marko70e2a762019-07-12 16:49:00 +0100741 CreatePrimitiveClass(self, Primitive::kPrimBoolean, ClassRoot::kPrimitiveBoolean);
742 CreatePrimitiveClass(self, Primitive::kPrimByte, ClassRoot::kPrimitiveByte);
743 CreatePrimitiveClass(self, Primitive::kPrimChar, ClassRoot::kPrimitiveChar);
744 CreatePrimitiveClass(self, Primitive::kPrimShort, ClassRoot::kPrimitiveShort);
745 CreatePrimitiveClass(self, Primitive::kPrimInt, ClassRoot::kPrimitiveInt);
746 CreatePrimitiveClass(self, Primitive::kPrimLong, ClassRoot::kPrimitiveLong);
747 CreatePrimitiveClass(self, Primitive::kPrimFloat, ClassRoot::kPrimitiveFloat);
748 CreatePrimitiveClass(self, Primitive::kPrimDouble, ClassRoot::kPrimitiveDouble);
749 CreatePrimitiveClass(self, Primitive::kPrimVoid, ClassRoot::kPrimitiveVoid);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700750
Vladimir Marko70e2a762019-07-12 16:49:00 +0100751 // Allocate the primitive array classes. We need only the native pointer
752 // array at this point (int[] or long[], depending on architecture) but
753 // we shall perform the same setup steps for all primitive array classes.
754 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveBoolean, ClassRoot::kBooleanArrayClass);
755 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveByte, ClassRoot::kByteArrayClass);
756 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveChar, ClassRoot::kCharArrayClass);
757 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveShort, ClassRoot::kShortArrayClass);
758 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveInt, ClassRoot::kIntArrayClass);
759 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveLong, ClassRoot::kLongArrayClass);
760 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveFloat, ClassRoot::kFloatArrayClass);
761 AllocPrimitiveArrayClass(self, ClassRoot::kPrimitiveDouble, ClassRoot::kDoubleArrayClass);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700762
Brian Carlstrom75cb3b42011-07-28 02:13:36 -0700763 // now that these are registered, we can use AllocClass() and AllocObjectArray
Brian Carlstroma0808032011-07-18 00:39:23 -0700764
Ian Rogers52813c92012-10-11 11:50:38 -0700765 // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700766 Handle<mirror::Class> java_lang_DexCache(hs.NewHandle(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700767 AllocClass(self, java_lang_Class.Get(), mirror::DexCache::ClassSize(image_pointer_size_))));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100768 SetClassRoot(ClassRoot::kJavaLangDexCache, java_lang_DexCache.Get());
Vladimir Marko05792b92015-08-03 11:56:49 +0100769 java_lang_DexCache->SetDexCacheClass();
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700770 java_lang_DexCache->SetObjectSize(mirror::DexCache::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000771 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kResolved, self);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700772
Alex Lightd6251582016-10-31 11:12:30 -0700773
774 // Setup dalvik.system.ClassExt
775 Handle<mirror::Class> dalvik_system_ClassExt(hs.NewHandle(
776 AllocClass(self, java_lang_Class.Get(), mirror::ClassExt::ClassSize(image_pointer_size_))));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100777 SetClassRoot(ClassRoot::kDalvikSystemClassExt, dalvik_system_ClassExt.Get());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000778 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kResolved, self);
Alex Lightd6251582016-10-31 11:12:30 -0700779
Mathieu Chartier66f19252012-09-18 08:57:04 -0700780 // Set up array classes for string, field, method
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700781 Handle<mirror::Class> object_array_string(hs.NewHandle(
782 AllocClass(self, java_lang_Class.Get(),
Mathieu Chartiere401d142015-04-22 13:56:20 -0700783 mirror::ObjectArray<mirror::String>::ClassSize(image_pointer_size_))));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700784 object_array_string->SetComponentType(java_lang_String.Get());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100785 SetClassRoot(ClassRoot::kJavaLangStringArrayClass, object_array_string.Get());
Mathieu Chartier66f19252012-09-18 08:57:04 -0700786
Nicolas Geoffray796d6302016-03-13 22:22:31 +0000787 LinearAlloc* linear_alloc = runtime->GetLinearAlloc();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700788 // Create runtime resolution and imt conflict methods.
789 runtime->SetResolutionMethod(runtime->CreateResolutionMethod());
Nicolas Geoffray796d6302016-03-13 22:22:31 +0000790 runtime->SetImtConflictMethod(runtime->CreateImtConflictMethod(linear_alloc));
791 runtime->SetImtUnimplementedMethod(runtime->CreateImtConflictMethod(linear_alloc));
Ian Rogers4445a7e2012-10-05 17:19:13 -0700792
Ian Rogers23435d02012-09-24 11:23:12 -0700793 // Setup boot_class_path_ and register class_path now that we can use AllocObjectArray to create
794 // DexCache instances. Needs to be after String, Field, Method arrays since AllocDexCache uses
795 // these roots.
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800796 if (boot_class_path.empty()) {
797 *error_msg = "Boot classpath is empty.";
798 return false;
799 }
Richard Uhlerfbef44d2014-12-23 09:48:51 -0800800 for (auto& dex_file : boot_class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -0800801 if (dex_file == nullptr) {
Andreas Gampe3db9c5d2015-11-17 11:52:46 -0800802 *error_msg = "Null dex file.";
803 return false;
804 }
Mathieu Chartier0a19e212019-11-27 14:35:24 -0800805 AppendToBootClassPath(self, dex_file.get());
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800806 boot_dex_files_.push_back(std::move(dex_file));
Mathieu Chartier66f19252012-09-18 08:57:04 -0700807 }
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700808
809 // now we can use FindSystemClass
810
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700811 // Set up GenericJNI entrypoint. That is mainly a hack for common_compiler_test.h so that
812 // we do not need friend classes or a publicly exposed setter.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700813 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800814 if (!runtime->IsAotCompiler()) {
Alex Light64ad14d2014-08-19 14:23:13 -0700815 // We need to set up the generic trampolines since we don't have an image.
Vladimir Marko7dac8642019-11-06 17:09:30 +0000816 jni_dlsym_lookup_trampoline_ = GetJniDlsymLookupStub();
Vladimir Markofa458ac2020-02-12 14:08:07 +0000817 jni_dlsym_lookup_critical_trampoline_ = GetJniDlsymLookupCriticalStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700818 quick_resolution_trampoline_ = GetQuickResolutionStub();
819 quick_imt_conflict_trampoline_ = GetQuickImtConflictStub();
Vladimir Marko7dac8642019-11-06 17:09:30 +0000820 quick_generic_jni_trampoline_ = GetQuickGenericJniStub();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700821 quick_to_interpreter_bridge_trampoline_ = GetQuickToInterpreterBridge();
Nicolas Geoffrayc39af942021-01-25 08:43:57 +0000822 nterp_trampoline_ = interpreter::GetNterpEntryPoint();
Alex Light64ad14d2014-08-19 14:23:13 -0700823 }
Dmitry Petrochenkof0972a42014-05-16 17:43:39 +0700824
Alex Lightd6251582016-10-31 11:12:30 -0700825 // Object, String, ClassExt and DexCache need to be rerun through FindSystemClass to finish init
Vladimir Marko2c64a832018-01-04 11:31:56 +0000826 mirror::Class::SetStatus(java_lang_Object, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800827 CheckSystemClass(self, java_lang_Object, "Ljava/lang/Object;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700828 CHECK_EQ(java_lang_Object->GetObjectSize(), mirror::Object::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000829 mirror::Class::SetStatus(java_lang_String, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800830 CheckSystemClass(self, java_lang_String, "Ljava/lang/String;");
Vladimir Marko2c64a832018-01-04 11:31:56 +0000831 mirror::Class::SetStatus(java_lang_DexCache, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800832 CheckSystemClass(self, java_lang_DexCache, "Ljava/lang/DexCache;");
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700833 CHECK_EQ(java_lang_DexCache->GetObjectSize(), mirror::DexCache::InstanceSize());
Vladimir Marko2c64a832018-01-04 11:31:56 +0000834 mirror::Class::SetStatus(dalvik_system_ClassExt, ClassStatus::kNotReady, self);
Alex Lightd6251582016-10-31 11:12:30 -0700835 CheckSystemClass(self, dalvik_system_ClassExt, "Ldalvik/system/ClassExt;");
836 CHECK_EQ(dalvik_system_ClassExt->GetObjectSize(), mirror::ClassExt::InstanceSize());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700837
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800838 // Run Class through FindSystemClass. This initializes the dex_cache_ fields and register it
839 // in class_table_.
840 CheckSystemClass(self, java_lang_Class, "Ljava/lang/Class;");
Elliott Hughes418d20f2011-09-22 14:00:39 -0700841
Vladimir Marko70e2a762019-07-12 16:49:00 +0100842 // Setup core array classes, i.e. Object[], String[] and Class[] and primitive
843 // arrays - can't be done until Object has a vtable and component classes are loaded.
844 FinishCoreArrayClassSetup(ClassRoot::kObjectArrayClass);
845 FinishCoreArrayClassSetup(ClassRoot::kClassArrayClass);
846 FinishCoreArrayClassSetup(ClassRoot::kJavaLangStringArrayClass);
847 FinishCoreArrayClassSetup(ClassRoot::kBooleanArrayClass);
848 FinishCoreArrayClassSetup(ClassRoot::kByteArrayClass);
849 FinishCoreArrayClassSetup(ClassRoot::kCharArrayClass);
850 FinishCoreArrayClassSetup(ClassRoot::kShortArrayClass);
851 FinishCoreArrayClassSetup(ClassRoot::kIntArrayClass);
852 FinishCoreArrayClassSetup(ClassRoot::kLongArrayClass);
853 FinishCoreArrayClassSetup(ClassRoot::kFloatArrayClass);
854 FinishCoreArrayClassSetup(ClassRoot::kDoubleArrayClass);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700855
Ian Rogers23435d02012-09-24 11:23:12 -0700856 // Setup the single, global copy of "iftable".
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700857 auto java_lang_Cloneable = hs.NewHandle(FindSystemClass(self, "Ljava/lang/Cloneable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800858 CHECK(java_lang_Cloneable != nullptr);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700859 auto java_io_Serializable = hs.NewHandle(FindSystemClass(self, "Ljava/io/Serializable;"));
Andreas Gampefa4333d2017-02-14 11:10:34 -0800860 CHECK(java_io_Serializable != nullptr);
Ian Rogers23435d02012-09-24 11:23:12 -0700861 // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
862 // crawl up and explicitly list all of the supers as well.
Vladimir Marko02610552018-06-04 14:38:00 +0100863 object_array_class->GetIfTable()->SetInterface(0, java_lang_Cloneable.Get());
864 object_array_class->GetIfTable()->SetInterface(1, java_io_Serializable.Get());
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700865
Vladimir Markob10668c2021-06-10 09:52:53 +0100866 // Check Class[] and Object[]'s interfaces.
867 CHECK_EQ(java_lang_Cloneable.Get(), class_array_class->GetDirectInterface(0));
868 CHECK_EQ(java_io_Serializable.Get(), class_array_class->GetDirectInterface(1));
869 CHECK_EQ(java_lang_Cloneable.Get(), object_array_class->GetDirectInterface(0));
870 CHECK_EQ(java_io_Serializable.Get(), object_array_class->GetDirectInterface(1));
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700871
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700872 CHECK_EQ(object_array_string.Get(),
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100873 FindSystemClass(self, GetClassRootDescriptor(ClassRoot::kJavaLangStringArrayClass)));
Brian Carlstrom1f870082011-08-23 16:02:11 -0700874
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800875 // End of special init trickery, all subsequent classes may be loaded via FindSystemClass.
Ian Rogers466bb252011-10-14 03:29:56 -0700876
Ian Rogers23435d02012-09-24 11:23:12 -0700877 // Create java.lang.reflect.Proxy root.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100878 SetClassRoot(ClassRoot::kJavaLangReflectProxy,
879 FindSystemClass(self, "Ljava/lang/reflect/Proxy;"));
Ian Rogers466bb252011-10-14 03:29:56 -0700880
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700881 // Create java.lang.reflect.Field.class root.
Vladimir Markoacb906d2018-05-30 10:23:49 +0100882 ObjPtr<mirror::Class> class_root = FindSystemClass(self, "Ljava/lang/reflect/Field;");
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700883 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100884 SetClassRoot(ClassRoot::kJavaLangReflectField, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700885
886 // Create java.lang.reflect.Field array root.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700887 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Field;");
888 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100889 SetClassRoot(ClassRoot::kJavaLangReflectFieldArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700890
891 // Create java.lang.reflect.Constructor.class root and array root.
892 class_root = FindSystemClass(self, "Ljava/lang/reflect/Constructor;");
893 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100894 SetClassRoot(ClassRoot::kJavaLangReflectConstructor, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700895 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Constructor;");
896 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100897 SetClassRoot(ClassRoot::kJavaLangReflectConstructorArrayClass, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700898
899 // Create java.lang.reflect.Method.class root and array root.
900 class_root = FindSystemClass(self, "Ljava/lang/reflect/Method;");
901 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100902 SetClassRoot(ClassRoot::kJavaLangReflectMethod, class_root);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700903 class_root = FindSystemClass(self, "[Ljava/lang/reflect/Method;");
904 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100905 SetClassRoot(ClassRoot::kJavaLangReflectMethodArrayClass, class_root);
Mathieu Chartierdaaf3262015-03-24 13:30:28 -0700906
Orion Hodson005ac512017-10-24 15:43:43 +0100907 // Create java.lang.invoke.CallSite.class root
908 class_root = FindSystemClass(self, "Ljava/lang/invoke/CallSite;");
909 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100910 SetClassRoot(ClassRoot::kJavaLangInvokeCallSite, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100911
Narayan Kamathafa48272016-08-03 12:46:58 +0100912 // Create java.lang.invoke.MethodType.class root
913 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodType;");
914 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100915 SetClassRoot(ClassRoot::kJavaLangInvokeMethodType, class_root);
Narayan Kamathafa48272016-08-03 12:46:58 +0100916
917 // Create java.lang.invoke.MethodHandleImpl.class root
918 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandleImpl;");
919 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100920 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandleImpl, class_root);
Vladimir Markoc7aa87e2018-05-24 15:19:52 +0100921 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandle, class_root->GetSuperClass());
Narayan Kamathafa48272016-08-03 12:46:58 +0100922
Orion Hodsonc069a302017-01-18 09:23:12 +0000923 // Create java.lang.invoke.MethodHandles.Lookup.class root
924 class_root = FindSystemClass(self, "Ljava/lang/invoke/MethodHandles$Lookup;");
925 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100926 SetClassRoot(ClassRoot::kJavaLangInvokeMethodHandlesLookup, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000927
Orion Hodson005ac512017-10-24 15:43:43 +0100928 // Create java.lang.invoke.VarHandle.class root
929 class_root = FindSystemClass(self, "Ljava/lang/invoke/VarHandle;");
Orion Hodsonc069a302017-01-18 09:23:12 +0000930 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100931 SetClassRoot(ClassRoot::kJavaLangInvokeVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100932
933 // Create java.lang.invoke.FieldVarHandle.class root
934 class_root = FindSystemClass(self, "Ljava/lang/invoke/FieldVarHandle;");
935 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100936 SetClassRoot(ClassRoot::kJavaLangInvokeFieldVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100937
Orion Hodsondd411962021-06-25 08:55:22 +0100938 // Create java.lang.invoke.StaticFieldVarHandle.class root
939 class_root = FindSystemClass(self, "Ljava/lang/invoke/StaticFieldVarHandle;");
940 CHECK(class_root != nullptr);
941 SetClassRoot(ClassRoot::kJavaLangInvokeStaticFieldVarHandle, class_root);
942
Orion Hodson005ac512017-10-24 15:43:43 +0100943 // Create java.lang.invoke.ArrayElementVarHandle.class root
944 class_root = FindSystemClass(self, "Ljava/lang/invoke/ArrayElementVarHandle;");
945 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100946 SetClassRoot(ClassRoot::kJavaLangInvokeArrayElementVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100947
948 // Create java.lang.invoke.ByteArrayViewVarHandle.class root
949 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteArrayViewVarHandle;");
950 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100951 SetClassRoot(ClassRoot::kJavaLangInvokeByteArrayViewVarHandle, class_root);
Orion Hodson005ac512017-10-24 15:43:43 +0100952
953 // Create java.lang.invoke.ByteBufferViewVarHandle.class root
954 class_root = FindSystemClass(self, "Ljava/lang/invoke/ByteBufferViewVarHandle;");
955 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100956 SetClassRoot(ClassRoot::kJavaLangInvokeByteBufferViewVarHandle, class_root);
Orion Hodsonc069a302017-01-18 09:23:12 +0000957
Narayan Kamath000e1882016-10-24 17:14:25 +0100958 class_root = FindSystemClass(self, "Ldalvik/system/EmulatedStackFrame;");
959 CHECK(class_root != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100960 SetClassRoot(ClassRoot::kDalvikSystemEmulatedStackFrame, class_root);
Narayan Kamath000e1882016-10-24 17:14:25 +0100961
Brian Carlstrom1f870082011-08-23 16:02:11 -0700962 // java.lang.ref classes need to be specially flagged, but otherwise are normal classes
Fred Shih4ee7a662014-07-11 09:59:27 -0700963 // finish initializing Reference class
Vladimir Marko2c64a832018-01-04 11:31:56 +0000964 mirror::Class::SetStatus(java_lang_ref_Reference, ClassStatus::kNotReady, self);
Andreas Gampe7ba5a672016-02-04 21:45:01 -0800965 CheckSystemClass(self, java_lang_ref_Reference, "Ljava/lang/ref/Reference;");
Fred Shih4ee7a662014-07-11 09:59:27 -0700966 CHECK_EQ(java_lang_ref_Reference->GetObjectSize(), mirror::Reference::InstanceSize());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700967 CHECK_EQ(java_lang_ref_Reference->GetClassSize(),
968 mirror::Reference::ClassSize(image_pointer_size_));
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700969 class_root = FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700970 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700971 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagFinalizerReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700972 class_root = FindSystemClass(self, "Ljava/lang/ref/PhantomReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700973 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700974 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagPhantomReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700975 class_root = FindSystemClass(self, "Ljava/lang/ref/SoftReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700976 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700977 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagSoftReference);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700978 class_root = FindSystemClass(self, "Ljava/lang/ref/WeakReference;");
Mathieu Chartier66c2d2d2015-08-25 14:32:32 -0700979 CHECK_EQ(class_root->GetClassFlags(), mirror::kClassFlagNormal);
Mathieu Chartier52a7f5c2015-08-18 18:35:52 -0700980 class_root->SetClassFlags(class_root->GetClassFlags() | mirror::kClassFlagWeakReference);
Brian Carlstrom1f870082011-08-23 16:02:11 -0700981
Ian Rogers23435d02012-09-24 11:23:12 -0700982 // Setup the ClassLoader, verifying the object_size_.
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700983 class_root = FindSystemClass(self, "Ljava/lang/ClassLoader;");
Mathieu Chartiere4275c02015-08-06 15:34:15 -0700984 class_root->SetClassLoaderClass();
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700985 CHECK_EQ(class_root->GetObjectSize(), mirror::ClassLoader::InstanceSize());
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100986 SetClassRoot(ClassRoot::kJavaLangClassLoader, class_root);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700987
jeffhao8cd6dda2012-02-22 10:15:34 -0800988 // Set up java.lang.Throwable, java.lang.ClassNotFoundException, and
Ian Rogers23435d02012-09-24 11:23:12 -0700989 // java.lang.StackTraceElement as a convenience.
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100990 SetClassRoot(ClassRoot::kJavaLangThrowable, FindSystemClass(self, "Ljava/lang/Throwable;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100991 SetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
Brian Carlstromf3632832014-05-20 15:36:53 -0700992 FindSystemClass(self, "Ljava/lang/ClassNotFoundException;"));
Vladimir Markob4eb1b12018-05-24 11:09:38 +0100993 SetClassRoot(ClassRoot::kJavaLangStackTraceElement,
994 FindSystemClass(self, "Ljava/lang/StackTraceElement;"));
995 SetClassRoot(ClassRoot::kJavaLangStackTraceElementArrayClass,
Brian Carlstromf3632832014-05-20 15:36:53 -0700996 FindSystemClass(self, "[Ljava/lang/StackTraceElement;"));
Nicolas Geoffray6b9fd8c2018-11-16 10:25:42 +0000997 SetClassRoot(ClassRoot::kJavaLangClassLoaderArrayClass,
998 FindSystemClass(self, "[Ljava/lang/ClassLoader;"));
Elliott Hughesd8ddfd52011-08-15 14:32:53 -0700999
Mathieu Chartiercdca4762016-04-28 09:44:54 -07001000 // Create conflict tables that depend on the class linker.
1001 runtime->FixupConflictTables();
1002
Ian Rogers98379392014-02-24 16:53:16 -08001003 FinishInit(self);
Brian Carlstrom0a5b14d2011-09-27 13:29:15 -07001004
Brian Carlstroma004aa92012-02-08 18:05:09 -08001005 VLOG(startup) << "ClassLinker::InitFromCompiler exiting";
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001006
1007 return true;
Brian Carlstroma663ea52011-08-19 23:33:41 -07001008}
1009
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001010static void CreateStringInitBindings(Thread* self, ClassLinker* class_linker)
1011 REQUIRES_SHARED(Locks::mutator_lock_) {
1012 // Find String.<init> -> StringFactory bindings.
1013 ObjPtr<mirror::Class> string_factory_class =
1014 class_linker->FindSystemClass(self, "Ljava/lang/StringFactory;");
1015 CHECK(string_factory_class != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +01001016 ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(class_linker);
Andreas Gampe9abc31e2018-05-17 11:47:09 -07001017 WellKnownClasses::InitStringInit(string_class, string_factory_class);
1018 // Update the primordial thread.
1019 self->InitStringEntryPoints();
1020}
1021
// Final phase of class-linker initialization: binds String.<init> factories,
// validates the hard-coded java.lang.ref field layout the GC relies on,
// sanity-checks every class root, and flips `init_done_` to enable fast paths.
void ClassLinker::FinishInit(Thread* self) {
  VLOG(startup) << "ClassLinker::FinishInit entering";

  CreateStringInitBindings(self, this);

  // Let the heap know some key offsets into java.lang.ref instances
  // Note: we hard code the field indexes here rather than using FindInstanceField
  // as the types of the field can't be resolved prior to the runtime being
  // fully initialized
  StackHandleScope<3> hs(self);
  Handle<mirror::Class> java_lang_ref_Reference =
      hs.NewHandle(GetClassRoot<mirror::Reference>(this));
  Handle<mirror::Class> java_lang_ref_FinalizerReference =
      hs.NewHandle(FindSystemClass(self, "Ljava/lang/ref/FinalizerReference;"));

  // Each CHECK_STREQ pair below verifies that the field at the assumed index
  // has the expected name and type descriptor, i.e. that the libcore sources
  // still match the hard-coded indexes used here.
  ArtField* pendingNext = java_lang_ref_Reference->GetInstanceField(0);
  CHECK_STREQ(pendingNext->GetName(), "pendingNext");
  CHECK_STREQ(pendingNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* queue = java_lang_ref_Reference->GetInstanceField(1);
  CHECK_STREQ(queue->GetName(), "queue");
  CHECK_STREQ(queue->GetTypeDescriptor(), "Ljava/lang/ref/ReferenceQueue;");

  ArtField* queueNext = java_lang_ref_Reference->GetInstanceField(2);
  CHECK_STREQ(queueNext->GetName(), "queueNext");
  CHECK_STREQ(queueNext->GetTypeDescriptor(), "Ljava/lang/ref/Reference;");

  ArtField* referent = java_lang_ref_Reference->GetInstanceField(3);
  CHECK_STREQ(referent->GetName(), "referent");
  CHECK_STREQ(referent->GetTypeDescriptor(), "Ljava/lang/Object;");

  ArtField* zombie = java_lang_ref_FinalizerReference->GetInstanceField(2);
  CHECK_STREQ(zombie->GetName(), "zombie");
  CHECK_STREQ(zombie->GetTypeDescriptor(), "Ljava/lang/Object;");

  // ensure all class_roots_ are initialized
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    ClassRoot class_root = static_cast<ClassRoot>(i);
    ObjPtr<mirror::Class> klass = GetClassRoot(class_root);
    CHECK(klass != nullptr);
    DCHECK(klass->IsArrayClass() || klass->IsPrimitive() || klass->GetDexCache() != nullptr);
    // note SetClassRoot does additional validation.
    // if possible add new checks there to catch errors early
  }

  CHECK(GetArrayIfTable() != nullptr);

  // disable the slow paths in FindClass and CreatePrimitiveClass now
  // that Object, Class, and Object[] are setup
  init_done_ = true;

  // Under sanitization, the small carve-out to handle stack overflow might not be enough to
  // initialize the StackOverflowError class (as it might require running the verifier). Instead,
  // ensure that the class will be initialized.
  if (kMemoryToolIsAvailable && !Runtime::Current()->IsAotCompiler()) {
    verifier::ClassVerifier::Init(this);  // Need to prepare the verifier.

    ObjPtr<mirror::Class> soe_klass = FindSystemClass(self, "Ljava/lang/StackOverflowError;");
    if (soe_klass == nullptr || !EnsureInitialized(self, hs.NewHandle(soe_klass), true, true)) {
      // Strange, but don't crash.
      LOG(WARNING) << "Could not prepare StackOverflowError.";
      self->ClearException();
    }
  }

  VLOG(startup) << "ClassLinker::FinishInit exiting";
}
1089
Vladimir Markoad824392022-10-20 10:57:35 +00001090static void EnsureRootInitialized(ClassLinker* class_linker,
1091 Thread* self,
1092 ObjPtr<mirror::Class> klass)
1093 REQUIRES_SHARED(Locks::mutator_lock_) {
1094 if (!klass->IsVisiblyInitialized()) {
1095 DCHECK(!klass->IsArrayClass());
1096 DCHECK(!klass->IsPrimitive());
1097 StackHandleScope<1> hs(self);
1098 Handle<mirror::Class> h_class(hs.NewHandle(klass));
1099 if (!class_linker->EnsureInitialized(
1100 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true)) {
1101 LOG(FATAL) << "Exception when initializing " << h_class->PrettyClass()
1102 << ": " << self->GetException()->Dump();
Elliott Hughes2a20cfd2011-09-23 19:30:41 -07001103 }
1104 }
1105}
1106
Vladimir Markoad824392022-10-20 10:57:35 +00001107void ClassLinker::RunEarlyRootClinits(Thread* self) {
1108 StackHandleScope<1u> hs(self);
1109 Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
1110 EnsureRootInitialized(this, self, GetClassRoot<mirror::Class>(class_roots.Get()));
1111 EnsureRootInitialized(this, self, GetClassRoot<mirror::String>(class_roots.Get()));
Vladimir Markoaddc2d12022-11-29 08:47:02 +00001112 // `Field` class is needed for register_java_net_InetAddress in libcore, b/28153851.
Vladimir Markoad824392022-10-20 10:57:35 +00001113 EnsureRootInitialized(this, self, GetClassRoot<mirror::Field>(class_roots.Get()));
Vladimir Markoaddc2d12022-11-29 08:47:02 +00001114
1115 WellKnownClasses::Init(self->GetJniEnv());
1116
1117 // `FinalizerReference` class is needed for initialization of `java.net.InetAddress`.
1118 // (Indirectly by constructing a `ObjectStreamField` which uses a `StringBuilder`
1119 // and, when resizing, initializes the `System` class for `System.arraycopy()`
1120 // and `System.<clinit> creates a finalizable object.)
1121 EnsureRootInitialized(
1122 this, self, WellKnownClasses::java_lang_ref_FinalizerReference_add->GetDeclaringClass());
Vladimir Markoad824392022-10-20 10:57:35 +00001123}
1124
// Runs `<clinit>` for every class root and for a curated list of well-known
// classes, so later runtime code can assume they are initialized. Any
// initialization failure aborts via `EnsureRootInitialized()`.
void ClassLinker::RunRootClinits(Thread* self) {
  StackHandleScope<1u> hs(self);
  Handle<mirror::ObjectArray<mirror::Class>> class_roots = hs.NewHandle(GetClassRoots());
  // First pass: initialize every class root.
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); ++i) {
    EnsureRootInitialized(this, self, GetClassRoot(ClassRoot(i), class_roots.Get()));
  }

  // Make sure certain well-known classes are initialized. Note that well-known
  // classes are always in the boot image, so this code is primarily intended
  // for running without boot image but may be needed for boot image if the
  // AOT-initialization fails due to introduction of new code to `<clinit>`.
  // Each entry below is an ArtMethod whose declaring class we want initialized.
  ArtMethod* static_methods_of_classes_to_initialize[] = {
      // Initialize primitive boxing classes (avoid check at runtime).
      WellKnownClasses::java_lang_Boolean_valueOf,
      WellKnownClasses::java_lang_Byte_valueOf,
      WellKnownClasses::java_lang_Character_valueOf,
      WellKnownClasses::java_lang_Double_valueOf,
      WellKnownClasses::java_lang_Float_valueOf,
      WellKnownClasses::java_lang_Integer_valueOf,
      WellKnownClasses::java_lang_Long_valueOf,
      WellKnownClasses::java_lang_Short_valueOf,
      // Initialize `StackOverflowError`.
      WellKnownClasses::java_lang_StackOverflowError_init,
      // Ensure class loader classes are initialized (avoid check at runtime).
      // Superclass `ClassLoader` is a class root and already initialized above.
      // Superclass `BaseDexClassLoader` is initialized implicitly.
      WellKnownClasses::dalvik_system_DelegateLastClassLoader_init,
      WellKnownClasses::dalvik_system_DexClassLoader_init,
      WellKnownClasses::dalvik_system_InMemoryDexClassLoader_init,
      WellKnownClasses::dalvik_system_PathClassLoader_init,
      WellKnownClasses::java_lang_BootClassLoader_init,
      // Ensure `Daemons` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_Daemons_start,
      // Ensure `Thread` and `ThreadGroup` classes are initialized (avoid check at runtime).
      WellKnownClasses::java_lang_Thread_init,
      WellKnownClasses::java_lang_ThreadGroup_add,
      // Ensure reference classes are initialized (avoid check at runtime).
      // The `FinalizerReference` class was initialized in `RunEarlyRootClinits()`.
      WellKnownClasses::java_lang_ref_ReferenceQueue_add,
      // Ensure `InvocationTargetException` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_reflect_InvocationTargetException_init,
      // Ensure `Parameter` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_reflect_Parameter_init,
      // Ensure `MethodHandles` class is initialized (avoid check at runtime).
      WellKnownClasses::java_lang_invoke_MethodHandles_lookup,
      // Ensure `DirectByteBuffer` class is initialized (avoid check at runtime).
      WellKnownClasses::java_nio_DirectByteBuffer_init,
      // Ensure `FloatingDecimal` class is initialized (avoid check at runtime).
      WellKnownClasses::jdk_internal_math_FloatingDecimal_getBinaryToASCIIConverter_D,
      // Ensure reflection annotation classes are initialized (avoid check at runtime).
      WellKnownClasses::libcore_reflect_AnnotationFactory_createAnnotation,
      WellKnownClasses::libcore_reflect_AnnotationMember_init,
      // We're suppressing exceptions from `DdmServer` and we do not want to repeatedly
      // suppress class initialization error (say, due to OOM), so initialize it early.
      WellKnownClasses::org_apache_harmony_dalvik_ddmc_DdmServer_dispatch,
  };
  for (ArtMethod* method : static_methods_of_classes_to_initialize) {
    EnsureRootInitialized(this, self, method->GetDeclaringClass());
  }
  // Each entry below is an ArtField whose declaring class we want initialized.
  ArtField* static_fields_of_classes_to_initialize[] = {
      // Ensure classes used by class loaders are initialized (avoid check at runtime).
      WellKnownClasses::dalvik_system_DexFile_cookie,
      WellKnownClasses::dalvik_system_DexPathList_dexElements,
      WellKnownClasses::dalvik_system_DexPathList__Element_dexFile,
      // Ensure `VMRuntime` is initialized (avoid check at runtime).
      WellKnownClasses::dalvik_system_VMRuntime_nonSdkApiUsageConsumer,
      // Initialize empty arrays needed by `StackOverflowError`.
      WellKnownClasses::java_util_Collections_EMPTY_LIST,
      WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT,
  };
  for (ArtField* field : static_fields_of_classes_to_initialize) {
    EnsureRootInitialized(this, self, field->GetDeclaringClass());
  }
}
1199
Vladimir Marko8670e042021-12-21 17:55:48 +00001200ALWAYS_INLINE
1201static uint32_t ComputeMethodHash(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
1202 DCHECK(!method->IsRuntimeMethod());
1203 DCHECK(!method->IsProxyMethod());
1204 DCHECK(!method->IsObsolete());
1205 // Do not use `ArtMethod::GetNameView()` to avoid unnecessary runtime/proxy/obsolete method
1206 // checks. It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1207 const DexFile& dex_file = method->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1208 const dex::MethodId& method_id = dex_file.GetMethodId(method->GetDexMethodIndex());
1209 std::string_view name = dex_file.GetMethodNameView(method_id);
1210 return ComputeModifiedUtf8Hash(name);
1211}
1212
Vladimir Markobed84ef2022-01-21 13:57:14 +00001213ALWAYS_INLINE
1214static bool MethodSignatureEquals(ArtMethod* lhs, ArtMethod* rhs)
1215 REQUIRES_SHARED(Locks::mutator_lock_) {
1216 DCHECK(!lhs->IsRuntimeMethod());
1217 DCHECK(!lhs->IsProxyMethod());
1218 DCHECK(!lhs->IsObsolete());
1219 DCHECK(!rhs->IsRuntimeMethod());
1220 DCHECK(!rhs->IsProxyMethod());
1221 DCHECK(!rhs->IsObsolete());
1222 // Do not use `ArtMethod::GetDexFile()` to avoid unnecessary obsolete method checks.
1223 // It is safe to avoid the read barrier here, see `ArtMethod::GetDexFile()`.
1224 const DexFile& lhs_dex_file = lhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1225 const DexFile& rhs_dex_file = rhs->GetDeclaringClass<kWithoutReadBarrier>()->GetDexFile();
1226 const dex::MethodId& lhs_mid = lhs_dex_file.GetMethodId(lhs->GetDexMethodIndex());
1227 const dex::MethodId& rhs_mid = rhs_dex_file.GetMethodId(rhs->GetDexMethodIndex());
1228 if (&lhs_dex_file == &rhs_dex_file) {
1229 return lhs_mid.name_idx_ == rhs_mid.name_idx_ &&
1230 lhs_mid.proto_idx_ == rhs_mid.proto_idx_;
1231 } else {
1232 return
1233 lhs_dex_file.GetMethodNameView(lhs_mid) == rhs_dex_file.GetMethodNameView(rhs_mid) &&
1234 lhs_dex_file.GetMethodSignature(lhs_mid) == rhs_dex_file.GetMethodSignature(rhs_mid);
1235 }
1236}
1237
Vladimir Marko43354742021-02-03 15:37:01 +00001238static void InitializeObjectVirtualMethodHashes(ObjPtr<mirror::Class> java_lang_Object,
1239 PointerSize pointer_size,
1240 /*out*/ ArrayRef<uint32_t> virtual_method_hashes)
1241 REQUIRES_SHARED(Locks::mutator_lock_) {
1242 ArraySlice<ArtMethod> virtual_methods = java_lang_Object->GetVirtualMethods(pointer_size);
1243 DCHECK_EQ(virtual_method_hashes.size(), virtual_methods.size());
1244 for (size_t i = 0; i != virtual_method_hashes.size(); ++i) {
Vladimir Marko8670e042021-12-21 17:55:48 +00001245 virtual_method_hashes[i] = ComputeMethodHash(&virtual_methods[i]);
Vladimir Marko43354742021-02-03 15:37:01 +00001246 }
1247}
1248
// Aggregates one oat file's trampoline entrypoints plus the mutable state used
// by the heap-walk visitor in `ClassLinker::InitFromBootImage()`, which checks
// that no ArtMethod in an image uses a mismatched trampoline as its entrypoint.
struct TrampolineCheckData {
  const void* quick_resolution_trampoline;
  const void* quick_imt_conflict_trampoline;
  const void* quick_generic_jni_trampoline;
  const void* quick_to_interpreter_bridge_trampoline;
  // NOTE(review): stored by the caller but not compared in the visitor's
  // entrypoint check — confirm whether nterp entrypoints need checking too.
  const void* nterp_trampoline;
  PointerSize pointer_size;  // Image pointer size used to read entrypoints.
  ArtMethod* m;              // First offending method found, if any.
  bool error;                // Set to true when an offending method is found.
};
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001259
// Initialize the class linker from the boot image spaces: validates the image
// pointer size, installs image-resident runtime methods and trampolines,
// adopts the image's class roots and sentinel, registers each image space's
// dex files on the boot class path, and finishes with `FinishInit()`.
// Returns false and sets `*error_msg` on failure.
bool ClassLinker::InitFromBootImage(std::string* error_msg) {
  VLOG(startup) << __FUNCTION__ << " entering";
  CHECK(!init_done_);

  Runtime* const runtime = Runtime::Current();
  Thread* const self = Thread::Current();
  gc::Heap* const heap = runtime->GetHeap();
  std::vector<gc::space::ImageSpace*> spaces = heap->GetBootImageSpaces();
  CHECK(!spaces.empty());
  // The first space's header is authoritative for pointer size and image roots.
  const ImageHeader& image_header = spaces[0]->GetImageHeader();
  uint32_t pointer_size_unchecked = image_header.GetPointerSizeUnchecked();
  if (!ValidPointerSize(pointer_size_unchecked)) {
    *error_msg = StringPrintf("Invalid image pointer size: %u", pointer_size_unchecked);
    return false;
  }
  image_pointer_size_ = image_header.GetPointerSize();
  if (!runtime->IsAotCompiler()) {
    // Only the Aot compiler supports having an image with a different pointer size than the
    // runtime. This happens on the host for compiling 32 bit tests since we use a 64 bit libart
    // compiler. We may also use 32 bit dex2oat on a system with 64 bit apps.
    if (image_pointer_size_ != kRuntimePointerSize) {
      *error_msg = StringPrintf("Runtime must use current image pointer size: %zu vs %zu",
                                static_cast<size_t>(image_pointer_size_),
                                sizeof(void*));
      return false;
    }
  }
  // Install the image-resident runtime methods (resolution/IMT/callee-save).
  DCHECK(!runtime->HasResolutionMethod());
  runtime->SetResolutionMethod(image_header.GetImageMethod(ImageHeader::kResolutionMethod));
  runtime->SetImtConflictMethod(image_header.GetImageMethod(ImageHeader::kImtConflictMethod));
  runtime->SetImtUnimplementedMethod(
      image_header.GetImageMethod(ImageHeader::kImtUnimplementedMethod));
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveAllCalleeSavesMethod),
      CalleeSaveType::kSaveAllCalleeSaves);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsOnlyMethod),
      CalleeSaveType::kSaveRefsOnly);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveRefsAndArgsMethod),
      CalleeSaveType::kSaveRefsAndArgs);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethod),
      CalleeSaveType::kSaveEverything);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForClinit),
      CalleeSaveType::kSaveEverythingForClinit);
  runtime->SetCalleeSaveMethod(
      image_header.GetImageMethod(ImageHeader::kSaveEverythingMethodForSuspendCheck),
      CalleeSaveType::kSaveEverythingForSuspendCheck);

  // Adopt the trampolines from the first oat file; all images must agree on
  // a single set (verified below in debug builds).
  std::vector<const OatFile*> oat_files =
      runtime->GetOatFileManager().RegisterImageOatFiles(spaces);
  DCHECK(!oat_files.empty());
  const OatHeader& default_oat_header = oat_files[0]->GetOatHeader();
  jni_dlsym_lookup_trampoline_ = default_oat_header.GetJniDlsymLookupTrampoline();
  jni_dlsym_lookup_critical_trampoline_ = default_oat_header.GetJniDlsymLookupCriticalTrampoline();
  quick_resolution_trampoline_ = default_oat_header.GetQuickResolutionTrampoline();
  quick_imt_conflict_trampoline_ = default_oat_header.GetQuickImtConflictTrampoline();
  quick_generic_jni_trampoline_ = default_oat_header.GetQuickGenericJniTrampoline();
  quick_to_interpreter_bridge_trampoline_ = default_oat_header.GetQuickToInterpreterBridge();
  nterp_trampoline_ = default_oat_header.GetNterpTrampoline();
  if (kIsDebugBuild) {
    // Check that the other images use the same trampoline.
    for (size_t i = 1; i < oat_files.size(); ++i) {
      const OatHeader& ith_oat_header = oat_files[i]->GetOatHeader();
      const void* ith_jni_dlsym_lookup_trampoline_ =
          ith_oat_header.GetJniDlsymLookupTrampoline();
      const void* ith_jni_dlsym_lookup_critical_trampoline_ =
          ith_oat_header.GetJniDlsymLookupCriticalTrampoline();
      const void* ith_quick_resolution_trampoline =
          ith_oat_header.GetQuickResolutionTrampoline();
      const void* ith_quick_imt_conflict_trampoline =
          ith_oat_header.GetQuickImtConflictTrampoline();
      const void* ith_quick_generic_jni_trampoline =
          ith_oat_header.GetQuickGenericJniTrampoline();
      const void* ith_quick_to_interpreter_bridge_trampoline =
          ith_oat_header.GetQuickToInterpreterBridge();
      const void* ith_nterp_trampoline =
          ith_oat_header.GetNterpTrampoline();
      if (ith_jni_dlsym_lookup_trampoline_ != jni_dlsym_lookup_trampoline_ ||
          ith_jni_dlsym_lookup_critical_trampoline_ != jni_dlsym_lookup_critical_trampoline_ ||
          ith_quick_resolution_trampoline != quick_resolution_trampoline_ ||
          ith_quick_imt_conflict_trampoline != quick_imt_conflict_trampoline_ ||
          ith_quick_generic_jni_trampoline != quick_generic_jni_trampoline_ ||
          ith_quick_to_interpreter_bridge_trampoline != quick_to_interpreter_bridge_trampoline_ ||
          ith_nterp_trampoline != nterp_trampoline_) {
        // Make sure that all methods in this image do not contain those trampolines as
        // entrypoints. Otherwise the class-linker won't be able to work with a single set.
        TrampolineCheckData data;
        data.error = false;
        data.pointer_size = GetImagePointerSize();
        data.quick_resolution_trampoline = ith_quick_resolution_trampoline;
        data.quick_imt_conflict_trampoline = ith_quick_imt_conflict_trampoline;
        data.quick_generic_jni_trampoline = ith_quick_generic_jni_trampoline;
        data.quick_to_interpreter_bridge_trampoline = ith_quick_to_interpreter_bridge_trampoline;
        data.nterp_trampoline = ith_nterp_trampoline;
        ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
        // Walk every class in the image and flag any method whose quick
        // entrypoint is one of this image's (mismatched) trampolines.
        auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
          if (obj->IsClass()) {
            ObjPtr<mirror::Class> klass = obj->AsClass();
            for (ArtMethod& m : klass->GetMethods(data.pointer_size)) {
              const void* entrypoint =
                  m.GetEntryPointFromQuickCompiledCodePtrSize(data.pointer_size);
              if (entrypoint == data.quick_resolution_trampoline ||
                  entrypoint == data.quick_imt_conflict_trampoline ||
                  entrypoint == data.quick_generic_jni_trampoline ||
                  entrypoint == data.quick_to_interpreter_bridge_trampoline) {
                data.m = &m;
                data.error = true;
                return;
              }
            }
          }
        };
        spaces[i]->GetLiveBitmap()->Walk(visitor);
        if (data.error) {
          ArtMethod* m = data.m;
          LOG(ERROR) << "Found a broken ArtMethod: " << ArtMethod::PrettyMethod(m);
          *error_msg = "Found an ArtMethod with a bad entrypoint";
          return false;
        }
      }
    }
  }

  // Adopt the class roots array stored in the image.
  class_roots_ = GcRoot<mirror::ObjectArray<mirror::Class>>(
      ObjPtr<mirror::ObjectArray<mirror::Class>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kClassRoots)));
  DCHECK_EQ(GetClassRoot<mirror::Class>(this)->GetClassFlags(), mirror::kClassFlagClass);

  DCHECK_EQ(GetClassRoot<mirror::Object>(this)->GetObjectSize(), sizeof(mirror::Object));
  // The cleared-JNI-weak sentinel object also lives in the image.
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          image_header.GetImageRoot(ImageHeader::kBootImageLiveObjects));
  runtime->SetSentinel(boot_image_live_objects->Get(ImageHeader::kClearedJniWeakSentinel));
  DCHECK(runtime->GetSentinel().Read()->GetClass() == GetClassRoot<mirror::Object>(this));

  for (size_t i = 0u, size = spaces.size(); i != size; ++i) {
    // Boot class loader, use a null handle.
    std::vector<std::unique_ptr<const DexFile>> dex_files;
    if (!AddImageSpace(spaces[i],
                       ScopedNullHandle<mirror::ClassLoader>(),
                       /*out*/&dex_files,
                       error_msg)) {
      return false;
    }
    // Append opened dex files at the end.
    boot_dex_files_.insert(boot_dex_files_.end(),
                           std::make_move_iterator(dex_files.begin()),
                           std::make_move_iterator(dex_files.end()));
  }
  for (const std::unique_ptr<const DexFile>& dex_file : boot_dex_files_) {
    OatDexFile::MadviseDexFileAtLoad(*dex_file);
  }
  InitializeObjectVirtualMethodHashes(GetClassRoot<mirror::Object>(this),
                                      image_pointer_size_,
                                      ArrayRef<uint32_t>(object_virtual_method_hashes_));
  FinishInit(self);

  VLOG(startup) << __FUNCTION__ << " exiting";
  return true;
}
Andreas Gampe3db9c5d2015-11-17 11:52:46 -08001423
Vladimir Marko4433c432018-12-04 14:57:47 +00001424void ClassLinker::AddExtraBootDexFiles(
1425 Thread* self,
1426 std::vector<std::unique_ptr<const DexFile>>&& additional_dex_files) {
1427 for (std::unique_ptr<const DexFile>& dex_file : additional_dex_files) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08001428 AppendToBootClassPath(self, dex_file.get());
Orion Hodson771708f2021-01-06 15:45:16 +00001429 if (kIsDebugBuild) {
1430 for (const auto& boot_dex_file : boot_dex_files_) {
1431 DCHECK_NE(boot_dex_file->GetLocation(), dex_file->GetLocation());
1432 }
1433 }
Vladimir Marko4433c432018-12-04 14:57:47 +00001434 boot_dex_files_.push_back(std::move(dex_file));
1435 }
1436}
1437
Nicolas Geoffray5d3a23d2022-12-08 14:51:25 +00001438bool ClassLinker::IsBootClassLoader(ObjPtr<mirror::Object> class_loader) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001439 return class_loader == nullptr ||
Vladimir Markob6f965d2022-11-23 14:24:45 +00001440 WellKnownClasses::java_lang_BootClassLoader == class_loader->GetClass();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001441}
1442
Alexey Grebenkinbe4c2bd2018-02-01 19:09:59 +03001443class CHAOnDeleteUpdateClassVisitor {
1444 public:
1445 explicit CHAOnDeleteUpdateClassVisitor(LinearAlloc* alloc)
1446 : allocator_(alloc), cha_(Runtime::Current()->GetClassLinker()->GetClassHierarchyAnalysis()),
1447 pointer_size_(Runtime::Current()->GetClassLinker()->GetImagePointerSize()),
1448 self_(Thread::Current()) {}
1449
1450 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
1451 // This class is going to be unloaded. Tell CHA about it.
1452 cha_->ResetSingleImplementationInHierarchy(klass, allocator_, pointer_size_);
1453 return true;
1454 }
1455 private:
1456 const LinearAlloc* allocator_;
1457 const ClassHierarchyAnalysis* cha_;
1458 const PointerSize pointer_size_;
1459 const Thread* self_;
1460};
1461
Chris Wailes0c61be42018-09-26 17:27:34 -07001462/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001463 * A class used to ensure that all references to strings interned in an AppImage have been
1464 * properly recorded in the interned references list, and is only ever run in debug mode.
Chris Wailes0c61be42018-09-26 17:27:34 -07001465 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001466class CountInternedStringReferencesVisitor {
Chang Xingba17dbd2017-06-28 21:27:56 +00001467 public:
Vladimir Marko8e05f092019-06-10 11:10:38 +01001468 CountInternedStringReferencesVisitor(const gc::space::ImageSpace& space,
1469 const InternTable::UnorderedSet& image_interns)
1470 : space_(space),
1471 image_interns_(image_interns),
1472 count_(0u) {}
Chris Wailes0c61be42018-09-26 17:27:34 -07001473
Chris Wailes0c61be42018-09-26 17:27:34 -07001474 void TestObject(ObjPtr<mirror::Object> referred_obj) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001475 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001476 if (referred_obj != nullptr &&
1477 space_.HasAddress(referred_obj.Ptr()) &&
1478 referred_obj->IsString()) {
1479 ObjPtr<mirror::String> referred_str = referred_obj->AsString();
Vladimir Marko365c0202022-03-22 09:53:31 +00001480 uint32_t hash = static_cast<uint32_t>(referred_str->GetStoredHashCode());
1481 // All image strings have the hash code calculated, even if they are not interned.
1482 DCHECK_EQ(hash, static_cast<uint32_t>(referred_str->ComputeHashCode()));
1483 auto it = image_interns_.FindWithHash(GcRoot<mirror::String>(referred_str), hash);
Vladimir Marko8e05f092019-06-10 11:10:38 +01001484 if (it != image_interns_.end() && it->Read() == referred_str) {
1485 ++count_;
Chris Wailesfbeef462018-10-19 14:16:35 -07001486 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001487 }
Chang Xingba17dbd2017-06-28 21:27:56 +00001488 }
1489
Chris Wailes0c61be42018-09-26 17:27:34 -07001490 void VisitRootIfNonNull(
Chang Xingba17dbd2017-06-28 21:27:56 +00001491 mirror::CompressedReference<mirror::Object>* root) const
1492 REQUIRES_SHARED(Locks::mutator_lock_) {
1493 if (!root->IsNull()) {
1494 VisitRoot(root);
1495 }
1496 }
1497
Chris Wailes0c61be42018-09-26 17:27:34 -07001498 void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001499 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001500 TestObject(root->AsMirrorPtr());
Chang Xingba17dbd2017-06-28 21:27:56 +00001501 }
1502
1503 // Visit Class Fields
Chris Wailes0c61be42018-09-26 17:27:34 -07001504 void operator()(ObjPtr<mirror::Object> obj,
1505 MemberOffset offset,
1506 bool is_static ATTRIBUTE_UNUSED) const
Chang Xingba17dbd2017-06-28 21:27:56 +00001507 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001508 // References within image or across images don't need a read barrier.
1509 ObjPtr<mirror::Object> referred_obj =
1510 obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
1511 TestObject(referred_obj);
Chang Xingba17dbd2017-06-28 21:27:56 +00001512 }
1513
1514 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
1515 ObjPtr<mirror::Reference> ref) const
1516 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
Vladimir Marko8e05f092019-06-10 11:10:38 +01001517 operator()(ref, mirror::Reference::ReferentOffset(), /*is_static=*/ false);
Chang Xingba17dbd2017-06-28 21:27:56 +00001518 }
1519
Vladimir Marko8e05f092019-06-10 11:10:38 +01001520 size_t GetCount() const {
1521 return count_;
1522 }
1523
1524 private:
Chris Wailes0c61be42018-09-26 17:27:34 -07001525 const gc::space::ImageSpace& space_;
Vladimir Marko8e05f092019-06-10 11:10:38 +01001526 const InternTable::UnorderedSet& image_interns_;
1527 mutable size_t count_; // Modified from the `const` callbacks.
Chang Xingba17dbd2017-06-28 21:27:56 +00001528};
1529
Chris Wailes0c61be42018-09-26 17:27:34 -07001530/*
Vladimir Marko8e05f092019-06-10 11:10:38 +01001531 * This function counts references to strings interned in the AppImage.
1532 * This is used in debug build to check against the number of the recorded references.
Chris Wailes0c61be42018-09-26 17:27:34 -07001533 */
Vladimir Marko8e05f092019-06-10 11:10:38 +01001534size_t CountInternedStringReferences(gc::space::ImageSpace& space,
1535 const InternTable::UnorderedSet& image_interns)
1536 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes0c61be42018-09-26 17:27:34 -07001537 const gc::accounting::ContinuousSpaceBitmap* bitmap = space.GetMarkBitmap();
1538 const ImageHeader& image_header = space.GetImageHeader();
1539 const uint8_t* target_base = space.GetMemMap()->Begin();
1540 const ImageSection& objects_section = image_header.GetObjectsSection();
Chris Wailesfbeef462018-10-19 14:16:35 -07001541
1542 auto objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
1543 auto objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
Chris Wailes0c61be42018-09-26 17:27:34 -07001544
Vladimir Marko8e05f092019-06-10 11:10:38 +01001545 CountInternedStringReferencesVisitor visitor(space, image_interns);
Chris Wailes0c61be42018-09-26 17:27:34 -07001546 bitmap->VisitMarkedRange(objects_begin,
1547 objects_end,
1548 [&space, &visitor](mirror::Object* obj)
1549 REQUIRES_SHARED(Locks::mutator_lock_) {
1550 if (space.HasAddress(obj)) {
1551 if (obj->IsDexCache()) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001552 obj->VisitReferences</* kVisitNativeRoots= */ true,
1553 kVerifyNone,
1554 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001555 } else {
1556 // Don't visit native roots for non-dex-cache as they can't contain
1557 // native references to strings. This is verified during compilation
1558 // by ImageWriter::VerifyNativeGCRootInvariants.
Chris Wailesfbeef462018-10-19 14:16:35 -07001559 obj->VisitReferences</* kVisitNativeRoots= */ false,
1560 kVerifyNone,
1561 kWithoutReadBarrier>(visitor, visitor);
Chris Wailes0c61be42018-09-26 17:27:34 -07001562 }
1563 }
1564 });
Vladimir Marko8e05f092019-06-10 11:10:38 +01001565 return visitor.GetCount();
1566}
1567
// Applies `visitor` to every interned-string reference recorded in the image's
// string-reference-offsets section. Each record is a (base object offset,
// member offset) pair; the visitor receives the referenced string and may
// return a replacement, which is written back in place of the original.
template <typename Visitor>
static void VisitInternedStringReferences(
    gc::space::ImageSpace* space,
    const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  const uint8_t* target_base = space->Begin();
  const ImageSection& sro_section =
      space->GetImageHeader().GetImageStringReferenceOffsetsSection();
  const size_t num_string_offsets = sro_section.Size() / sizeof(AppImageReferenceOffsetInfo);

  VLOG(image)
      << "ClassLinker:AppImage:InternStrings:imageStringReferenceOffsetCount = "
      << num_string_offsets;

  const auto* sro_base =
      reinterpret_cast<const AppImageReferenceOffsetInfo*>(target_base + sro_section.Offset());

  for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
    // Offset of the holder object within the image.
    uint32_t base_offset = sro_base[offset_index].first;

    // Offset of the string field within the holder object.
    uint32_t raw_member_offset = sro_base[offset_index].second;
    DCHECK_ALIGNED(base_offset, 2);
    DCHECK_ALIGNED(raw_member_offset, 2);

    ObjPtr<mirror::Object> obj_ptr =
        reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
    MemberOffset member_offset(raw_member_offset);
    ObjPtr<mirror::String> referred_string =
        obj_ptr->GetFieldObject<mirror::String,
                                kVerifyNone,
                                kWithoutReadBarrier,
                                /* kIsVolatile= */ false>(member_offset);
    DCHECK(referred_string != nullptr);

    // Only write back when the visitor substituted a different string.
    ObjPtr<mirror::String> visited = visitor(referred_string);
    if (visited != referred_string) {
      obj_ptr->SetFieldObject</* kTransactionActive= */ false,
                              /* kCheckTransaction= */ false,
                              kVerifyNone,
                              /* kIsVolatile= */ false>(member_offset, visited);
    }
  }
}
1610
1611static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
1612 REQUIRES_SHARED(Locks::mutator_lock_) {
1613 InternTable::UnorderedSet image_interns;
1614 const ImageSection& section = space->GetImageHeader().GetInternedStringsSection();
1615 if (section.Size() > 0) {
1616 size_t read_count;
1617 const uint8_t* data = space->Begin() + section.Offset();
1618 InternTable::UnorderedSet image_set(data, /*make_copy_of_data=*/ false, &read_count);
1619 image_set.swap(image_interns);
1620 }
1621 size_t num_recorded_refs = 0u;
1622 VisitInternedStringReferences(
1623 space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001624 [&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
1625 REQUIRES_SHARED(Locks::mutator_lock_) {
1626 auto it = image_interns.find(GcRoot<mirror::String>(str));
1627 CHECK(it != image_interns.end());
1628 CHECK(it->Read() == str);
1629 ++num_recorded_refs;
1630 return str;
1631 });
1632 size_t num_found_refs = CountInternedStringReferences(*space, image_interns);
1633 CHECK_EQ(num_recorded_refs, num_found_refs);
Chris Wailes0c61be42018-09-26 17:27:34 -07001634}
1635
Andreas Gampe2af99022017-04-25 08:32:59 -07001636// new_class_set is the set of classes that were read from the class table section in the image.
1637// If there was no class table section, it is null.
1638// Note: using a class here to avoid having to make ClassLinker internals public.
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001639class AppImageLoadingHelper {
Andreas Gampe2af99022017-04-25 08:32:59 -07001640 public:
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001641 static void Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001642 ClassLinker* class_linker,
1643 gc::space::ImageSpace* space,
1644 Handle<mirror::ClassLoader> class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001645 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
Andreas Gampe2af99022017-04-25 08:32:59 -07001646 REQUIRES(!Locks::dex_lock_)
1647 REQUIRES_SHARED(Locks::mutator_lock_);
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001648
Chris Wailesfbeef462018-10-19 14:16:35 -07001649 static void HandleAppImageStrings(gc::space::ImageSpace* space)
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001650 REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07001651};
1652
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001653void AppImageLoadingHelper::Update(
Andreas Gampe2af99022017-04-25 08:32:59 -07001654 ClassLinker* class_linker,
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001655 gc::space::ImageSpace* space,
1656 Handle<mirror::ClassLoader> class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001657 Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
Andreas Gampe2af99022017-04-25 08:32:59 -07001658 REQUIRES(!Locks::dex_lock_)
1659 REQUIRES_SHARED(Locks::mutator_lock_) {
Chris Wailes23866362018-08-22 16:16:58 -07001660 ScopedTrace app_image_timing("AppImage:Updating");
1661
Vladimir Marko8e05f092019-06-10 11:10:38 +01001662 if (kIsDebugBuild && ClassLinker::kAppImageMayContainStrings) {
1663 // In debug build, verify the string references before applying
1664 // the Runtime::LoadAppImageStartupCache() option.
1665 VerifyInternedStringReferences(space);
1666 }
1667
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001668 Thread* const self = Thread::Current();
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001669 Runtime* const runtime = Runtime::Current();
1670 gc::Heap* const heap = runtime->GetHeap();
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001671 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001672 {
Vladimir Marko0f3c7002017-09-07 14:15:56 +01001673 // Register dex caches with the class loader.
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001674 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Alex Lighta9bbc082019-11-14 14:51:41 -08001675 for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001676 const DexFile* const dex_file = dex_cache->GetDexFile();
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001677 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08001678 WriterMutexLock mu2(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08001679 CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
Andreas Gampe2af99022017-04-25 08:32:59 -07001680 class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
Mathieu Chartier064e9d42016-03-07 17:41:39 -08001681 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001682 }
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001683 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001684
Mathieu Chartier0933cc52018-03-23 14:25:08 -07001685 if (ClassLinker::kAppImageMayContainStrings) {
Chris Wailesfbeef462018-10-19 14:16:35 -07001686 HandleAppImageStrings(space);
Chang Xingba17dbd2017-06-28 21:27:56 +00001687 }
Chris Wailes0c61be42018-09-26 17:27:34 -07001688
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001689 if (kVerifyArtMethodDeclaringClasses) {
Chris Wailes23866362018-08-22 16:16:58 -07001690 ScopedTrace timing("AppImage:VerifyDeclaringClasses");
Mathieu Chartiera0b95212016-03-07 16:13:54 -08001691 ReaderMutexLock rmu(self, *Locks::heap_bitmap_lock_);
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001692 gc::accounting::HeapBitmap* live_bitmap = heap->GetLiveBitmap();
1693 header.VisitPackedArtMethods([&](ArtMethod& method)
1694 REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1695 ObjPtr<mirror::Class> klass = method.GetDeclaringClassUnchecked();
1696 if (klass != nullptr) {
1697 CHECK(live_bitmap->Test(klass.Ptr())) << "Image method has unmarked declaring class";
1698 }
1699 }, space->Begin(), kRuntimePointerSize);
Mathieu Chartier03c1dd92016-03-07 16:13:54 -08001700 }
Mathieu Chartierfbc31082016-01-24 11:59:56 -08001701}
1702
Chris Wailesfbeef462018-10-19 14:16:35 -07001703void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space) {
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001704 // Iterate over the string reference offsets stored in the image and intern
1705 // the strings they point to.
1706 ScopedTrace timing("AppImage:InternString");
1707
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001708 Runtime* const runtime = Runtime::Current();
1709 InternTable* const intern_table = runtime->GetInternTable();
1710
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001711 // Add the intern table, removing any conflicts. For conflicts, store the new address in a map
1712 // for faster lookup.
1713 // TODO: Optimize with a bitmap or bloom filter
1714 SafeMap<mirror::String*, mirror::String*> intern_remap;
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001715 auto func = [&](InternTable::UnorderedSet& interns)
Mathieu Chartier41c08082018-10-31 11:50:26 -07001716 REQUIRES_SHARED(Locks::mutator_lock_)
1717 REQUIRES(Locks::intern_table_lock_) {
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001718 const size_t non_boot_image_strings = intern_table->CountInterns(
1719 /*visit_boot_images=*/false,
1720 /*visit_non_boot_images=*/true);
Chris Wailesfbeef462018-10-19 14:16:35 -07001721 VLOG(image) << "AppImage:stringsInInternTableSize = " << interns.size();
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001722 VLOG(image) << "AppImage:nonBootImageInternStrings = " << non_boot_image_strings;
1723 // Visit the smaller of the two sets to compute the intersection.
1724 if (interns.size() < non_boot_image_strings) {
1725 for (auto it = interns.begin(); it != interns.end(); ) {
1726 ObjPtr<mirror::String> string = it->Read();
1727 ObjPtr<mirror::String> existing = intern_table->LookupWeakLocked(string);
1728 if (existing == nullptr) {
1729 existing = intern_table->LookupStrongLocked(string);
1730 }
1731 if (existing != nullptr) {
1732 intern_remap.Put(string.Ptr(), existing.Ptr());
1733 it = interns.erase(it);
1734 } else {
1735 ++it;
1736 }
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001737 }
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001738 } else {
1739 intern_table->VisitInterns([&](const GcRoot<mirror::String>& root)
1740 REQUIRES_SHARED(Locks::mutator_lock_)
1741 REQUIRES(Locks::intern_table_lock_) {
1742 auto it = interns.find(root);
1743 if (it != interns.end()) {
1744 ObjPtr<mirror::String> existing = root.Read();
1745 intern_remap.Put(it->Read(), existing.Ptr());
1746 it = interns.erase(it);
1747 }
1748 }, /*visit_boot_images=*/false, /*visit_non_boot_images=*/true);
1749 }
David Srbecky346fd962020-07-27 16:51:00 +01001750 // Consistency check to ensure correctness.
Mathieu Chartier8fc75582018-11-01 14:21:33 -07001751 if (kIsDebugBuild) {
1752 for (GcRoot<mirror::String>& root : interns) {
1753 ObjPtr<mirror::String> string = root.Read();
1754 CHECK(intern_table->LookupWeakLocked(string) == nullptr) << string->ToModifiedUtf8();
1755 CHECK(intern_table->LookupStrongLocked(string) == nullptr) << string->ToModifiedUtf8();
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001756 }
1757 }
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001758 };
Vladimir Marko8e05f092019-06-10 11:10:38 +01001759 intern_table->AddImageStringsToTable(space, func);
1760 if (!intern_remap.empty()) {
Mathieu Chartiera88abfa2019-02-04 11:08:29 -08001761 VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
Vladimir Marko8e05f092019-06-10 11:10:38 +01001762 VisitInternedStringReferences(
1763 space,
Vladimir Marko8e05f092019-06-10 11:10:38 +01001764 [&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
1765 auto it = intern_remap.find(str.Ptr());
1766 if (it != intern_remap.end()) {
1767 return ObjPtr<mirror::String>(it->second);
1768 }
1769 return str;
1770 });
Mathieu Chartier74ccee62018-10-10 10:30:29 -07001771 }
1772}
1773
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001774static std::unique_ptr<const DexFile> OpenOatDexFile(const OatFile* oat_file,
1775 const char* location,
1776 std::string* error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001777 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001778 DCHECK(error_msg != nullptr);
1779 std::unique_ptr<const DexFile> dex_file;
Andreas Gampeb40d3612018-06-26 15:49:42 -07001780 const OatDexFile* oat_dex_file = oat_file->GetOatDexFile(location, nullptr, error_msg);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001781 if (oat_dex_file == nullptr) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001782 return std::unique_ptr<const DexFile>();
1783 }
1784 std::string inner_error_msg;
1785 dex_file = oat_dex_file->OpenDexFile(&inner_error_msg);
1786 if (dex_file == nullptr) {
1787 *error_msg = StringPrintf("Failed to open dex file %s from within oat file %s error '%s'",
1788 location,
1789 oat_file->GetLocation().c_str(),
1790 inner_error_msg.c_str());
1791 return std::unique_ptr<const DexFile>();
1792 }
1793
1794 if (dex_file->GetLocationChecksum() != oat_dex_file->GetDexFileLocationChecksum()) {
1795 *error_msg = StringPrintf("Checksums do not match for %s: %x vs %x",
1796 location,
1797 dex_file->GetLocationChecksum(),
1798 oat_dex_file->GetDexFileLocationChecksum());
1799 return std::unique_ptr<const DexFile>();
1800 }
1801 return dex_file;
1802}
1803
1804bool ClassLinker::OpenImageDexFiles(gc::space::ImageSpace* space,
1805 std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
1806 std::string* error_msg) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07001807 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001808 const ImageHeader& header = space->GetImageHeader();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07001809 ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001810 DCHECK(dex_caches_object != nullptr);
Vladimir Marko4617d582019-03-28 13:48:31 +00001811 ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001812 dex_caches_object->AsObjectArray<mirror::DexCache>();
1813 const OatFile* oat_file = space->GetOatFile();
Alex Lighta9bbc082019-11-14 14:51:41 -08001814 for (auto dex_cache : dex_caches->Iterate()) {
Mathieu Chartierbcb6a722016-03-08 16:49:58 -08001815 std::string dex_file_location(dex_cache->GetLocation()->ToModifiedUtf8());
1816 std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
1817 dex_file_location.c_str(),
1818 error_msg);
1819 if (dex_file == nullptr) {
1820 return false;
1821 }
1822 dex_cache->SetDexFile(dex_file.get());
1823 out_dex_files->push_back(std::move(dex_file));
1824 }
1825 return true;
1826}
1827
Andreas Gampe0793bec2016-12-01 11:37:33 -08001828// Helper class for ArtMethod checks when adding an image. Keeps all required functionality
1829// together and caches some intermediate results.
Vladimir Marko76ba2562022-10-12 11:27:58 +00001830template <PointerSize kPointerSize>
Orion Hodson5880c772020-07-28 20:12:08 +01001831class ImageChecker final {
Andreas Gampe0793bec2016-12-01 11:37:33 -08001832 public:
Vladimir Marko76ba2562022-10-12 11:27:58 +00001833 static void CheckObjects(gc::Heap* heap, gc::space::ImageSpace* space)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001834 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001835 // There can be no GC during boot image initialization, so we do not need read barriers.
1836 ScopedDebugDisallowReadBarriers sddrb(Thread::Current());
1837
1838 CHECK_EQ(kPointerSize, space->GetImageHeader().GetPointerSize());
1839 const ImageSection& objects_section = space->GetImageHeader().GetObjectsSection();
1840 uintptr_t space_begin = reinterpret_cast<uintptr_t>(space->Begin());
1841 uintptr_t objects_begin = space_begin + objects_section.Offset();
1842 uintptr_t objects_end = objects_begin + objects_section.Size();
1843 ImageChecker ic(heap);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001844 auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
1845 DCHECK(obj != nullptr);
Vladimir Marko76ba2562022-10-12 11:27:58 +00001846 mirror::Class* obj_klass = obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1847 CHECK(obj_klass != nullptr) << "Null class in object " << obj;
1848 mirror::Class* class_class = obj_klass->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>();
1849 CHECK(class_class != nullptr) << "Null class class " << obj;
1850 if (obj_klass == class_class) {
Andreas Gampe1c158a02017-07-13 17:26:19 -07001851 auto klass = obj->AsClass();
1852 for (ArtField& field : klass->GetIFields()) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001853 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001854 }
1855 for (ArtField& field : klass->GetSFields()) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001856 CHECK_EQ(field.GetDeclaringClass<kWithoutReadBarrier>(), klass);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001857 }
Vladimir Marko76ba2562022-10-12 11:27:58 +00001858 for (ArtMethod& m : klass->GetMethods(kPointerSize)) {
Orion Hodson5880c772020-07-28 20:12:08 +01001859 ic.CheckArtMethod(&m, klass);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001860 }
Vladimir Marko76ba2562022-10-12 11:27:58 +00001861 ObjPtr<mirror::PointerArray> vtable =
1862 klass->GetVTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
Andreas Gampe1c158a02017-07-13 17:26:19 -07001863 if (vtable != nullptr) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001864 ic.CheckArtMethodPointerArray(vtable);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001865 }
1866 if (klass->ShouldHaveImt()) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001867 ImTable* imt = klass->GetImt(kPointerSize);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001868 for (size_t i = 0; i < ImTable::kSize; ++i) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001869 ic.CheckArtMethod(imt->Get(i, kPointerSize), /*expected_class=*/ nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001870 }
1871 }
1872 if (klass->ShouldHaveEmbeddedVTable()) {
1873 for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001874 ic.CheckArtMethod(klass->GetEmbeddedVTableEntry(i, kPointerSize),
1875 /*expected_class=*/ nullptr);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001876 }
1877 }
Vladimir Marko76ba2562022-10-12 11:27:58 +00001878 ObjPtr<mirror::IfTable> iftable =
1879 klass->GetIfTable<kDefaultVerifyFlags, kWithoutReadBarrier>();
1880 int32_t iftable_count = (iftable != nullptr) ? iftable->Count() : 0;
1881 for (int32_t i = 0; i < iftable_count; ++i) {
1882 ObjPtr<mirror::PointerArray> method_array =
1883 iftable->GetMethodArrayOrNull<kDefaultVerifyFlags, kWithoutReadBarrier>(i);
1884 if (method_array != nullptr) {
1885 ic.CheckArtMethodPointerArray(method_array);
Andreas Gampe1c158a02017-07-13 17:26:19 -07001886 }
1887 }
1888 }
1889 };
Vladimir Marko76ba2562022-10-12 11:27:58 +00001890 space->GetLiveBitmap()->VisitMarkedRange(objects_begin, objects_end, visitor);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001891 }
1892
Andreas Gampe0793bec2016-12-01 11:37:33 -08001893 private:
Vladimir Marko76ba2562022-10-12 11:27:58 +00001894 explicit ImageChecker(gc::Heap* heap) {
1895 ArrayRef<gc::space::ImageSpace* const> spaces(heap->GetBootImageSpaces());
1896 space_begin_.reserve(spaces.size());
1897 for (gc::space::ImageSpace* space : spaces) {
1898 CHECK_EQ(static_cast<const void*>(space->Begin()), &space->GetImageHeader());
Andreas Gampe0793bec2016-12-01 11:37:33 -08001899 space_begin_.push_back(space->Begin());
Andreas Gampe0793bec2016-12-01 11:37:33 -08001900 }
1901 }
1902
Orion Hodson5880c772020-07-28 20:12:08 +01001903 void CheckArtMethod(ArtMethod* m, ObjPtr<mirror::Class> expected_class)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001904 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001905 ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
Andreas Gampe0793bec2016-12-01 11:37:33 -08001906 if (m->IsRuntimeMethod()) {
Andreas Gampe0793bec2016-12-01 11:37:33 -08001907 CHECK(declaring_class == nullptr) << declaring_class << " " << m->PrettyMethod();
1908 } else if (m->IsCopied()) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001909 CHECK(declaring_class != nullptr) << m->PrettyMethod();
Andreas Gampe0793bec2016-12-01 11:37:33 -08001910 } else if (expected_class != nullptr) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001911 CHECK_EQ(declaring_class, expected_class) << m->PrettyMethod();
Andreas Gampe0793bec2016-12-01 11:37:33 -08001912 }
Vladimir Marko76ba2562022-10-12 11:27:58 +00001913 bool contains = false;
1914 for (const uint8_t* begin : space_begin_) {
1915 const size_t offset = reinterpret_cast<uint8_t*>(m) - begin;
1916 const ImageHeader* header = reinterpret_cast<const ImageHeader*>(begin);
1917 if (header->GetMethodsSection().Contains(offset) ||
1918 header->GetRuntimeMethodsSection().Contains(offset)) {
1919 contains = true;
1920 break;
Andreas Gampe0793bec2016-12-01 11:37:33 -08001921 }
Andreas Gampe0793bec2016-12-01 11:37:33 -08001922 }
Vladimir Marko76ba2562022-10-12 11:27:58 +00001923 CHECK(contains) << m << " not found";
Andreas Gampe0793bec2016-12-01 11:37:33 -08001924 }
1925
Vladimir Marko76ba2562022-10-12 11:27:58 +00001926 void CheckArtMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
Andreas Gampe0793bec2016-12-01 11:37:33 -08001927 REQUIRES_SHARED(Locks::mutator_lock_) {
1928 CHECK(arr != nullptr);
1929 for (int32_t j = 0; j < arr->GetLength(); ++j) {
Vladimir Marko76ba2562022-10-12 11:27:58 +00001930 auto* method = arr->GetElementPtrSize<ArtMethod*>(j, kPointerSize);
1931 CHECK(method != nullptr);
1932 CheckArtMethod(method, /*expected_class=*/ nullptr);
Andreas Gampe0793bec2016-12-01 11:37:33 -08001933 }
1934 }
1935
Andreas Gampe0793bec2016-12-01 11:37:33 -08001936 std::vector<const uint8_t*> space_begin_;
Andreas Gampe0793bec2016-12-01 11:37:33 -08001937};
1938
Andreas Gampebe7af222017-07-25 09:57:28 -07001939static void VerifyAppImage(const ImageHeader& header,
1940 const Handle<mirror::ClassLoader>& class_loader,
David Srbecky86d6cd52020-12-02 18:13:10 +00001941 ClassTable* class_table,
1942 gc::space::ImageSpace* space)
Andreas Gampebe7af222017-07-25 09:57:28 -07001943 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9d5956a2019-03-22 11:29:08 -07001944 header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
1945 ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
1946 if (klass != nullptr && !Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
1947 CHECK_EQ(class_table->LookupByDescriptor(klass), klass)
1948 << mirror::Class::PrettyClass(klass);
1949 }
1950 }, space->Begin(), kRuntimePointerSize);
Andreas Gampebe7af222017-07-25 09:57:28 -07001951 {
1952 // Verify that all direct interfaces of classes in the class table are also resolved.
1953 std::vector<ObjPtr<mirror::Class>> classes;
1954 auto verify_direct_interfaces_in_table = [&](ObjPtr<mirror::Class> klass)
1955 REQUIRES_SHARED(Locks::mutator_lock_) {
1956 if (!klass->IsPrimitive() && klass->GetClassLoader() == class_loader.Get()) {
1957 classes.push_back(klass);
1958 }
1959 return true;
1960 };
1961 class_table->Visit(verify_direct_interfaces_in_table);
Andreas Gampebe7af222017-07-25 09:57:28 -07001962 for (ObjPtr<mirror::Class> klass : classes) {
1963 for (uint32_t i = 0, num = klass->NumDirectInterfaces(); i != num; ++i) {
Vladimir Markob10668c2021-06-10 09:52:53 +01001964 CHECK(klass->GetDirectInterface(i) != nullptr)
Andreas Gampebe7af222017-07-25 09:57:28 -07001965 << klass->PrettyDescriptor() << " iface #" << i;
1966 }
1967 }
1968 }
Andreas Gampebe7af222017-07-25 09:57:28 -07001969}
1970
// Loads an image space (boot or app image) into the runtime: validates the
// image header against runtime expectations, opens and registers the dex
// files backing the image, fixes up ArtMethod entry points and code items,
// and publishes the image's class table under the given class loader.
// A null `class_loader` means this is a boot image space.
// Returns false and fills `*error_msg` on any validation failure; on success
// the opened dex files are appended to `*out_dex_files`.
bool ClassLinker::AddImageSpace(
    gc::space::ImageSpace* space,
    Handle<mirror::ClassLoader> class_loader,
    std::vector<std::unique_ptr<const DexFile>>* out_dex_files,
    std::string* error_msg) {
  DCHECK(out_dex_files != nullptr);
  DCHECK(error_msg != nullptr);
  const uint64_t start_time = NanoTime();
  const bool app_image = class_loader != nullptr;
  const ImageHeader& header = space->GetImageHeader();
  ObjPtr<mirror::Object> dex_caches_object = header.GetImageRoot(ImageHeader::kDexCaches);
  DCHECK(dex_caches_object != nullptr);
  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();
  Thread* const self = Thread::Current();
  // Check that the image is what we are expecting.
  if (image_pointer_size_ != space->GetImageHeader().GetPointerSize()) {
    *error_msg = StringPrintf("Application image pointer size does not match runtime: %zu vs %zu",
                              static_cast<size_t>(space->GetImageHeader().GetPointerSize()),
                              image_pointer_size_);
    return false;
  }
  // The number of image roots differs between boot and app images.
  size_t expected_image_roots = ImageHeader::NumberOfImageRoots(app_image);
  if (static_cast<size_t>(header.GetImageRoots()->GetLength()) != expected_image_roots) {
    *error_msg = StringPrintf("Expected %zu image roots but got %d",
                              expected_image_roots,
                              header.GetImageRoots()->GetLength());
    return false;
  }
  StackHandleScope<3> hs(self);
  Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches(
      hs.NewHandle(dex_caches_object->AsObjectArray<mirror::DexCache>()));
  Handle<mirror::ObjectArray<mirror::Class>> class_roots(hs.NewHandle(
      header.GetImageRoot(ImageHeader::kClassRoots)->AsObjectArray<mirror::Class>()));
  // The special root is only present in app images; it is validated below.
  MutableHandle<mirror::Object> special_root(hs.NewHandle(
      app_image ? header.GetImageRoot(ImageHeader::kSpecialRoots) : nullptr));
  DCHECK(class_roots != nullptr);
  if (class_roots->GetLength() != static_cast<int32_t>(ClassRoot::kMax)) {
    *error_msg = StringPrintf("Expected %d class roots but got %d",
                              class_roots->GetLength(),
                              static_cast<int32_t>(ClassRoot::kMax));
    return false;
  }
  // Check against existing class roots to make sure they match the ones in the boot image.
  ObjPtr<mirror::ObjectArray<mirror::Class>> existing_class_roots = GetClassRoots();
  for (size_t i = 0; i < static_cast<size_t>(ClassRoot::kMax); i++) {
    if (class_roots->Get(i) != GetClassRoot(static_cast<ClassRoot>(i), existing_class_roots)) {
      *error_msg = "App image class roots must have pointer equality with runtime ones.";
      return false;
    }
  }
  const OatFile* oat_file = space->GetOatFile();
  if (oat_file->GetOatHeader().GetDexFileCount() !=
      static_cast<uint32_t>(dex_caches->GetLength())) {
    *error_msg = "Dex cache count and dex file count mismatch while trying to initialize from "
                 "image";
    return false;
  }

  // Open every dex file referenced by the image's dex caches and wire the
  // caches up to their native dex file data.
  for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
    std::string dex_file_location = dex_cache->GetLocation()->ToModifiedUtf8();
    std::unique_ptr<const DexFile> dex_file = OpenOatDexFile(oat_file,
                                                             dex_file_location.c_str(),
                                                             error_msg);
    if (dex_file == nullptr) {
      return false;
    }

    {
      // Native fields are all null. Initialize them.
      WriterMutexLock mu(self, *Locks::dex_lock_);
      dex_cache->Initialize(dex_file.get(), class_loader.Get());
    }
    if (!app_image) {
      // Register dex files, keep track of existing ones that are conflicts.
      AppendToBootClassPath(dex_file.get(), dex_cache);
    }
    out_dex_files->push_back(std::move(dex_file));
  }

  if (app_image) {
    ScopedAssertNoThreadSuspension sants("Checking app image");
    // Validate the special root: it is either an int array of dex checksums
    // (which must match the vdex's checksums) or the image's class loader.
    if (special_root == nullptr) {
      *error_msg = "Unexpected null special root in app image";
      return false;
    } else if (special_root->IsIntArray()) {
      size_t count = special_root->AsIntArray()->GetLength();
      if (oat_file->GetVdexFile()->GetNumberOfDexFiles() != count) {
        *error_msg = "Checksums count does not match";
        return false;
      }
      static_assert(sizeof(VdexFile::VdexChecksum) == sizeof(int32_t));
      const VdexFile::VdexChecksum* art_checksums =
          reinterpret_cast<VdexFile::VdexChecksum*>(special_root->AsIntArray()->GetData());
      const VdexFile::VdexChecksum* vdex_checksums =
          oat_file->GetVdexFile()->GetDexChecksumsArray();
      if (memcmp(art_checksums, vdex_checksums, sizeof(VdexFile::VdexChecksum) * count) != 0) {
        *error_msg = "Image and vdex checksums did not match";
        return false;
      }
    } else if (IsBootClassLoader(special_root.Get())) {
      *error_msg = "Unexpected BootClassLoader in app image";
      return false;
    } else if (!special_root->IsClassLoader()) {
      *error_msg = "Unexpected special root in app image";
      return false;
    }
  }

  if (kCheckImageObjects) {
    if (!app_image) {
      // Debug-only consistency pass over boot image objects, instantiated for
      // the runtime's pointer size.
      if (image_pointer_size_ == PointerSize::k64) {
        ImageChecker<PointerSize::k64>::CheckObjects(heap, space);
      } else {
        ImageChecker<PointerSize::k32>::CheckObjects(heap, space);
      }
    }
  }

  // Set entry point to interpreter if in InterpretOnly mode.
  if (!runtime->IsAotCompiler() && runtime->GetInstrumentation()->InterpretOnly()) {
    // Set image methods' entry point to interpreter.
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!method.IsRuntimeMethod()) {
        DCHECK(method.GetDeclaringClass() != nullptr);
        if (!method.IsNative() && !method.IsResolutionMethod()) {
          method.SetEntryPointFromQuickCompiledCodePtrSize(GetQuickToInterpreterBridge(),
                                                           image_pointer_size_);
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  if (!runtime->IsAotCompiler()) {
    // If we are profiling the boot classpath, disable the shared memory for
    // boot image method optimization. We need to disable it before doing
    // ResetCounter below, as counters of shared memory method always hold the
    // "hot" value.
    if (runtime->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
      header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
        method.ClearMemorySharedMethod();
      }, space->Begin(), image_pointer_size_);
    }

    ScopedTrace trace("AppImage:UpdateCodeItemAndNterp");
    bool can_use_nterp = interpreter::CanRuntimeUseNterp();
    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      // In the image, the `data` pointer field of the ArtMethod contains the code
      // item offset. Change this to the actual pointer to the code item.
      if (method.HasCodeItem()) {
        const dex::CodeItem* code_item = method.GetDexFile()->GetCodeItem(
            reinterpret_cast32<uint32_t>(method.GetDataPtrSize(image_pointer_size_)));
        method.SetCodeItem(code_item, method.GetDexFile()->IsCompactDexFile());
        // The hotness counter may have changed since we compiled the image, so
        // reset it with the runtime value.
        method.ResetCounter(hotness_threshold);
      }
      if (method.GetEntryPointFromQuickCompiledCode() == nterp_trampoline_) {
        if (can_use_nterp) {
          // Set image methods' entry point that point to the nterp trampoline to the
          // nterp entry point. This allows taking the fast path when doing a
          // nterp->nterp call.
          DCHECK(!method.StillNeedsClinitCheck());
          method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
        } else {
          method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
        }
      }
    }, space->Begin(), image_pointer_size_);
  }

  if (runtime->IsVerificationSoftFail()) {
    // Under soft-fail verification, force access checks back on for all
    // managed, invokable image methods.
    header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (method.IsManagedAndInvokable()) {
        method.ClearSkipAccessChecks();
      }
    }, space->Begin(), image_pointer_size_);
  }

  ClassTable* class_table = nullptr;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table = InsertClassTableForClassLoader(class_loader.Get());
  }
  // If we have a class table section, read it and use it for verification in
  // UpdateAppImageClassLoadersAndDexCaches.
  ClassTable::ClassSet temp_set;
  const ImageSection& class_table_section = header.GetClassTableSection();
  const bool added_class_table = class_table_section.Size() > 0u;
  if (added_class_table) {
    const uint64_t start_time2 = NanoTime();
    size_t read_count = 0;
    // Read the serialized class set directly out of the mapped image (no copy).
    temp_set = ClassTable::ClassSet(space->Begin() + class_table_section.Offset(),
                                    /*make copy*/false,
                                    &read_count);
    VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
  }
  if (app_image) {
    AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);

    {
      ScopedTrace trace("AppImage:UpdateClassLoaders");
      // Update class loader and resolved strings. If added_class_table is false, the resolved
      // strings were forwarded by UpdateAppImageClassLoadersAndDexCaches.
      ObjPtr<mirror::ClassLoader> loader(class_loader.Get());
      for (const ClassTable::TableSlot& root : temp_set) {
        // Note: We probably don't need the read barrier unless we copy the app image objects into
        // the region space.
        ObjPtr<mirror::Class> klass(root.Read());
        // Do not update class loader for boot image classes where the app image
        // class loader is only the initiating loader but not the defining loader.
        // Avoid read barrier since we are comparing against null.
        if (klass->GetClassLoader<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
          klass->SetClassLoader(loader);
        }
      }
    }

    if (kBitstringSubtypeCheckEnabled) {
      // Every class in the app image has initially SubtypeCheckInfo in the
      // Uninitialized state.
      //
      // The SubtypeCheck invariants imply that a SubtypeCheckInfo is at least Initialized
      // after class initialization is complete. The app image ClassStatus as-is
      // are almost all ClassStatus::Initialized, and being in the
      // SubtypeCheckInfo::kUninitialized state is violating that invariant.
      //
      // Force every app image class's SubtypeCheck to be at least kInitialized.
      //
      // See also ImageWriter::FixupClass.
      ScopedTrace trace("AppImage:RecacluateSubtypeCheckBitstrings");
      MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
      for (const ClassTable::TableSlot& root : temp_set) {
        SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(root.Read());
      }
    }
  }
  if (!oat_file->GetBssGcRoots().empty()) {
    // Insert oat file to class table for visiting .bss GC roots.
    class_table->InsertOatFile(oat_file);
  }

  if (added_class_table) {
    // Publish the image's classes to the loader's class table.
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    class_table->AddClassSet(std::move(temp_set));
  }

  if (kIsDebugBuild && app_image) {
    // This verification needs to happen after the classes have been added to the class loader.
    // Since it ensures classes are in the class table.
    ScopedTrace trace("AppImage:Verify");
    VerifyAppImage(header, class_loader, class_table, space);
  }

  VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
  return true;
}
2229
// Visits the GC roots held in class tables and class loaders, honoring the
// requested VisitRootFlags: full visits, incremental "new roots" visits, and
// root-log control. New-root tracking flags are only meaningful for
// non-concurrent-copying collectors (asserted below).
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
  // enabling tracing requires the mutator lock, there are no race conditions here.
  const bool tracing_enabled = Trace::IsTracingEnabled();
  Thread* const self = Thread::Current();
  WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
  if (gUseReadBarrier) {
    // We do not track new roots for CC.
    DCHECK_EQ(0, flags & (kVisitRootFlagNewRoots |
                          kVisitRootFlagClearRootLog |
                          kVisitRootFlagStartLoggingNewRoots |
                          kVisitRootFlagStopLoggingNewRoots));
  }
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    // Argument for how root visiting deals with ArtField and ArtMethod roots.
    // There are 3 GC cases to handle:
    // Non moving concurrent:
    // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
    // live by the class and class roots.
    //
    // Moving non-concurrent:
    // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
    // To prevent missing roots, this case needs to ensure that there is no
    // suspend points between the point which we allocate ArtMethod arrays and place them in a
    // class which is in the class table.
    //
    // Moving concurrent:
    // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
    // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
    //
    // Use an unbuffered visitor since the class table uses a temporary GcRoot for holding decoded
    // ClassTable::TableSlot. The buffered root visiting would access a stale stack location for
    // these objects.
    UnbufferedRootVisitor root_visitor(visitor, RootInfo(kRootStickyClass));
    boot_class_table_->VisitRoots(root_visitor);
    // If tracing is enabled, then mark all the class loaders to prevent unloading.
    if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
      gc::Heap* const heap = Runtime::Current()->GetHeap();
      // Don't visit class-loaders if compacting with userfaultfd GC as these
      // weaks are updated using Runtime::SweepSystemWeaks() and the GC doesn't
      // tolerate double updates.
      if (!gUseUserfaultfd
          || !heap->MarkCompactCollector()->IsCompacting(self)) {
        for (const ClassLoaderData& data : class_loaders_) {
          GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
          root.VisitRoot(visitor, RootInfo(kRootVMInternal));
        }
      } else {
        DCHECK_EQ(heap->CurrentCollectorType(), gc::CollectorType::kCollectorTypeCMC);
      }
    }
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
    // Incremental visit: only the roots logged since logging was started.
    for (auto& root : new_class_roots_) {
      ObjPtr<mirror::Class> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootStickyClass));
      ObjPtr<mirror::Class> new_ref = root.Read<kWithoutReadBarrier>();
      // Concurrent moving GC marked new roots through the to-space invariant.
      CHECK_EQ(new_ref, old_ref);
    }
    for (const OatFile* oat_file : new_bss_roots_boot_oat_files_) {
      for (GcRoot<mirror::Object>& root : oat_file->GetBssGcRoots()) {
        ObjPtr<mirror::Object> old_ref = root.Read<kWithoutReadBarrier>();
        if (old_ref != nullptr) {
          DCHECK(old_ref->IsClass());
          root.VisitRoot(visitor, RootInfo(kRootStickyClass));
          ObjPtr<mirror::Object> new_ref = root.Read<kWithoutReadBarrier>();
          // Concurrent moving GC marked new roots through the to-space invariant.
          CHECK_EQ(new_ref, old_ref);
        }
      }
    }
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagClearRootLog) != 0) {
    new_class_roots_.clear();
    new_bss_roots_boot_oat_files_.clear();
  }
  if (!gUseReadBarrier && (flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if (!gUseReadBarrier && (flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // We deliberately ignore the class roots in the image since we
  // handle image roots by using the MS/CMS rescanning of dirty cards.
}
2314
// Keep in sync with InitCallback. Anything we visit, we need to
// reinit references to when reinitializing a ClassLinker from a
// mapped image.
void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  // Visit the class-roots array itself, then the per-loader class tables.
  class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
  VisitClassRoots(visitor, flags);
  // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
  // unloading if we are marking roots.
  DropFindArrayClassCache();
}
2325
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002326class VisitClassLoaderClassesVisitor : public ClassLoaderVisitor {
2327 public:
2328 explicit VisitClassLoaderClassesVisitor(ClassVisitor* visitor)
2329 : visitor_(visitor),
2330 done_(false) {}
2331
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002332 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002333 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002334 ClassTable* const class_table = class_loader->GetClassTable();
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002335 if (!done_ && class_table != nullptr) {
2336 DefiningClassLoaderFilterVisitor visitor(class_loader, visitor_);
2337 if (!class_table->Visit(visitor)) {
2338 // If the visitor ClassTable returns false it means that we don't need to continue.
2339 done_ = true;
2340 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002341 }
2342 }
2343
2344 private:
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002345 // Class visitor that limits the class visits from a ClassTable to the classes with
2346 // the provided defining class loader. This filter is used to avoid multiple visits
2347 // of the same class which can be recorded for multiple initiating class loaders.
2348 class DefiningClassLoaderFilterVisitor : public ClassVisitor {
2349 public:
2350 DefiningClassLoaderFilterVisitor(ObjPtr<mirror::ClassLoader> defining_class_loader,
2351 ClassVisitor* visitor)
2352 : defining_class_loader_(defining_class_loader), visitor_(visitor) { }
2353
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002354 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002355 if (klass->GetClassLoader() != defining_class_loader_) {
2356 return true;
2357 }
2358 return (*visitor_)(klass);
2359 }
2360
Vladimir Marko0984e482019-03-27 16:41:41 +00002361 const ObjPtr<mirror::ClassLoader> defining_class_loader_;
Vladimir Markoc5798bf2016-12-09 10:20:54 +00002362 ClassVisitor* const visitor_;
2363 };
2364
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002365 ClassVisitor* const visitor_;
2366 // If done is true then we don't need to do any more visiting.
2367 bool done_;
2368};
2369
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002370void ClassLinker::VisitClassesInternal(ClassVisitor* visitor) {
Andreas Gampe2af99022017-04-25 08:32:59 -07002371 if (boot_class_table_->Visit(*visitor)) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07002372 VisitClassLoaderClassesVisitor loader_visitor(visitor);
2373 VisitClassLoaders(&loader_visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002374 }
2375}
2376
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002377void ClassLinker::VisitClasses(ClassVisitor* visitor) {
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002378 Thread* const self = Thread::Current();
2379 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
2380 // Not safe to have thread suspension when we are holding a lock.
2381 if (self != nullptr) {
Mathieu Chartier268764d2016-09-13 12:09:38 -07002382 ScopedAssertNoThreadSuspension nts(__FUNCTION__);
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002383 VisitClassesInternal(visitor);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07002384 } else {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002385 VisitClassesInternal(visitor);
Elliott Hughesa2155262011-11-16 16:26:58 -08002386 }
2387}
2388
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002389class GetClassesInToVector : public ClassVisitor {
2390 public:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002391 bool operator()(ObjPtr<mirror::Class> klass) override {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002392 classes_.push_back(klass);
2393 return true;
2394 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07002395 std::vector<ObjPtr<mirror::Class>> classes_;
Ian Rogersdbf3be02014-08-29 15:40:08 -07002396};
2397
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002398class GetClassInToObjectArray : public ClassVisitor {
2399 public:
2400 explicit GetClassInToObjectArray(mirror::ObjectArray<mirror::Class>* arr)
2401 : arr_(arr), index_(0) {}
2402
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01002403 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002404 ++index_;
2405 if (index_ <= arr_->GetLength()) {
2406 arr_->Set(index_ - 1, klass);
2407 return true;
2408 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002409 return false;
2410 }
Ian Rogersdbf3be02014-08-29 15:40:08 -07002411
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002412 bool Succeeded() const REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere0671ce2015-07-28 17:23:28 -07002413 return index_ <= arr_->GetLength();
2414 }
2415
2416 private:
2417 mirror::ObjectArray<mirror::Class>* const arr_;
2418 int32_t index_;
2419};
2420
// Visits all classes without holding classlinker_classes_lock_ during the
// callback: classes are first snapshotted (vector for non-moving collectors,
// handle-scoped ObjectArray otherwise) and then visited from the snapshot.
void ClassLinker::VisitClassesWithoutClassesLock(ClassVisitor* visitor) {
  // TODO: it may be possible to avoid secondary storage if we iterate over dex caches. The problem
  // is avoiding duplicates.
  if (!kMovingClasses) {
    // Classes cannot move: raw pointers in a vector are safe to hold across
    // the snapshot, as long as we do not suspend.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    GetClassesInToVector accumulator;
    VisitClasses(&accumulator);
    for (ObjPtr<mirror::Class> klass : accumulator.classes_) {
      if (!visitor->operator()(klass)) {
        return;
      }
    }
  } else {
    Thread* const self = Thread::Current();
    StackHandleScope<1> hs(self);
    auto classes = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);
    // We size the array assuming classes won't be added to the class table during the visit.
    // If this assumption fails we iterate again.
    while (true) {
      size_t class_table_size;
      {
        ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
        // Add 100 in case new classes get loaded when we are filling in the object array.
        class_table_size = NumZygoteClasses() + NumNonZygoteClasses() + 100;
      }
      ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
      classes.Assign(
          mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, class_table_size));
      CHECK(classes != nullptr);  // OOME.
      GetClassInToObjectArray accumulator(classes.Get());
      VisitClasses(&accumulator);
      if (accumulator.Succeeded()) {
        // Everything fit; the snapshot is complete.
        break;
      }
    }
    for (int32_t i = 0; i < classes->GetLength(); ++i) {
      // If the class table shrank during creation of the classes array we expect null elements. If
      // the class table grew then the loop repeats. If classes are created after the loop has
      // finished then we don't visit.
      ObjPtr<mirror::Class> klass = classes->Get(i);
      if (klass != nullptr && !visitor->operator()(klass)) {
        return;
      }
    }
  }
}
2467
// Tears down per-class-loader native state and frees any callbacks still
// queued in running_visibly_initialized_callbacks_.
ClassLinker::~ClassLinker() {
  Thread* const self = Thread::Current();
  for (const ClassLoaderData& data : class_loaders_) {
    // CHA unloading analysis is not needed. No negative consequences are expected because
    // all the classloaders are deleted at the same time.
    DeleteClassLoader(self, data, /*cleanup_cha=*/ false);
  }
  class_loaders_.clear();
  while (!running_visibly_initialized_callbacks_.empty()) {
    // Take ownership of the front element before unlinking it, so that the
    // unique_ptr frees it at the end of this iteration.
    // NOTE(review): presumably an intrusive list whose nodes are heap-allocated
    // callbacks — confirm pop_front() only unlinks and does not destroy.
    std::unique_ptr<VisiblyInitializedCallback> callback(
        std::addressof(running_visibly_initialized_callbacks_.front()));
    running_visibly_initialized_callbacks_.pop_front();
  }
}
2482
// Releases runtime state tied to an unloading class loader: its JNI weak root,
// JIT code or CHA dependencies referencing its LinearAlloc, entries in
// critical_native_code_with_clinit_check_ whose methods live in that
// allocator, and finally the allocator and class table themselves.
// `cleanup_cha` selects whether CHA single-implementation data referencing the
// soon-to-be-deleted ArtMethods must be scrubbed (skipped when all loaders die
// together, see ~ClassLinker).
void ClassLinker::DeleteClassLoader(Thread* self, const ClassLoaderData& data, bool cleanup_cha) {
  Runtime* const runtime = Runtime::Current();
  JavaVMExt* const vm = runtime->GetJavaVM();
  vm->DeleteWeakGlobalRef(self, data.weak_root);
  // Notify the JIT that we need to remove the methods and/or profiling info.
  if (runtime->GetJit() != nullptr) {
    jit::JitCodeCache* code_cache = runtime->GetJit()->GetCodeCache();
    if (code_cache != nullptr) {
      // For the JIT case, RemoveMethodsIn removes the CHA dependencies.
      code_cache->RemoveMethodsIn(self, *data.allocator);
    }
  } else if (cha_ != nullptr) {
    // Without a JIT, the CHA dependencies must be removed explicitly.
    cha_->RemoveDependenciesForLinearAlloc(self, data.allocator);
  }
  // Cleanup references to single implementation ArtMethods that will be deleted.
  if (cleanup_cha) {
    CHAOnDeleteUpdateClassVisitor visitor(data.allocator);
    data.class_table->Visit<kWithoutReadBarrier>(visitor);
  }
  {
    // Drop cached critical-native entries whose ArtMethod keys are about to be
    // freed together with this loader's allocator.
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    auto end = critical_native_code_with_clinit_check_.end();
    for (auto it = critical_native_code_with_clinit_check_.begin(); it != end; ) {
      if (data.allocator->ContainsUnsafe(it->first)) {
        it = critical_native_code_with_clinit_check_.erase(it);
      } else {
        ++it;
      }
    }
  }

  delete data.allocator;
  delete data.class_table;
}
2518
Vladimir Markobcf17522018-06-01 13:14:32 +01002519ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t length) {
2520 return ObjPtr<mirror::PointerArray>::DownCast(
Andreas Gampe542451c2016-07-26 09:02:02 -07002521 image_pointer_size_ == PointerSize::k64
Vladimir Markobcf17522018-06-01 13:14:32 +01002522 ? ObjPtr<mirror::Array>(mirror::LongArray::Alloc(self, length))
2523 : ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002524}
2525
David Srbecky86d6cd52020-12-02 18:13:10 +00002526ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002527 StackHandleScope<1> hs(self);
Mathieu Chartier28bd2e42016-10-04 13:54:57 -07002528 auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002529 GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
Andreas Gampefa4333d2017-02-14 11:10:34 -08002530 if (dex_cache == nullptr) {
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002531 self->AssertPendingOOMException();
2532 return nullptr;
2533 }
Vladimir Marko31c3daa2019-06-13 12:18:37 +01002534 // Use InternWeak() so that the location String can be collected when the ClassLoader
2535 // with this DexCache is collected.
2536 ObjPtr<mirror::String> location = intern_table_->InternWeak(dex_file.GetLocation().c_str());
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002537 if (location == nullptr) {
2538 self->AssertPendingOOMException();
2539 return nullptr;
2540 }
David Srbecky86d6cd52020-12-02 18:13:10 +00002541 dex_cache->SetLocation(location);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002542 return dex_cache.Get();
Brian Carlstroma0808032011-07-18 00:39:23 -07002543}
2544
David Srbecky33df0e32021-09-30 14:36:32 +00002545ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(
2546 Thread* self, const DexFile& dex_file, ObjPtr<mirror::ClassLoader> class_loader) {
2547 StackHandleScope<1> hs(self);
2548 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
David Srbecky86d6cd52020-12-02 18:13:10 +00002549 ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002550 if (dex_cache != nullptr) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08002551 WriterMutexLock mu(self, *Locks::dex_lock_);
David Srbecky33df0e32021-09-30 14:36:32 +00002552 dex_cache->Initialize(&dex_file, h_class_loader.Get());
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002553 }
Vladimir Markobcf17522018-06-01 13:14:32 +01002554 return dex_cache;
Mathieu Chartier6c60d842016-09-15 10:24:43 -07002555}
2556
Vladimir Marko70e2a762019-07-12 16:49:00 +01002557template <bool kMovable, typename PreFenceVisitor>
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002558ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2559 ObjPtr<mirror::Class> java_lang_Class,
Vladimir Marko70e2a762019-07-12 16:49:00 +01002560 uint32_t class_size,
2561 const PreFenceVisitor& pre_fence_visitor) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08002562 DCHECK_GE(class_size, sizeof(mirror::Class));
Ian Rogers1d54e732013-05-02 21:10:01 -07002563 gc::Heap* heap = Runtime::Current()->GetHeap();
Roland Levillain0e840272018-08-23 19:55:30 +01002564 ObjPtr<mirror::Object> k = (kMovingClasses && kMovable) ?
Vladimir Marko70e2a762019-07-12 16:49:00 +01002565 heap->AllocObject(self, java_lang_Class, class_size, pre_fence_visitor) :
2566 heap->AllocNonMovableObject(self, java_lang_Class, class_size, pre_fence_visitor);
Ian Rogers6fac4472014-02-25 17:01:10 -08002567 if (UNLIKELY(k == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002568 self->AssertPendingOOMException();
Ian Rogers6fac4472014-02-25 17:01:10 -08002569 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07002570 }
Ian Rogers6fac4472014-02-25 17:01:10 -08002571 return k->AsClass();
Brian Carlstrom75cb3b42011-07-28 02:13:36 -07002572}
2573
Vladimir Marko70e2a762019-07-12 16:49:00 +01002574template <bool kMovable>
2575ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self,
2576 ObjPtr<mirror::Class> java_lang_Class,
2577 uint32_t class_size) {
2578 mirror::Class::InitializeClassVisitor visitor(class_size);
2579 return AllocClass<kMovable>(self, java_lang_Class, class_size, visitor);
2580}
2581
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01002582ObjPtr<mirror::Class> ClassLinker::AllocClass(Thread* self, uint32_t class_size) {
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002583 return AllocClass(self, GetClassRoot<mirror::Class>(this), class_size);
Brian Carlstroma0808032011-07-18 00:39:23 -07002584}
2585
// Allocates the array class for a primitive component type and stores it in
// the `array_root` class root slot. Only the component type is set here; the
// rest of the setup (vtable, iftable, flags, status) happens later via
// FinishArrayClassSetup().
void ClassLinker::AllocPrimitiveArrayClass(Thread* self,
                                           ClassRoot primitive_root,
                                           ClassRoot array_root) {
  // We make this class non-movable for the unlikely case where it were to be
  // moved by a sticky-bit (minor) collection when using the Generational
  // Concurrent Copying (CC) collector, potentially creating a stale reference
  // in the `klass_` field of one of its instances allocated in the Large-Object
  // Space (LOS) -- see the comment about the dirty card scanning logic in
  // art::gc::collector::ConcurrentCopying::MarkingPhase.
  ObjPtr<mirror::Class> array_class = AllocClass</* kMovable= */ false>(
      self, GetClassRoot<mirror::Class>(this), mirror::Array::ClassSize(image_pointer_size_));
  ObjPtr<mirror::Class> component_type = GetClassRoot(primitive_root, this);
  DCHECK(component_type->IsPrimitive());
  array_class->SetComponentType(component_type);
  SetClassRoot(array_root, array_class);
}
2602
// Completes the setup of an array class whose component type is already set:
// superclass, vtable/imt (shared with java.lang.Object), interfaces
// (the global array iftable), access flags, and finally marks the class
// visibly initialized.
void ClassLinker::FinishArrayClassSetup(ObjPtr<mirror::Class> array_class) {
  ObjPtr<mirror::Class> java_lang_Object = GetClassRoot<mirror::Object>(this);
  array_class->SetSuperClass(java_lang_Object);
  array_class->SetVTable(java_lang_Object->GetVTable());
  array_class->SetPrimitiveType(Primitive::kPrimNot);
  ObjPtr<mirror::Class> component_type = array_class->GetComponentType();
  array_class->SetClassFlags(component_type->IsPrimitive()
                                 ? mirror::kClassFlagNoReferenceFields
                                 : mirror::kClassFlagObjectArray);
  // An array class belongs to the same loader as its component type.
  array_class->SetClassLoader(component_type->GetClassLoader());
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kLoaded);
  array_class->PopulateEmbeddedVTable(image_pointer_size_);
  ImTable* object_imt = java_lang_Object->GetImt(image_pointer_size_);
  array_class->SetImt(object_imt, image_pointer_size_);
  // Arrays declare no methods of their own; everything is inherited.
  DCHECK_EQ(array_class->NumMethods(), 0u);

  // don't need to set new_class->SetObjectSize(..)
  // because Object::SizeOf delegates to Array::SizeOf

  // All arrays have java/lang/Cloneable and java/io/Serializable as
  // interfaces. We need to set that up here, so that stuff like
  // "instanceof" works right.

  // Use the single, global copies of "interfaces" and "iftable"
  // (remember not to free them for arrays).
  {
    ObjPtr<mirror::IfTable> array_iftable = GetArrayIfTable();
    CHECK(array_iftable != nullptr);
    array_class->SetIfTable(array_iftable);
  }

  // Inherit access flags from the component type.
  int access_flags = component_type->GetAccessFlags();
  // Lose any implementation detail flags; in particular, arrays aren't finalizable.
  access_flags &= kAccJavaFlagsMask;
  // Arrays can't be used as a superclass or interface, so we want to add "abstract final"
  // and remove "interface".
  access_flags |= kAccAbstract | kAccFinal;
  access_flags &= ~kAccInterface;

  array_class->SetAccessFlagsDuringLinking(access_flags);

  // Array classes are fully initialized either during single threaded startup,
  // or from a pre-fence visitor, so visibly initialized.
  array_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
}
2649
2650void ClassLinker::FinishCoreArrayClassSetup(ClassRoot array_root) {
2651 // Do not hold lock on the array class object, the initialization of
2652 // core array classes is done while the process is still single threaded.
2653 ObjPtr<mirror::Class> array_class = GetClassRoot(array_root, this);
2654 FinishArrayClassSetup(array_class);
2655
2656 std::string temp;
2657 const char* descriptor = array_class->GetDescriptor(&temp);
2658 size_t hash = ComputeModifiedUtf8Hash(descriptor);
2659 ObjPtr<mirror::Class> existing = InsertClass(descriptor, array_class, hash);
2660 CHECK(existing == nullptr);
Roland Levillain0e840272018-08-23 19:55:30 +01002661}
2662
Vladimir Markobcf17522018-06-01 13:14:32 +01002663ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> ClassLinker::AllocStackTraceElementArray(
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07002664 Thread* self,
2665 size_t length) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07002666 return mirror::ObjectArray<mirror::StackTraceElement>::Alloc(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01002667 self, GetClassRoot<mirror::ObjectArray<mirror::StackTraceElement>>(this), length);
Shih-wei Liao55df06b2011-08-26 14:39:27 -07002668}
2669
// Ensures `klass` is usable by callers: waits for a temporary class to be
// retired and for a competing thread to finish resolving the class. Returns
// the resolved class, or null with a pending exception when the class became
// erroneous or a class circularity involving the current thread is detected.
ObjPtr<mirror::Class> ClassLinker::EnsureResolved(Thread* self,
                                                  const char* descriptor,
                                                  ObjPtr<mirror::Class> klass) {
  DCHECK(klass != nullptr);
  if (kIsDebugBuild) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h = hs.NewHandleWrapper(&klass);
    Thread::PoisonObjectPointersIfDebug();
  }

  // For temporary classes we must wait for them to be retired.
  if (init_done_ && klass->IsTemp()) {
    CHECK(!klass->IsResolved());
    if (klass->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(klass);
      return nullptr;
    }
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    ObjectLock<mirror::Class> lock(self, h_class);
    // Loop and wait for the resolving thread to retire this class.
    while (!h_class->IsRetired() && !h_class->IsErroneousUnresolved()) {
      lock.WaitIgnoringInterrupts();
    }
    if (h_class->IsErroneousUnresolved()) {
      ThrowEarlierClassFailure(h_class.Get());
      return nullptr;
    }
    CHECK(h_class->IsRetired());
    // Get the updated class from class table.
    klass = LookupClass(self, descriptor, h_class.Get()->GetClassLoader());
  }

  // Wait for the class if it has not already been linked.
  size_t index = 0;
  // Maximum number of yield iterations until we start sleeping.
  static const size_t kNumYieldIterations = 1000;
  // How long each sleep is in us.
  static const size_t kSleepDurationUS = 1000;  // 1 ms.
  while (!klass->IsResolved() && !klass->IsErroneousUnresolved()) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h_class(hs.NewHandleWrapper(&klass));
    {
      ObjectTryLock<mirror::Class> lock(self, h_class);
      // Can not use a monitor wait here since it may block when returning and deadlock if another
      // thread has locked klass.
      if (lock.Acquired()) {
        // Check for circular dependencies between classes, the lock is required for SetStatus.
        if (!h_class->IsResolved() && h_class->GetClinitThreadId() == self->GetTid()) {
          ThrowClassCircularityError(h_class.Get());
          mirror::Class::SetStatus(h_class, ClassStatus::kErrorUnresolved, self);
          return nullptr;
        }
      }
    }
    {
      // Handle wrapper deals with klass moving.
      // Suspend while busy-waiting so the GC is not blocked by this thread.
      ScopedThreadSuspension sts(self, ThreadState::kSuspended);
      if (index < kNumYieldIterations) {
        sched_yield();
      } else {
        usleep(kSleepDurationUS);
      }
    }
    ++index;
  }

  if (klass->IsErroneousUnresolved()) {
    ThrowEarlierClassFailure(klass);
    return nullptr;
  }
  // Return the loaded class. No exceptions should be pending.
  CHECK(klass->IsResolved()) << klass->PrettyClass();
  self->AssertNoPendingException();
  return klass;
}
2746
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002747using ClassPathEntry = std::pair<const DexFile*, const dex::ClassDef*>;
Ian Rogers68b56852014-08-29 20:19:11 -07002748
2749// Search a collection of DexFiles for a descriptor
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002750ClassPathEntry FindInClassPath(const char* descriptor,
Igor Murashkinb1d8c312015-08-04 11:18:43 -07002751 size_t hash, const std::vector<const DexFile*>& class_path) {
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002752 for (const DexFile* dex_file : class_path) {
Mathieu Chartier0a19e212019-11-27 14:35:24 -08002753 DCHECK(dex_file != nullptr);
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08002754 const dex::ClassDef* dex_class_def = OatDexFile::FindClassDef(*dex_file, descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07002755 if (dex_class_def != nullptr) {
Ian Rogers68b56852014-08-29 20:19:11 -07002756 return ClassPathEntry(dex_file, dex_class_def);
2757 }
2758 }
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08002759 return ClassPathEntry(nullptr, nullptr);
Ian Rogers68b56852014-08-29 20:19:11 -07002760}
2761
// Helper macro to make sure each class loader lookup call handles the case the
// class loader is not recognized, or the lookup threw an exception.
//
// `call_` is a bool-returning lookup where false means the class loader chain
// was not recognized; `result_` is the out-parameter the lookup filled in; and
// `thread_` is the current thread, used to check for a pending exception. The
// macro returns from the *enclosing function* with false on an unrecognized
// loader, true when the class was found, and false when an exception other
// than ClassNotFound is pending and must be reported to the caller.
#define RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(call_, result_, thread_) \
do { \
  auto local_call = call_; \
  if (!local_call) { \
    return false; \
  } \
  auto local_result = result_; \
  if (local_result != nullptr) { \
    return true; \
  } \
  auto local_thread = thread_; \
  if (local_thread->IsExceptionPending()) { \
    /* Pending exception means there was an error other than */ \
    /* ClassNotFound that must be returned to the caller. */ \
    return false; \
  } \
} while (0)
2781
Vladimir Marko4e0b5d72022-11-09 10:58:15 +00002782bool ClassLinker::FindClassInSharedLibraries(Thread* self,
Nicolas Geoffray80a560c2018-10-26 13:48:51 +01002783 const char* descriptor,
2784 size_t hash,
2785 Handle<mirror::ClassLoader> class_loader,
2786 /*out*/ ObjPtr<mirror::Class>* result) {
Vladimír Marko5be5ce72022-10-18 09:49:00 +00002787 ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
Vladimir Marko4e0b5d72022-11-09 10:58:15 +00002788 return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
Brad Stenning9c924e82021-10-11 19:09:00 -07002789}
2790
// Shared implementation for FindClassInSharedLibraries[After]: iterates the
// ClassLoader[] stored in `field` on `class_loader`, looking the class up in
// each one via FindClassInBaseDexClassLoader. Returns false when a loader in
// the chain is unrecognized or a non-ClassNotFound exception is pending;
// otherwise returns true, with `*result` set if the class was found.
bool ClassLinker::FindClassInSharedLibrariesHelper(Thread* self,
                                                   const char* descriptor,
                                                   size_t hash,
                                                   Handle<mirror::ClassLoader> class_loader,
                                                   ArtField* field,
                                                   /*out*/ ObjPtr<mirror::Class>* result) {
  ObjPtr<mirror::Object> raw_shared_libraries = field->GetObject(class_loader.Get());
  if (raw_shared_libraries == nullptr) {
    // No shared libraries attached to this loader; nothing to search.
    return true;
  }

  StackHandleScope<2> hs(self);
  Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries(
      hs.NewHandle(raw_shared_libraries->AsObjectArray<mirror::ClassLoader>()));
  // Reuse one mutable handle for each element instead of growing the scope.
  MutableHandle<mirror::ClassLoader> temp_loader = hs.NewHandle<mirror::ClassLoader>(nullptr);
  for (auto loader : shared_libraries.Iterate<mirror::ClassLoader>()) {
    temp_loader.Assign(loader);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, temp_loader, result),
        *result,
        self);
  }
  return true;
}
2815
Vladimir Marko4e0b5d72022-11-09 10:58:15 +00002816bool ClassLinker::FindClassInSharedLibrariesAfter(Thread* self,
Brad Stenning9c924e82021-10-11 19:09:00 -07002817 const char* descriptor,
2818 size_t hash,
2819 Handle<mirror::ClassLoader> class_loader,
2820 /*out*/ ObjPtr<mirror::Class>* result) {
Vladimír Marko5be5ce72022-10-18 09:49:00 +00002821 ArtField* field = WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
Vladimir Marko4e0b5d72022-11-09 10:58:15 +00002822 return FindClassInSharedLibrariesHelper(self, descriptor, hash, class_loader, field, result);
Brad Stenning9c924e82021-10-11 19:09:00 -07002823}
2824
// Attempts to find and define `descriptor` by walking the well-known class
// loader chains (boot, Path/Dex, InMemoryDex, DelegateLast). Returns true when
// the whole chain was recognized -- `*result` is then the class or null -- and
// false for an unsupported loader or when a pending exception must be reported
// to the caller.
bool ClassLinker::FindClassInBaseDexClassLoader(Thread* self,
                                                const char* descriptor,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                /*out*/ ObjPtr<mirror::Class>* result) {
  // Termination case: boot class loader.
  if (IsBootClassLoader(class_loader.Get())) {
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    return true;
  }

  if (IsPathOrDexClassLoader(class_loader) || IsInMemoryDexClassLoader(class_loader)) {
    // For regular path or dex class loader the search order is:
    //    - parent
    //    - shared libraries
    //    - class loader dex files

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  if (IsDelegateLastClassLoader(class_loader)) {
    // For delegate last, the search order is:
    //    - boot class path
    //    - shared libraries
    //    - class loader dex files
    //    - parent
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBootClassLoaderClassPath(self, descriptor, hash, result), *result, self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibraries(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoaderClassPath(self, descriptor, hash, class_loader, result),
        *result,
        self);
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInSharedLibrariesAfter(self, descriptor, hash, class_loader, result),
        *result,
        self);

    // Create a handle as RegisterDexFile may allocate dex caches (and cause thread suspension).
    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> h_parent(hs.NewHandle(class_loader->GetParent()));
    RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION(
        FindClassInBaseDexClassLoader(self, descriptor, hash, h_parent, result),
        *result,
        self);
    // We did not find a class, but the class loader chain was recognized, so we
    // return true.
    return true;
  }

  // Unsupported class loader.
  *result = nullptr;
  return false;
}
2904
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002905#undef RETURN_IF_UNRECOGNIZED_OR_FOUND_OR_EXCEPTION
2906
Nicolas Geoffrayc9d59492022-11-17 15:03:09 +00002907namespace {
2908
2909// Matches exceptions caught in DexFile.defineClass.
2910ALWAYS_INLINE bool MatchesDexFileCaughtExceptions(ObjPtr<mirror::Throwable> throwable,
2911 ClassLinker* class_linker)
2912 REQUIRES_SHARED(Locks::mutator_lock_) {
2913 return
2914 // ClassNotFoundException.
2915 throwable->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException,
2916 class_linker))
2917 ||
2918 // NoClassDefFoundError. TODO: Reconsider this. b/130746382.
2919 throwable->InstanceOf(Runtime::Current()->GetPreAllocatedNoClassDefFoundError()->GetClass());
2920}
2921
2922// Clear exceptions caught in DexFile.defineClass.
2923ALWAYS_INLINE void FilterDexFileCaughtExceptions(Thread* self, ClassLinker* class_linker)
2924 REQUIRES_SHARED(Locks::mutator_lock_) {
2925 if (MatchesDexFileCaughtExceptions(self->GetException(), class_linker)) {
2926 self->ClearException();
2927 }
2928}
2929
2930} // namespace
2931
Calin Juravle415dc3d2017-06-28 11:03:12 -07002932// Finds the class in the boot class loader.
2933// If the class is found the method returns the resolved class. Otherwise it returns null.
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002934bool ClassLinker::FindClassInBootClassLoaderClassPath(Thread* self,
2935 const char* descriptor,
2936 size_t hash,
2937 /*out*/ ObjPtr<mirror::Class>* result) {
Calin Juravle415dc3d2017-06-28 11:03:12 -07002938 ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
2939 if (pair.second != nullptr) {
Nicolas Geoffrayc9d59492022-11-17 15:03:09 +00002940 ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, nullptr);
2941 if (klass != nullptr) {
2942 *result = EnsureResolved(self, descriptor, klass);
2943 } else {
2944 *result = DefineClass(self,
2945 descriptor,
2946 hash,
2947 ScopedNullHandle<mirror::ClassLoader>(),
2948 *pair.first,
2949 *pair.second);
2950 }
2951 if (*result == nullptr) {
2952 CHECK(self->IsExceptionPending()) << descriptor;
2953 FilterDexFileCaughtExceptions(self, this);
2954 }
Andreas Gampef865ea92015-04-13 22:14:19 -07002955 }
Nicolas Geoffraye8445e52021-09-23 14:10:05 +01002956 // The boot classloader is always a known lookup.
2957 return true;
Calin Juravle415dc3d2017-06-28 11:03:12 -07002958}
Andreas Gampef865ea92015-04-13 22:14:19 -07002959
// Searches the dex files attached to a BaseDexClassLoader (Path/Dex,
// InMemoryDex or DelegateLast) for `descriptor` and defines the class when a
// matching class_def is found; `*result` is set only in that case. Always
// returns true because a BaseDexClassLoader is a recognized lookup;
// ClassNotFound-style exceptions from the definition attempt are filtered out.
bool ClassLinker::FindClassInBaseDexClassLoaderClassPath(
    Thread* self,
    const char* descriptor,
    size_t hash,
    Handle<mirror::ClassLoader> class_loader,
    /*out*/ ObjPtr<mirror::Class>* result) {
  DCHECK(IsPathOrDexClassLoader(class_loader) ||
         IsInMemoryDexClassLoader(class_loader) ||
         IsDelegateLastClassLoader(class_loader))
      << "Unexpected class loader for descriptor " << descriptor;

  const DexFile* dex_file = nullptr;
  const dex::ClassDef* class_def = nullptr;
  ObjPtr<mirror::Class> ret;
  // Scan each dex file of the loader until one contains a matching class_def.
  auto find_class_def = [&](const DexFile* cp_dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    const dex::ClassDef* cp_class_def = OatDexFile::FindClassDef(*cp_dex_file, descriptor, hash);
    if (cp_class_def != nullptr) {
      dex_file = cp_dex_file;
      class_def = cp_class_def;
      return false;  // Found a class definition, stop visit.
    }
    return true;  // Continue with the next DexFile.
  };
  VisitClassLoaderDexFiles(self, class_loader, find_class_def);

  if (class_def != nullptr) {
    *result = DefineClass(self, descriptor, hash, class_loader, *dex_file, *class_def);
    if (UNLIKELY(*result == nullptr)) {
      CHECK(self->IsExceptionPending()) << descriptor;
      FilterDexFileCaughtExceptions(self, this);
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  // A BaseDexClassLoader is always a known lookup.
  return true;
}
2997
// Resolves `descriptor` (a modified-UTF-8 type descriptor such as "Ljava/lang/String;" or
// "[I") to a Class in the context of `class_loader`, loading and defining the class if it is
// not already present in the class table. Returns the resolved class, or null with an
// exception pending on `self`. If another thread concurrently defines the same class, the
// winning thread's Class is returned (mimicking RI parallel-capable loader behavior).
ObjPtr<mirror::Class> ClassLinker::FindClass(Thread* self,
                                             const char* descriptor,
                                             Handle<mirror::ClassLoader> class_loader) {
  DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
  DCHECK(self != nullptr);
  self->AssertNoPendingException();
  self->PoisonObjectPointers();  // For DefineClass, CreateArrayClass, etc...
  if (descriptor[1] == '\0') {
    // only the descriptors of primitive types should be 1 character long, also avoid class lookup
    // for primitive classes that aren't backed by dex files.
    return FindPrimitiveClass(descriptor[0]);
  }
  const size_t hash = ComputeModifiedUtf8Hash(descriptor);
  // Find the class in the loaded classes table.
  // Fast path: the class may already be loaded (possibly still being resolved by another thread).
  ObjPtr<mirror::Class> klass = LookupClass(self, descriptor, hash, class_loader.Get());
  if (klass != nullptr) {
    return EnsureResolved(self, descriptor, klass);
  }
  // Class is not yet loaded.
  if (descriptor[0] != '[' && class_loader == nullptr) {
    // Non-array class and the boot class loader, search the boot class path.
    ClassPathEntry pair = FindInClassPath(descriptor, hash, boot_class_path_);
    if (pair.second != nullptr) {
      return DefineClass(self,
                         descriptor,
                         hash,
                         ScopedNullHandle<mirror::ClassLoader>(),
                         *pair.first,
                         *pair.second);
    } else {
      // The boot class loader is searched ahead of the application class loader, failures are
      // expected and will be wrapped in a ClassNotFoundException. Use the pre-allocated error to
      // trigger the chaining with a proper stack trace.
      ObjPtr<mirror::Throwable> pre_allocated =
          Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
      self->SetException(pre_allocated);
      return nullptr;
    }
  }
  ObjPtr<mirror::Class> result_ptr;
  // Whether the class we end up with actually has the requested descriptor; a misbehaving
  // loader can return a differently-named class, which is reported as NoClassDefFoundError below.
  bool descriptor_equals;
  if (descriptor[0] == '[') {
    result_ptr = CreateArrayClass(self, descriptor, hash, class_loader);
    DCHECK_EQ(result_ptr == nullptr, self->IsExceptionPending());
    DCHECK(result_ptr == nullptr || result_ptr->DescriptorEquals(descriptor));
    descriptor_equals = true;
  } else {
    ScopedObjectAccessUnchecked soa(self);
    // `known_hierarchy` is true when the loader chain was fully understood natively
    // (Path/InMemoryDex/DelegateLast class loaders) so no Java code had to run.
    bool known_hierarchy =
        FindClassInBaseDexClassLoader(self, descriptor, hash, class_loader, &result_ptr);
    if (result_ptr != nullptr) {
      // The chain was understood and we found the class. We still need to add the class to
      // the class table to protect from racy programs that can try and redefine the path list
      // which would change the Class<?> returned for subsequent evaluation of const-class.
      DCHECK(known_hierarchy);
      DCHECK(result_ptr->DescriptorEquals(descriptor));
      descriptor_equals = true;
    } else if (!self->IsExceptionPending()) {
      // Either the chain wasn't understood or the class wasn't found.
      // If there is a pending exception we didn't clear, it is a not a ClassNotFoundException and
      // we should return it instead of silently clearing and retrying.
      //
      // If the chain was understood but we did not find the class, let the Java-side
      // rediscover all this and throw the exception with the right stack trace. Note that
      // the Java-side could still succeed for racy programs if another thread is actively
      // modifying the class loader's path list.

      // The runtime is not allowed to call into java from a runtime-thread so just abort.
      if (self->IsRuntimeThread()) {
        // Oops, we can't call into java so we can't run actual class-loader code.
        // This is true for e.g. for the compiler (jit or aot).
        ObjPtr<mirror::Throwable> pre_allocated =
            Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
        self->SetException(pre_allocated);
        return nullptr;
      }

      // Inlined DescriptorToDot(descriptor) with extra validation.
      //
      // Throw NoClassDefFoundError early rather than potentially load a class only to fail
      // the DescriptorEquals() check below and give a confusing error message. For example,
      // when native code erroneously calls JNI GetFieldId() with signature "java/lang/String"
      // instead of "Ljava/lang/String;", the message below using the "dot" names would be
      // "class loader [...] returned class java.lang.String instead of java.lang.String".
      size_t descriptor_length = strlen(descriptor);
      if (UNLIKELY(descriptor[0] != 'L') ||
          UNLIKELY(descriptor[descriptor_length - 1] != ';') ||
          UNLIKELY(memchr(descriptor + 1, '.', descriptor_length - 2) != nullptr)) {
        ThrowNoClassDefFoundError("Invalid descriptor: %s.", descriptor);
        return nullptr;
      }

      // Convert "Lfoo/Bar;" to the dotted name "foo.Bar" expected by loadClass().
      std::string class_name_string(descriptor + 1, descriptor_length - 2);
      std::replace(class_name_string.begin(), class_name_string.end(), '/', '.');
      if (known_hierarchy &&
          fast_class_not_found_exceptions_ &&
          !Runtime::Current()->IsJavaDebuggable()) {
        // For known hierarchy, we know that the class is going to throw an exception. If we aren't
        // debuggable, optimize this path by throwing directly here without going back to Java
        // language. This reduces how many ClassNotFoundExceptions happen.
        self->ThrowNewExceptionF("Ljava/lang/ClassNotFoundException;",
                                 "%s",
                                 class_name_string.c_str());
      } else {
        // Delegate to the Java-side ClassLoader.loadClass() to get proper user-visible
        // semantics and stack traces.
        StackHandleScope<1u> hs(self);
        Handle<mirror::String> class_name_object = hs.NewHandle(
            mirror::String::AllocFromModifiedUtf8(self, class_name_string.c_str()));
        if (class_name_object == nullptr) {
          DCHECK(self->IsExceptionPending());  // OOME.
          return nullptr;
        }
        DCHECK(class_loader != nullptr);
        result_ptr = ObjPtr<mirror::Class>::DownCast(
            WellKnownClasses::java_lang_ClassLoader_loadClass->InvokeVirtual<'L', 'L'>(
                self, class_loader.Get(), class_name_object.Get()));
        if (result_ptr == nullptr && !self->IsExceptionPending()) {
          // broken loader - throw NPE to be compatible with Dalvik
          ThrowNullPointerException(StringPrintf("ClassLoader.loadClass returned null for %s",
                                                 class_name_string.c_str()).c_str());
          return nullptr;
        }
        // Check the name of the returned class.
        descriptor_equals = (result_ptr != nullptr) && result_ptr->DescriptorEquals(descriptor);
      }
    } else {
      DCHECK(!MatchesDexFileCaughtExceptions(self->GetException(), this));
    }
  }

  if (self->IsExceptionPending()) {
    // If the ClassLoader threw or array class allocation failed, pass that exception up.
    // However, to comply with the RI behavior, first check if another thread succeeded.
    result_ptr = LookupClass(self, descriptor, hash, class_loader.Get());
    if (result_ptr != nullptr && !result_ptr->IsErroneous()) {
      self->ClearException();
      return EnsureResolved(self, descriptor, result_ptr);
    }
    return nullptr;
  }

  // Try to insert the class to the class table, checking for mismatch.
  ObjPtr<mirror::Class> old;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    ClassTable* const class_table = InsertClassTableForClassLoader(class_loader.Get());
    old = class_table->Lookup(descriptor, hash);
    if (old == nullptr) {
      old = result_ptr;  // For the comparison below, after releasing the lock.
      if (descriptor_equals) {
        class_table->InsertWithHash(result_ptr, hash);
        WriteBarrier::ForEveryFieldWrite(class_loader.Get());
      }  // else throw below, after releasing the lock.
    }
  }
  if (UNLIKELY(old != result_ptr)) {
    // Return `old` (even if `!descriptor_equals`) to mimic the RI behavior for parallel
    // capable class loaders. (All class loaders are considered parallel capable on Android.)
    ObjPtr<mirror::Class> loader_class = class_loader->GetClass();
    const char* loader_class_name =
        loader_class->GetDexFile().StringByTypeIdx(loader_class->GetDexTypeIndex());
    LOG(WARNING) << "Initiating class loader of type " << DescriptorToDot(loader_class_name)
        << " is not well-behaved; it returned a different Class for racing loadClass(\""
        << DescriptorToDot(descriptor) << "\").";
    return EnsureResolved(self, descriptor, old);
  }
  if (UNLIKELY(!descriptor_equals)) {
    // The loader returned a class whose name does not match the request; report it without
    // inserting the mismatched class into the table.
    std::string result_storage;
    const char* result_name = result_ptr->GetDescriptor(&result_storage);
    std::string loader_storage;
    const char* loader_class_name = class_loader->GetClass()->GetDescriptor(&loader_storage);
    ThrowNoClassDefFoundError(
        "Initiating class loader of type %s returned class %s instead of %s.",
        DescriptorToDot(loader_class_name).c_str(),
        DescriptorToDot(result_name).c_str(),
        DescriptorToDot(descriptor).c_str());
    return nullptr;
  }
  // Success.
  return result_ptr;
}
3178
// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
// define-class and how many recursive DefineClasses we are at in order to allow for doing things
// like pausing class definition.
struct ScopedDefiningClass {
 public:
  // Notifies runtime callbacks that a class definition has begun and bumps the per-thread
  // recursive DefineClass count. The mutator lock must be shared-held for the whole scope.
  explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      : self_(self), returned_(false) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
    self_->IncrDefineClassCount();
  }
  // Every exit path of DefineClass must go through one of the Finish() overloads;
  // the destructor enforces this.
  ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(self_);
    CHECK(returned_);
  }

  // Ends the define-class scope (decrements the count, fires EndDefineClass) and returns
  // the given class, which may be null on failure paths. Must be called exactly once.
  ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK(!returned_);
    self_->DecrDefineClassCount();
    Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
    Thread::PoisonObjectPointersIfDebug();
    returned_ = true;
    return h_klass.Get();
  }

  // Convenience overload: wraps a raw ObjPtr in a handle and delegates to Finish(Handle).
  ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(self_);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    return Finish(h_klass);
  }

  // Convenience overload for `return sdc.Finish(nullptr);` on failure paths.
  ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedNullHandle<mirror::Class> snh;
    return Finish(snh);
  }

 private:
  Thread* self_;       // Thread performing the class definition.
  bool returned_;      // Set by Finish(); checked by the destructor.
};
3222
// Creates, loads and links a Class from `dex_class_def` in `dex_file`, inserting it into the
// class table of `class_loader`. `hash` is the precomputed hash of `descriptor`. Returns the
// resolved class, or null with an exception pending on `self`; if another thread defined the
// same class concurrently, the already-inserted class is returned instead.
ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                               const char* descriptor,
                                               size_t hash,
                                               Handle<mirror::ClassLoader> class_loader,
                                               const DexFile& dex_file,
                                               const dex::ClassDef& dex_class_def) {
  ScopedDefiningClass sdc(self);
  StackHandleScope<3> hs(self);
  metrics::AutoTimer timer{GetMetrics()->ClassLoadingTotalTime()};
  metrics::AutoTimer timeDelta{GetMetrics()->ClassLoadingTotalTimeDelta()};
  auto klass = hs.NewHandle<mirror::Class>(nullptr);

  // Load the class from the dex file.
  if (UNLIKELY(!init_done_)) {
    // finish up init of hand crafted class_roots_
    if (strcmp(descriptor, "Ljava/lang/Object;") == 0) {
      klass.Assign(GetClassRoot<mirror::Object>(this));
    } else if (strcmp(descriptor, "Ljava/lang/Class;") == 0) {
      klass.Assign(GetClassRoot<mirror::Class>(this));
    } else if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass.Assign(GetClassRoot<mirror::String>(this));
    } else if (strcmp(descriptor, "Ljava/lang/ref/Reference;") == 0) {
      klass.Assign(GetClassRoot<mirror::Reference>(this));
    } else if (strcmp(descriptor, "Ljava/lang/DexCache;") == 0) {
      klass.Assign(GetClassRoot<mirror::DexCache>(this));
    } else if (strcmp(descriptor, "Ldalvik/system/ClassExt;") == 0) {
      klass.Assign(GetClassRoot<mirror::ClassExt>(this));
    }
  }

  // For AOT-compilation of an app, we may use only a public SDK to resolve symbols. If the SDK
  // checks are configured (a non null SdkChecker) and the descriptor is not in the provided
  // public class path then we prevent the definition of the class.
  //
  // NOTE that we only do the checks for the boot classpath APIs. Anything else, like the app
  // classpath is not checked.
  if (class_loader == nullptr &&
      Runtime::Current()->IsAotCompiler() &&
      DenyAccessBasedOnPublicSdk(descriptor)) {
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
  // code to be executed. We put it up here so we can avoid all the allocations associated with
  // creating the class. This can happen with (eg) jit threads.
  if (!self->CanLoadClasses()) {
    // Make sure we don't try to load anything, potentially causing an infinite loop.
    ObjPtr<mirror::Throwable> pre_allocated =
        Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
    self->SetException(pre_allocated);
    return sdc.Finish(nullptr);
  }

  if (klass == nullptr) {
    // Allocate a class with the status of not ready.
    // Interface object should get the right size here. Regular class will
    // figure out the right size later and be replaced with one of the right
    // size when the class becomes resolved.
    if (CanAllocClass()) {
      klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
    } else {
      return sdc.Finish(nullptr);
    }
  }
  if (UNLIKELY(klass == nullptr)) {
    self->AssertPendingOOMException();
    return sdc.Finish(nullptr);
  }
  // Get the real dex file. This will return the input if there aren't any callbacks or they do
  // nothing.
  DexFile const* new_dex_file = nullptr;
  dex::ClassDef const* new_class_def = nullptr;
  // TODO We should ideally figure out some way to move this after we get a lock on the klass so it
  // will only be called once.
  Runtime::Current()->GetRuntimeCallbacks()->ClassPreDefine(descriptor,
                                                            klass,
                                                            class_loader,
                                                            dex_file,
                                                            dex_class_def,
                                                            &new_dex_file,
                                                            &new_class_def);
  // Check to see if an exception happened during runtime callbacks. Return if so.
  if (self->IsExceptionPending()) {
    return sdc.Finish(nullptr);
  }
  ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
  if (dex_cache == nullptr) {
    self->AssertPendingException();
    return sdc.Finish(nullptr);
  }
  klass->SetDexCache(dex_cache);
  SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());

  // Mark the string class by setting its access flag.
  if (UNLIKELY(!init_done_)) {
    if (strcmp(descriptor, "Ljava/lang/String;") == 0) {
      klass->SetStringClass();
    }
  }

  // Hold the class' object lock for the remainder of definition, so status changes and
  // notifications are consistent.
  ObjectLock<mirror::Class> lock(self, klass);
  klass->SetClinitThreadId(self->GetTid());
  // Make sure we have a valid empty iftable even if there are errors.
  klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());

  // Add the newly loaded class to the loaded classes table.
  ObjPtr<mirror::Class> existing = InsertClass(descriptor, klass.Get(), hash);
  if (existing != nullptr) {
    // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
    // this thread to block.
    return sdc.Finish(EnsureResolved(self, descriptor, existing));
  }

  // Load the fields and other things after we are inserted in the table. This is so that we don't
  // end up allocating unfree-able linear alloc resources and then lose the race condition. The
  // other reason is that the field roots are only visited from the class table. So we need to be
  // inserted before we allocate / fill in these fields.
  LoadClass(self, *new_dex_file, *new_class_def, klass);
  if (self->IsExceptionPending()) {
    VLOG(class_linker) << self->GetException()->Dump();
    // An exception occured during load, set status to erroneous while holding klass' lock in case
    // notification is necessary.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }

  // Finish loading (if necessary) by finding parents
  CHECK(!klass->IsLoaded());
  if (!LoadSuperAndInterfaces(klass, *new_dex_file)) {
    // Loading failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  CHECK(klass->IsLoaded());

  // At this point the class is loaded. Publish a ClassLoad event.
  // Note: this may be a temporary class. It is a listener's responsibility to handle this.
  Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(klass);

  // Link the class (if necessary)
  CHECK(!klass->IsResolved());
  // TODO: Use fast jobjects?
  auto interfaces = hs.NewHandle<mirror::ObjectArray<mirror::Class>>(nullptr);

  // LinkClass may replace the temporary class with a final one of the right size;
  // `h_new_class` receives the class callers should use from here on.
  MutableHandle<mirror::Class> h_new_class = hs.NewHandle<mirror::Class>(nullptr);
  if (!LinkClass(self, descriptor, klass, interfaces, &h_new_class)) {
    // Linking failed.
    if (!klass->IsErroneous()) {
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
    }
    return sdc.Finish(nullptr);
  }
  self->AssertNoPendingException();
  CHECK(h_new_class != nullptr) << descriptor;
  CHECK(h_new_class->IsResolved()) << descriptor << " " << h_new_class->GetStatus();

  // Instrumentation may have updated entrypoints for all methods of all
  // classes. However it could not update methods of this class while we
  // were loading it. Now the class is resolved, we can update entrypoints
  // as required by instrumentation.
  if (Runtime::Current()->GetInstrumentation()->EntryExitStubsInstalled()) {
    // We must be in the kRunnable state to prevent instrumentation from
    // suspending all threads to update entrypoints while we are doing it
    // for this class.
    DCHECK_EQ(self->GetState(), ThreadState::kRunnable);
    Runtime::Current()->GetInstrumentation()->InstallStubsForClass(h_new_class.Get());
  }

  /*
   * We send CLASS_PREPARE events to the debugger from here. The
   * definition of "preparation" is creating the static fields for a
   * class and initializing them to the standard default values, but not
   * executing any code (that comes later, during "initialization").
   *
   * We did the static preparation in LinkClass.
   *
   * The class has been prepared and resolved but possibly not yet verified
   * at this point.
   */
  Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(klass, h_new_class);

  // Notify native debugger of the new class and its layout.
  jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());

  return sdc.Finish(h_new_class);
}
3416
Mingyao Yang98d1cc82014-05-15 17:02:16 -07003417uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003418 const dex::ClassDef& dex_class_def) {
Brian Carlstrom4873d462011-08-21 15:23:39 -07003419 size_t num_ref = 0;
Fred Shih37f05ef2014-07-16 18:38:08 -07003420 size_t num_8 = 0;
3421 size_t num_16 = 0;
Brian Carlstrom4873d462011-08-21 15:23:39 -07003422 size_t num_32 = 0;
3423 size_t num_64 = 0;
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003424 ClassAccessor accessor(dex_file, dex_class_def);
3425 // We allow duplicate definitions of the same field in a class_data_item
3426 // but ignore the repeated indexes here, b/21868015.
3427 uint32_t last_field_idx = dex::kDexNoIndex;
3428 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
3429 uint32_t field_idx = field.GetIndex();
3430 // Ordering enforced by DexFileVerifier.
3431 DCHECK(last_field_idx == dex::kDexNoIndex || last_field_idx <= field_idx);
3432 if (UNLIKELY(field_idx == last_field_idx)) {
3433 continue;
3434 }
3435 last_field_idx = field_idx;
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003436 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003437 const char* descriptor = dex_file.GetFieldTypeDescriptor(field_id);
3438 char c = descriptor[0];
3439 switch (c) {
3440 case 'L':
3441 case '[':
3442 num_ref++;
3443 break;
3444 case 'J':
3445 case 'D':
3446 num_64++;
3447 break;
3448 case 'I':
3449 case 'F':
3450 num_32++;
3451 break;
3452 case 'S':
3453 case 'C':
3454 num_16++;
3455 break;
3456 case 'B':
3457 case 'Z':
3458 num_8++;
3459 break;
3460 default:
3461 LOG(FATAL) << "Unknown descriptor: " << c;
3462 UNREACHABLE();
Brian Carlstrom4873d462011-08-21 15:23:39 -07003463 }
3464 }
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003465 return mirror::Class::ComputeClassSize(false,
3466 0,
3467 num_8,
3468 num_16,
3469 num_32,
3470 num_64,
3471 num_ref,
Mathieu Chartiere401d142015-04-22 13:56:20 -07003472 image_pointer_size_);
Brian Carlstrom4873d462011-08-21 15:23:39 -07003473}
3474
// Fixes up entry points of `klass`'s direct methods once the class is visibly initialized:
// installs any registered @CriticalNative JNI entrypoints that were parked pending the clinit
// check, and (outside the AOT compiler) updates code entrypoints for methods that needed a
// class-initialization check before call. Virtual methods are intentionally untouched.
void ClassLinker::FixupStaticTrampolines(Thread* self, ObjPtr<mirror::Class> klass) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);
  DCHECK(klass->IsVisiblyInitialized()) << klass->PrettyDescriptor();
  size_t num_direct_methods = klass->NumDirectMethods();
  if (num_direct_methods == 0) {
    return;  // No direct methods => no static methods.
  }
  if (UNLIKELY(klass->IsProxyClass())) {
    return;
  }
  PointerSize pointer_size = image_pointer_size_;
  if (std::any_of(klass->GetDirectMethods(pointer_size).begin(),
                  klass->GetDirectMethods(pointer_size).end(),
                  [](const ArtMethod& m) { return m.IsCriticalNative(); })) {
    // Store registered @CriticalNative methods, if any, to JNI entrypoints.
    // Direct methods are a contiguous chunk of memory, so use the ordering of the map.
    ArtMethod* first_method = klass->GetDirectMethod(0u, pointer_size);
    ArtMethod* last_method = klass->GetDirectMethod(num_direct_methods - 1u, pointer_size);
    MutexLock lock(self, critical_native_code_with_clinit_check_lock_);
    // Walk the [first_method, last_method] range of the ordered map, installing the stashed
    // JNI code and erasing each entry as it is consumed.
    auto lb = critical_native_code_with_clinit_check_.lower_bound(first_method);
    while (lb != critical_native_code_with_clinit_check_.end() && lb->first <= last_method) {
      lb->first->SetEntryPointFromJni(lb->second);
      lb = critical_native_code_with_clinit_check_.erase(lb);
    }
  }
  Runtime* runtime = Runtime::Current();
  if (runtime->IsAotCompiler()) {
    // We should not update entrypoints when running the transactional
    // interpreter.
    return;
  }

  instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
  for (size_t method_index = 0; method_index < num_direct_methods; ++method_index) {
    ArtMethod* method = klass->GetDirectMethod(method_index, pointer_size);
    if (method->NeedsClinitCheckBeforeCall()) {
      instrumentation->UpdateMethodsCode(method, instrumentation->GetCodeForInvoke(method));
    }
  }
  // Ignore virtual methods on the iterator.
}
3516
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003517// Does anything needed to make sure that the compiler will not generate a direct invoke to this
3518// method. Should only be called on non-invokable methods.
Nicolas Geoffrayf05f04b2019-10-31 11:50:41 +00003519inline void EnsureThrowsInvocationError(ClassLinker* class_linker, ArtMethod* method)
3520 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light9139e002015-10-09 15:59:48 -07003521 DCHECK(method != nullptr);
3522 DCHECK(!method->IsInvokable());
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003523 method->SetEntryPointFromQuickCompiledCodePtrSize(
3524 class_linker->GetQuickToInterpreterBridgeTrampoline(),
3525 class_linker->GetImagePointerSize());
Alex Light9139e002015-10-09 15:59:48 -07003526}
3527
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003528static void LinkCode(ClassLinker* class_linker,
3529 ArtMethod* method,
3530 const OatFile::OatClass* oat_class,
3531 uint32_t class_def_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light2d441b12018-06-08 15:33:21 -07003532 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003533 Runtime* const runtime = Runtime::Current();
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08003534 if (runtime->IsAotCompiler()) {
Nicolas Geoffray4fcdc942014-07-22 10:48:00 +01003535 // The following code only applies to a non-compiler runtime.
3536 return;
3537 }
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003538
Ian Rogers62d6c772013-02-27 08:32:07 -08003539 // Method shouldn't have already been linked.
Lokesh Gidra485a7142022-10-12 10:25:23 -07003540 DCHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), nullptr);
Nicolas Geoffray854af032021-12-21 08:32:42 +00003541 DCHECK(!method->GetDeclaringClass()->IsVisiblyInitialized()); // Actually ClassStatus::Idx.
Jeff Hao16743632013-05-08 10:59:04 -07003542
Alex Light9139e002015-10-09 15:59:48 -07003543 if (!method->IsInvokable()) {
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01003544 EnsureThrowsInvocationError(class_linker, method);
Brian Carlstrom92827a52011-10-10 15:50:01 -07003545 return;
3546 }
Ian Rogers19846512012-02-24 11:42:47 -08003547
Nicolas Geoffray5ee206f2019-10-08 15:09:17 +01003548 const void* quick_code = nullptr;
3549 if (oat_class != nullptr) {
3550 // Every kind of method should at least get an invoke stub from the oat_method.
3551 // non-abstract methods also get their code pointers.
3552 const OatFile::OatMethod oat_method = oat_class->GetOatMethod(class_def_method_index);
3553 quick_code = oat_method.GetQuickCode();
3554 }
Nicolas Geoffray854af032021-12-21 08:32:42 +00003555 runtime->GetInstrumentation()->InitializeMethodsCode(method, quick_code);
jeffhao26c0a1a2012-01-17 16:28:33 -08003556
Ian Rogers62d6c772013-02-27 08:32:07 -08003557 if (method->IsNative()) {
Vladimir Marko86c87522020-05-11 16:55:55 +01003558 // Set up the dlsym lookup stub. Do not go through `UnregisterNative()`
3559 // as the extra processing for @CriticalNative is not needed yet.
3560 method->SetEntryPointFromJni(
3561 method->IsCriticalNative() ? GetJniDlsymLookupCriticalStub() : GetJniDlsymLookupStub());
Brian Carlstrom92827a52011-10-10 15:50:01 -07003562 }
3563}
3564
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003565void ClassLinker::SetupClass(const DexFile& dex_file,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08003566 const dex::ClassDef& dex_class_def,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003567 Handle<mirror::Class> klass,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003568 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08003569 CHECK(klass != nullptr);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003570 CHECK(klass->GetDexCache() != nullptr);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003571 CHECK_EQ(ClassStatus::kNotReady, klass->GetStatus());
Brian Carlstromf615a612011-07-23 12:50:34 -07003572 const char* descriptor = dex_file.GetClassDescriptor(dex_class_def);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07003573 CHECK(descriptor != nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003574
Vladimir Markob4eb1b12018-05-24 11:09:38 +01003575 klass->SetClass(GetClassRoot<mirror::Class>(this));
Andreas Gampe51829322014-08-25 15:05:04 -07003576 uint32_t access_flags = dex_class_def.GetJavaAccessFlags();
Brian Carlstrom8e3fb142013-10-09 21:00:27 -07003577 CHECK_EQ(access_flags & ~kAccJavaFlagsMask, 0U);
Vladimir Markob68bb7a2020-03-17 10:55:25 +00003578 klass->SetAccessFlagsDuringLinking(access_flags);
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07003579 klass->SetClassLoader(class_loader);
Ian Rogersc2b44472011-12-14 21:17:17 -08003580 DCHECK_EQ(klass->GetPrimitiveType(), Primitive::kPrimNot);
Vladimir Marko2c64a832018-01-04 11:31:56 +00003581 mirror::Class::SetStatus(klass, ClassStatus::kIdx, nullptr);
Brian Carlstrom934486c2011-07-12 23:42:50 -07003582
Ian Rogers8b2c0b92013-09-19 02:56:49 -07003583 klass->SetDexClassDefIndex(dex_file.GetIndexForClassDef(dex_class_def));
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003584 klass->SetDexTypeIndex(dex_class_def.class_idx_);
Mathieu Chartierc7853442015-03-27 14:35:38 -07003585}
Brian Carlstrom934486c2011-07-12 23:42:50 -07003586
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003587LengthPrefixedArray<ArtField>* ClassLinker::AllocArtFieldArray(Thread* self,
3588 LinearAlloc* allocator,
3589 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003590 if (length == 0) {
3591 return nullptr;
3592 }
Vladimir Markocf36d492015-08-12 19:27:26 +01003593 // If the ArtField alignment changes, review all uses of LengthPrefixedArray<ArtField>.
3594 static_assert(alignof(ArtField) == 4, "ArtField alignment is expected to be 4.");
3595 size_t storage_size = LengthPrefixedArray<ArtField>::ComputeSize(length);
Lokesh Gidra606bd942022-05-23 19:00:09 +00003596 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtFieldArray);
Vladimir Markocf36d492015-08-12 19:27:26 +01003597 auto* ret = new(array_storage) LengthPrefixedArray<ArtField>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003598 CHECK(ret != nullptr);
3599 std::uninitialized_fill_n(&ret->At(0), length, ArtField());
3600 return ret;
Mathieu Chartierc7853442015-03-27 14:35:38 -07003601}
3602
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003603LengthPrefixedArray<ArtMethod>* ClassLinker::AllocArtMethodArray(Thread* self,
3604 LinearAlloc* allocator,
3605 size_t length) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003606 if (length == 0) {
3607 return nullptr;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003608 }
Vladimir Marko14632852015-08-17 12:07:23 +01003609 const size_t method_alignment = ArtMethod::Alignment(image_pointer_size_);
3610 const size_t method_size = ArtMethod::Size(image_pointer_size_);
Vladimir Markocf36d492015-08-12 19:27:26 +01003611 const size_t storage_size =
3612 LengthPrefixedArray<ArtMethod>::ComputeSize(length, method_size, method_alignment);
Lokesh Gidra606bd942022-05-23 19:00:09 +00003613 void* array_storage = allocator->Alloc(self, storage_size, LinearAllocKind::kArtMethodArray);
Vladimir Markocf36d492015-08-12 19:27:26 +01003614 auto* ret = new (array_storage) LengthPrefixedArray<ArtMethod>(length);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003615 CHECK(ret != nullptr);
3616 for (size_t i = 0; i < length; ++i) {
Vladimir Markocf36d492015-08-12 19:27:26 +01003617 new(reinterpret_cast<void*>(&ret->At(i, method_size, method_alignment))) ArtMethod;
Mathieu Chartier54d220e2015-07-30 16:20:06 -07003618 }
3619 return ret;
Mathieu Chartiere401d142015-04-22 13:56:20 -07003620}
3621
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003622LinearAlloc* ClassLinker::GetAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07003623 if (class_loader == nullptr) {
3624 return Runtime::Current()->GetLinearAlloc();
3625 }
3626 LinearAlloc* allocator = class_loader->GetAllocator();
3627 DCHECK(allocator != nullptr);
3628 return allocator;
3629}
3630
Mathieu Chartier28357fa2016-10-18 16:27:40 -07003631LinearAlloc* ClassLinker::GetOrCreateAllocatorForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003632 if (class_loader == nullptr) {
3633 return Runtime::Current()->GetLinearAlloc();
3634 }
3635 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
3636 LinearAlloc* allocator = class_loader->GetAllocator();
3637 if (allocator == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08003638 RegisterClassLoader(class_loader);
3639 allocator = class_loader->GetAllocator();
3640 CHECK(allocator != nullptr);
Mathieu Chartierd57d4542015-10-14 10:55:30 -07003641 }
3642 return allocator;
3643}
3644
// Second stage of class loading (after SetupClass): allocate the ArtField and
// ArtMethod arrays for `klass` from its dex class data, populate and link each
// entry, then publish the field arrays on the class object.
void ClassLinker::LoadClass(Thread* self,
                            const DexFile& dex_file,
                            const dex::ClassDef& dex_class_def,
                            Handle<mirror::Class> klass) {
  ClassAccessor accessor(dex_file,
                         dex_class_def,
                         /* parse_hiddenapi_class_data= */ klass->IsBootStrapClassLoaded());
  if (!accessor.HasClassData()) {
    // Class declares no fields or methods; nothing to load.
    return;
  }
  Runtime* const runtime = Runtime::Current();
  {
    // Note: We cannot have thread suspension until the field and method arrays are setup or else
    // Class::VisitFieldRoots may miss some fields or methods.
    ScopedAssertNoThreadSuspension nts(__FUNCTION__);
    // Load static fields.
    // We allow duplicate definitions of the same field in a class_data_item
    // but ignore the repeated indexes here, b/21868015.
    LinearAlloc* const allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
    LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumStaticFields());
    LengthPrefixedArray<ArtField>* ifields = AllocArtFieldArray(self,
                                                                allocator,
                                                                accessor.NumInstanceFields());
    size_t num_sfields = 0u;
    size_t num_ifields = 0u;
    uint32_t last_static_field_idx = 0u;
    uint32_t last_instance_field_idx = 0u;

    // Methods. Only look for compiled code in the oat file when the runtime
    // is started and not the AOT compiler.
    bool has_oat_class = false;
    const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
        ? OatFile::FindOatClass(dex_file, klass->GetDexClassDefIndex(), &has_oat_class)
        : OatFile::OatClass::Invalid();
    const OatFile::OatClass* oat_class_ptr = has_oat_class ? &oat_class : nullptr;
    klass->SetMethodsPtr(
        AllocArtMethodArray(self, allocator, accessor.NumMethods()),
        accessor.NumDirectMethods(),
        accessor.NumVirtualMethods());
    size_t class_def_method_index = 0;
    uint32_t last_dex_method_index = dex::kDexNoIndex;
    size_t last_class_def_method_index = 0;

    // Initial hotness counter value for loaded methods, taken from the JIT
    // warmup threshold.
    uint16_t hotness_threshold = runtime->GetJITOptions()->GetWarmupThreshold();
    // Use the visitor since the ranged based loops are bit slower from seeking. Seeking to the
    // methods needs to decode all of the fields.
    // The four callbacks handle, in order: static fields, instance fields,
    // direct methods, and virtual methods.
    accessor.VisitFieldsAndMethods([&](
        const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_static_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_sfields == 0 || LIKELY(field_idx > last_static_field_idx)) {
            // Skip duplicate field indices (see b/21868015 above).
            LoadField(field, klass, &sfields->At(num_sfields));
            ++num_sfields;
            last_static_field_idx = field_idx;
          }
        }, [&](const ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) {
          uint32_t field_idx = field.GetIndex();
          DCHECK_GE(field_idx, last_instance_field_idx);  // Ordering enforced by DexFileVerifier.
          if (num_ifields == 0 || LIKELY(field_idx > last_instance_field_idx)) {
            // Skip duplicate field indices (see b/21868015 above).
            LoadField(field, klass, &ifields->At(num_ifields));
            ++num_ifields;
            last_instance_field_idx = field_idx;
          }
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Direct method.
          ArtMethod* art_method = klass->GetDirectMethodUnchecked(class_def_method_index,
                                                                  image_pointer_size_);
          LoadMethod(dex_file, method, klass.Get(), art_method);
          LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
          uint32_t it_method_index = method.GetIndex();
          if (last_dex_method_index == it_method_index) {
            // duplicate case
            art_method->SetMethodIndex(last_class_def_method_index);
          } else {
            art_method->SetMethodIndex(class_def_method_index);
            last_dex_method_index = it_method_index;
            last_class_def_method_index = class_def_method_index;
          }
          art_method->ResetCounter(hotness_threshold);
          ++class_def_method_index;
        }, [&](const ClassAccessor::Method& method) REQUIRES_SHARED(Locks::mutator_lock_) {
          // Virtual method; its slot index is relative to the direct methods.
          ArtMethod* art_method = klass->GetVirtualMethodUnchecked(
              class_def_method_index - accessor.NumDirectMethods(),
              image_pointer_size_);
          art_method->ResetCounter(hotness_threshold);
          LoadMethod(dex_file, method, klass.Get(), art_method);
          LinkCode(this, art_method, oat_class_ptr, class_def_method_index);
          ++class_def_method_index;
        });

    if (UNLIKELY(num_ifields + num_sfields != accessor.NumFields())) {
      // Duplicate field indices were skipped above, so fewer unique fields
      // were loaded than declared.
      LOG(WARNING) << "Duplicate fields in class " << klass->PrettyDescriptor()
          << " (unique static fields: " << num_sfields << "/" << accessor.NumStaticFields()
          << ", unique instance fields: " << num_ifields << "/" << accessor.NumInstanceFields()
          << ")";
      // NOTE: Not shrinking the over-allocated sfields/ifields, just setting size.
      if (sfields != nullptr) {
        sfields->SetSize(num_sfields);
      }
      if (ifields != nullptr) {
        ifields->SetSize(num_ifields);
      }
    }
    // Set the field arrays.
    klass->SetSFieldsPtr(sfields);
    DCHECK_EQ(klass->NumStaticFields(), num_sfields);
    klass->SetIFieldsPtr(ifields);
    DCHECK_EQ(klass->NumInstanceFields(), num_ifields);
  }
  // Ensure that the card is marked so that remembered sets pick up native roots.
  WriteBarrier::ForEveryFieldWrite(klass.Get());
  self->AllowThreadSuspension();
}
3758
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003759void ClassLinker::LoadField(const ClassAccessor::Field& field,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07003760 Handle<mirror::Class> klass,
Mathieu Chartierc7853442015-03-27 14:35:38 -07003761 ArtField* dst) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07003762 const uint32_t field_idx = field.GetIndex();
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08003763 dst->SetDexFieldIndex(field_idx);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07003764 dst->SetDeclaringClass(klass.Get());
David Brazdilf6a8a552018-01-15 18:10:50 +00003765
David Brazdil85865692018-10-30 17:26:20 +00003766 // Get access flags from the DexFile and set hiddenapi runtime access flags.
3767 dst->SetAccessFlags(field.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(field));
Brian Carlstrom934486c2011-07-12 23:42:50 -07003768}
3769
// Populate `dst` with the metadata of `method`: dex method index, declaring
// class, access flags and code-item data. Also applies special-case fix-ups:
// finalizer detection (sets the declaring class finalizable), constructor
// access-flag repair (b/11157540), nterp fast-path flags derived from the
// shorty, and native/abstract entrypoint data initialization.
void ClassLinker::LoadMethod(const DexFile& dex_file,
                             const ClassAccessor::Method& method,
                             ObjPtr<mirror::Class> klass,
                             ArtMethod* dst) {
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  const uint32_t dex_method_idx = method.GetIndex();
  const dex::MethodId& method_id = dex_file.GetMethodId(dex_method_idx);
  uint32_t name_utf16_length;
  const char* method_name = dex_file.StringDataAndUtf16LengthByIdx(method_id.name_idx_,
                                                                   &name_utf16_length);
  std::string_view shorty = dex_file.GetShortyView(dex_file.GetProtoId(method_id.proto_idx_));

  dst->SetDexMethodIndex(dex_method_idx);
  dst->SetDeclaringClass(klass);

  // Get access flags from the DexFile and set hiddenapi runtime access flags.
  uint32_t access_flags = method.GetAccessFlags() | hiddenapi::CreateRuntimeFlags(method);

  // True if `method_name` equals the given ASCII literal of `length` bytes.
  // The UTF-16 length comparison plus the NUL check at `length` rule out any
  // non-ASCII name of the same decoded length.
  auto has_ascii_name = [method_name, name_utf16_length](const char* ascii_name,
                                                         size_t length) ALWAYS_INLINE {
    DCHECK_EQ(strlen(ascii_name), length);
    return length == name_utf16_length &&
           method_name[length] == 0 &&  // Is `method_name` an ASCII string?
           memcmp(ascii_name, method_name, length) == 0;
  };
  if (UNLIKELY(has_ascii_name("finalize", sizeof("finalize") - 1u))) {
    // Set finalizable flag on declaring class.
    if (shorty == "V") {
      // Void return type.
      if (klass->GetClassLoader() != nullptr) {  // All non-boot finalizer methods are flagged.
        klass->SetFinalizable();
      } else {
        std::string_view klass_descriptor =
            dex_file.GetTypeDescriptorView(dex_file.GetTypeId(klass->GetDexTypeIndex()));
        // The Enum class declares a "final" finalize() method to prevent subclasses from
        // introducing a finalizer. We don't want to set the finalizable flag for Enum or its
        // subclasses, so we exclude it here.
        // We also want to avoid setting the flag on Object, where we know that finalize() is
        // empty.
        if (klass_descriptor != "Ljava/lang/Object;" &&
            klass_descriptor != "Ljava/lang/Enum;") {
          klass->SetFinalizable();
        }
      }
    }
  } else if (method_name[0] == '<') {
    // Fix broken access flags for initializers. Bug 11157540.
    bool is_init = has_ascii_name("<init>", sizeof("<init>") - 1u);
    bool is_clinit = has_ascii_name("<clinit>", sizeof("<clinit>") - 1u);
    if (UNLIKELY(!is_init && !is_clinit)) {
      LOG(WARNING) << "Unexpected '<' at start of method name " << method_name;
    } else {
      if (UNLIKELY((access_flags & kAccConstructor) == 0)) {
        LOG(WARNING) << method_name << " didn't have expected constructor access flag in class "
            << klass->PrettyDescriptor() << " in dex file " << dex_file.GetLocation();
        access_flags |= kAccConstructor;
      }
    }
  }

  // Check for nterp invoke fast-path based on shorty.
  // shorty[0] is the return type; parameters start at index 1.
  bool all_parameters_are_reference = true;
  bool all_parameters_are_reference_or_int = true;
  for (size_t i = 1; i < shorty.length(); ++i) {
    if (shorty[i] != 'L') {
      all_parameters_are_reference = false;
      // Float, double and long parameters disqualify the fast path.
      if (shorty[i] == 'F' || shorty[i] == 'D' || shorty[i] == 'J') {
        all_parameters_are_reference_or_int = false;
        break;
      }
    }
  }
  if (all_parameters_are_reference_or_int && shorty[0] != 'F' && shorty[0] != 'D') {
    access_flags |= kAccNterpInvokeFastPathFlag;
  }

  if (UNLIKELY((access_flags & kAccNative) != 0u)) {
    // Check if the native method is annotated with @FastNative or @CriticalNative.
    const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
    access_flags |=
        annotations::GetNativeMethodAnnotationAccessFlags(dex_file, class_def, dex_method_idx);
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // JNI stub/trampoline not linked yet.
  } else if ((access_flags & kAccAbstract) != 0u) {
    dst->SetAccessFlags(access_flags);
    // Must be done after SetAccessFlags since IsAbstract depends on it.
    DCHECK(dst->IsAbstract());
    if (klass->IsInterface()) {
      dst->CalculateAndSetImtIndex();
    }
    DCHECK(!dst->HasCodeItem());
    DCHECK_EQ(method.GetCodeItemOffset(), 0u);
    dst->SetDataPtrSize(nullptr, image_pointer_size_);  // Single implementation not set yet.
  } else {
    // Method with a code item (neither native nor abstract).
    // Check for nterp entry fast-path based on shorty.
    if (all_parameters_are_reference) {
      access_flags |= kAccNterpEntryPointFastPathFlag;
    }
    const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
    if (annotations::MethodIsNeverCompile(dex_file, class_def, dex_method_idx)) {
      access_flags |= kAccCompileDontBother;
    }
    dst->SetAccessFlags(access_flags);
    DCHECK(!dst->IsAbstract());
    DCHECK(dst->HasCodeItem());
    uint32_t code_item_offset = method.GetCodeItemOffset();
    DCHECK_NE(code_item_offset, 0u);
    if (Runtime::Current()->IsAotCompiler()) {
      // The AOT compiler stores the raw offset; a runtime stores the pointer.
      dst->SetDataPtrSize(reinterpret_cast32<void*>(code_item_offset), image_pointer_size_);
    } else {
      dst->SetCodeItem(dex_file.GetCodeItem(code_item_offset), dex_file.IsCompactDexFile());
    }
  }

  if (Runtime::Current()->IsZygote() &&
      !Runtime::Current()->GetJITOptions()->GetProfileSaverOptions().GetProfileBootClassPath()) {
    dst->SetMemorySharedMethod();
  }
}
3893
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003894void ClassLinker::AppendToBootClassPath(Thread* self, const DexFile* dex_file) {
David Srbecky33df0e32021-09-30 14:36:32 +00003895 ObjPtr<mirror::DexCache> dex_cache =
3896 AllocAndInitializeDexCache(self, *dex_file, /* class_loader= */ nullptr);
Mathieu Chartier0a19e212019-11-27 14:35:24 -08003897 CHECK(dex_cache != nullptr) << "Failed to allocate dex cache for " << dex_file->GetLocation();
Brian Carlstrom40381fb2011-10-19 14:13:40 -07003898 AppendToBootClassPath(dex_file, dex_cache);
Brian Carlstroma663ea52011-08-19 23:33:41 -07003899}
3900
// Append an already-created dex cache and its dex file to the boot class path
// and register the pair under the dex lock.
void ClassLinker::AppendToBootClassPath(const DexFile* dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache) {
  CHECK(dex_file != nullptr);
  CHECK(dex_cache != nullptr) << dex_file->GetLocation();
  // The dex cache must already be bound to this exact dex file.
  CHECK_EQ(dex_cache->GetDexFile(), dex_file) << dex_file->GetLocation();
  boot_class_path_.push_back(dex_file);
  // RegisterDexFileLocked() requires exclusive ownership of the dex lock.
  WriterMutexLock mu(Thread::Current(), *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, dex_cache, /* class_loader= */ nullptr);
}
3910
// Record the `dex_file` -> `dex_cache` association in `dex_caches_` while the
// dex lock is held exclusively. Also performs one-time initialization of the
// dex file's oat file data (if any), assigns the hiddenapi domain, and roots
// the dex cache in the class loader's class table.
void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
                                        ObjPtr<mirror::DexCache> dex_cache,
                                        ObjPtr<mirror::ClassLoader> class_loader) {
  Thread* const self = Thread::Current();
  Locks::dex_lock_->AssertExclusiveHeld(self);
  CHECK(dex_cache != nullptr) << dex_file.GetLocation();
  CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
  // For app images, the dex cache location may be a suffix of the dex file location since the
  // dex file location is an absolute path.
  const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
  const size_t dex_cache_length = dex_cache_location.length();
  CHECK_GT(dex_cache_length, 0u) << dex_file.GetLocation();
  std::string dex_file_location = dex_file.GetLocation();
  // The following paths checks don't work on preopt when using boot dex files, where the dex
  // cache location is the one on device, and the dex_file's location is the one on host.
  Runtime* runtime = Runtime::Current();
  if (!(runtime->IsAotCompiler() && class_loader == nullptr && !kIsTargetBuild)) {
    CHECK_GE(dex_file_location.length(), dex_cache_length)
        << dex_cache_location << " " << dex_file.GetLocation();
    const std::string dex_file_suffix = dex_file_location.substr(
        dex_file_location.length() - dex_cache_length,
        dex_cache_length);
    // Example dex_cache location is SettingsProvider.apk and
    // dex file location is /system/priv-app/SettingsProvider/SettingsProvider.apk
    CHECK_EQ(dex_cache_location, dex_file_suffix);
  }

  // Check if we need to initialize OatFile data (.data.bimg.rel.ro and .bss
  // sections) needed for code execution and register the oat code range.
  const OatFile* oat_file =
      (dex_file.GetOatDexFile() != nullptr) ? dex_file.GetOatDexFile()->GetOatFile() : nullptr;
  bool initialize_oat_file_data = (oat_file != nullptr) && oat_file->IsExecutable();
  if (initialize_oat_file_data) {
    // Scan the already-registered dex caches: if any live entry shares this
    // oat file, its data has been initialized before.
    for (const auto& entry : dex_caches_) {
      if (!self->IsJWeakCleared(entry.second.weak_root) &&
          entry.first->GetOatDexFile() != nullptr &&
          entry.first->GetOatDexFile()->GetOatFile() == oat_file) {
        initialize_oat_file_data = false;  // Already initialized.
        break;
      }
    }
  }
  if (initialize_oat_file_data) {
    oat_file->InitializeRelocations();
    // Notify the fault handler about the new executable code range if needed.
    size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
    DCHECK_LE(exec_offset, oat_file->Size());
    size_t exec_size = oat_file->Size() - exec_offset;
    if (exec_size != 0u) {
      runtime->AddGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
    }
  }

  // Let hiddenapi assign a domain to the newly registered dex file.
  hiddenapi::InitializeDexFileDomain(dex_file, class_loader);

  // The map holds the dex cache only weakly; the class table below keeps it
  // alive where needed.
  jweak dex_cache_jweak = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, dex_cache);
  DexCacheData data;
  data.weak_root = dex_cache_jweak;
  data.class_table = ClassTableForClassLoader(class_loader);
  AddNativeDebugInfoForDex(self, &dex_file);
  DCHECK(data.class_table != nullptr);
  // Make sure to hold the dex cache live in the class table. This case happens for the boot class
  // path dex caches without an image.
  data.class_table->InsertStrongRoot(dex_cache);
  // Make sure that the dex cache holds the classloader live.
  dex_cache->SetClassLoader(class_loader);
  if (class_loader != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(class_loader);
  }
  bool inserted = dex_caches_.emplace(&dex_file, std::move(data)).second;
  CHECK(inserted);
}
3986
Alex Light725da8f2020-02-19 14:46:33 -08003987ObjPtr<mirror::DexCache> ClassLinker::DecodeDexCacheLocked(Thread* self, const DexCacheData* data) {
3988 return data != nullptr
3989 ? ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data->weak_root))
Vladimir Markocd556b02017-02-03 11:47:34 +00003990 : nullptr;
3991}
3992
Alex Light725da8f2020-02-19 14:46:33 -08003993bool ClassLinker::IsSameClassLoader(
Vladimir Markocd556b02017-02-03 11:47:34 +00003994 ObjPtr<mirror::DexCache> dex_cache,
Alex Light725da8f2020-02-19 14:46:33 -08003995 const DexCacheData* data,
Vladimir Markocd556b02017-02-03 11:47:34 +00003996 ObjPtr<mirror::ClassLoader> class_loader) {
Alex Light725da8f2020-02-19 14:46:33 -08003997 CHECK(data != nullptr);
David Srbecky6fbcc292021-02-23 01:05:32 +00003998 DCHECK_EQ(FindDexCacheDataLocked(*dex_cache->GetDexFile()), data);
Alex Light725da8f2020-02-19 14:46:33 -08003999 return data->class_table == ClassTableForClassLoader(class_loader);
Vladimir Markocd556b02017-02-03 11:47:34 +00004000}
4001
// Register a dex cache that was created elsewhere (not by this class linker)
// for `class_loader`, rooting it in the loader's class table. In debug builds,
// verifies that no cache is already registered for the same dex file.
void ClassLinker::RegisterExistingDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader) {
  SCOPED_TRACE << __FUNCTION__ << " " << dex_cache->GetDexFile()->GetLocation();
  Thread* self = Thread::Current();
  // Handles keep the objects alive across the lock acquisitions below.
  StackHandleScope<2> hs(self);
  Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
  Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
  const DexFile* dex_file = dex_cache->GetDexFile();
  DCHECK(dex_file != nullptr) << "Attempt to register uninitialized dex_cache object!";
  if (kIsDebugBuild) {
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    const DexCacheData* old_data = FindDexCacheDataLocked(*dex_file);
    ObjPtr<mirror::DexCache> old_dex_cache = DecodeDexCacheLocked(self, old_data);
    DCHECK(old_dex_cache.IsNull()) << "Attempt to manually register a dex cache thats already "
                                   << "been registered on dex file " << dex_file->GetLocation();
  }
  ClassTable* table;
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    table = InsertClassTableForClassLoader(h_class_loader.Get());
  }
  // Avoid a deadlock between a garbage collecting thread running a checkpoint,
  // a thread holding the dex lock and blocking on a condition variable regarding
  // weak references access, and a thread blocking on the dex lock.
  gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
  WriterMutexLock mu(self, *Locks::dex_lock_);
  RegisterDexFileLocked(*dex_file, h_dex_cache.Get(), h_class_loader.Get());
  table->InsertStrongRoot(h_dex_cache.Get());
  if (h_class_loader.Get() != nullptr) {
    // Since we added a strong root to the class table, do the write barrier as required for
    // remembered sets and generational GCs.
    WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
  }
}
4036
Alex Lightde7f8782020-02-24 10:14:22 -08004037static void ThrowDexFileAlreadyRegisteredError(Thread* self, const DexFile& dex_file)
4038 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light725da8f2020-02-19 14:46:33 -08004039 self->ThrowNewExceptionF("Ljava/lang/InternalError;",
Alex Lightde7f8782020-02-24 10:14:22 -08004040 "Attempt to register dex file %s with multiple class loaders",
4041 dex_file.GetLocation().c_str());
Alex Light725da8f2020-02-19 14:46:33 -08004042}
4043
Vladimir Markocd556b02017-02-03 11:47:34 +00004044ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
4045 ObjPtr<mirror::ClassLoader> class_loader) {
Ian Rogers1f539342012-10-03 21:09:42 -07004046 Thread* self = Thread::Current();
Alex Light725da8f2020-02-19 14:46:33 -08004047 ObjPtr<mirror::DexCache> old_dex_cache;
4048 bool registered_with_another_class_loader = false;
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004049 {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004050 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004051 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4052 old_dex_cache = DecodeDexCacheLocked(self, old_data);
4053 if (old_dex_cache != nullptr) {
4054 if (IsSameClassLoader(old_dex_cache, old_data, class_loader)) {
4055 return old_dex_cache;
4056 } else {
4057 // TODO This is not very clean looking. Should maybe try to make a way to request exceptions
4058 // be thrown when it's safe to do so to simplify this.
4059 registered_with_another_class_loader = true;
4060 }
4061 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004062 }
Alex Light725da8f2020-02-19 14:46:33 -08004063 // We need to have released the dex_lock_ to allocate safely.
4064 if (registered_with_another_class_loader) {
4065 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4066 return nullptr;
Brian Carlstromaded5f72011-10-07 17:15:04 -07004067 }
Mathieu Chartiered4ee442018-06-05 14:23:35 -07004068 SCOPED_TRACE << __FUNCTION__ << " " << dex_file.GetLocation();
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004069 LinearAlloc* const linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader);
4070 DCHECK(linear_alloc != nullptr);
4071 ClassTable* table;
4072 {
4073 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
4074 table = InsertClassTableForClassLoader(class_loader);
4075 }
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004076 // Don't alloc while holding the lock, since allocation may need to
4077 // suspend all threads and another thread may need the dex_lock_ to
4078 // get to a suspend point.
Vladimir Markocd556b02017-02-03 11:47:34 +00004079 StackHandleScope<3> hs(self);
4080 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
David Srbecky86d6cd52020-12-02 18:13:10 +00004081 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004082 {
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004083 // Avoid a deadlock between a garbage collecting thread running a checkpoint,
4084 // a thread holding the dex lock and blocking on a condition variable regarding
4085 // weak references access, and a thread blocking on the dex lock.
Ivan Maidanski2b69b9c2018-05-14 13:50:48 +03004086 gc::ScopedGCCriticalSection gcs(self, gc::kGcCauseClassLinker, gc::kCollectorTypeClassLinker);
Andreas Gampecc1b5352016-12-01 16:58:38 -08004087 WriterMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004088 const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
4089 old_dex_cache = DecodeDexCacheLocked(self, old_data);
Andreas Gampefa4333d2017-02-14 11:10:34 -08004090 if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
David Srbecky33df0e32021-09-30 14:36:32 +00004091 // Do Initialize while holding dex lock to make sure two threads don't call it
David Srbecky86d6cd52020-12-02 18:13:10 +00004092 // at the same time with the same dex cache. Since the .bss is shared this can cause failing
4093 // DCHECK that the arrays are null.
David Srbecky33df0e32021-09-30 14:36:32 +00004094 h_dex_cache->Initialize(&dex_file, h_class_loader.Get());
Vladimir Markocd556b02017-02-03 11:47:34 +00004095 RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004096 }
Alex Light725da8f2020-02-19 14:46:33 -08004097 if (old_dex_cache != nullptr) {
4098 // Another thread managed to initialize the dex cache faster, so use that DexCache.
4099 // If this thread encountered OOME, ignore it.
4100 DCHECK_EQ(h_dex_cache == nullptr, self->IsExceptionPending());
4101 self->ClearException();
4102 // We cannot call EnsureSameClassLoader() or allocate an exception while holding the
4103 // dex_lock_.
4104 if (IsSameClassLoader(old_dex_cache, old_data, h_class_loader.Get())) {
4105 return old_dex_cache;
4106 } else {
4107 registered_with_another_class_loader = true;
4108 }
4109 }
Vladimir Markocd556b02017-02-03 11:47:34 +00004110 }
Alex Light725da8f2020-02-19 14:46:33 -08004111 if (registered_with_another_class_loader) {
4112 ThrowDexFileAlreadyRegisteredError(self, dex_file);
4113 return nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004114 }
Andreas Gampefa4333d2017-02-14 11:10:34 -08004115 if (h_dex_cache == nullptr) {
Vladimir Markocd556b02017-02-03 11:47:34 +00004116 self->AssertPendingOOMException();
4117 return nullptr;
Brian Carlstrom47d237a2011-10-18 15:08:33 -07004118 }
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -07004119 table->InsertStrongRoot(h_dex_cache.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004120 if (h_class_loader.Get() != nullptr) {
4121 // Since we added a strong root to the class table, do the write barrier as required for
4122 // remembered sets and generational GCs.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004123 WriteBarrier::ForEveryFieldWrite(h_class_loader.Get());
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004124 }
Nicolas Geoffray1d4f0092020-08-07 14:01:05 +01004125 VLOG(class_linker) << "Registered dex file " << dex_file.GetLocation();
Nicolas Geoffray4f6bb442021-06-02 18:05:51 +01004126 PaletteNotifyDexFileLoaded(dex_file.GetLocation().c_str());
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004127 return h_dex_cache.Get();
Brian Carlstromaded5f72011-10-07 17:15:04 -07004128}
4129
Vladimir Markocd556b02017-02-03 11:47:34 +00004130bool ClassLinker::IsDexFileRegistered(Thread* self, const DexFile& dex_file) {
Andreas Gampecc1b5352016-12-01 16:58:38 -08004131 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004132 return DecodeDexCacheLocked(self, FindDexCacheDataLocked(dex_file)) != nullptr;
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004133}
4134
Vladimir Markocd556b02017-02-03 11:47:34 +00004135ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const DexFile& dex_file) {
4136 ReaderMutexLock mu(self, *Locks::dex_lock_);
Alex Light725da8f2020-02-19 14:46:33 -08004137 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(dex_file);
4138 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
Vladimir Markocd556b02017-02-03 11:47:34 +00004139 if (dex_cache != nullptr) {
4140 return dex_cache;
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07004141 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004142 // Failure, dump diagnostic and abort.
David Srbecky6fbcc292021-02-23 01:05:32 +00004143 for (const auto& entry : dex_caches_) {
4144 const DexCacheData& data = entry.second;
Alex Light725da8f2020-02-19 14:46:33 -08004145 if (DecodeDexCacheLocked(self, &data) != nullptr) {
David Srbecky6fbcc292021-02-23 01:05:32 +00004146 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
Mathieu Chartier673ed3d2015-08-28 14:56:43 -07004147 }
Brian Carlstrom81a90872015-08-28 09:07:14 -07004148 }
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004149 LOG(FATAL) << "Failed to find DexCache for DexFile " << dex_file.GetLocation()
David Srbecky6fbcc292021-02-23 01:05:32 +00004150 << " " << &dex_file;
Ian Rogerse0a02da2014-12-02 14:10:53 -08004151 UNREACHABLE();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004152}
4153
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004154ObjPtr<mirror::DexCache> ClassLinker::FindDexCache(Thread* self, const OatDexFile& oat_dex_file) {
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004155 ReaderMutexLock mu(self, *Locks::dex_lock_);
4156 const DexCacheData* dex_cache_data = FindDexCacheDataLocked(oat_dex_file);
4157 ObjPtr<mirror::DexCache> dex_cache = DecodeDexCacheLocked(self, dex_cache_data);
4158 if (dex_cache != nullptr) {
4159 return dex_cache;
4160 }
4161 // Failure, dump diagnostic and abort.
4162 for (const auto& entry : dex_caches_) {
4163 const DexCacheData& data = entry.second;
4164 if (DecodeDexCacheLocked(self, &data) != nullptr) {
4165 LOG(FATAL_WITHOUT_ABORT) << "Registered dex file " << entry.first->GetLocation();
4166 }
4167 }
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004168 LOG(FATAL) << "Failed to find DexCache for OatDexFile " << oat_dex_file.GetDexFileLocation()
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004169 << " " << &oat_dex_file;
4170 UNREACHABLE();
4171}
4172
Vladimir Markocd556b02017-02-03 11:47:34 +00004173ClassTable* ClassLinker::FindClassTable(Thread* self, ObjPtr<mirror::DexCache> dex_cache) {
4174 const DexFile* dex_file = dex_cache->GetDexFile();
4175 DCHECK(dex_file != nullptr);
4176 ReaderMutexLock mu(self, *Locks::dex_lock_);
David Srbecky6fbcc292021-02-23 01:05:32 +00004177 auto it = dex_caches_.find(dex_file);
4178 if (it != dex_caches_.end()) {
4179 const DexCacheData& data = it->second;
4180 ObjPtr<mirror::DexCache> registered_dex_cache = DecodeDexCacheLocked(self, &data);
4181 if (registered_dex_cache != nullptr) {
4182 CHECK_EQ(registered_dex_cache, dex_cache) << dex_file->GetLocation();
4183 return data.class_table;
Vladimir Markocd556b02017-02-03 11:47:34 +00004184 }
4185 }
4186 return nullptr;
4187}
4188
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004189const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(
Santiago Aboy Solanes4b8ea5d2021-11-19 10:14:54 +00004190 const OatDexFile& oat_dex_file) {
4191 auto it = std::find_if(dex_caches_.begin(), dex_caches_.end(), [&](const auto& entry) {
4192 return entry.first->GetOatDexFile() == &oat_dex_file;
Santiago Aboy Solanes970ba212021-10-21 10:52:47 +01004193 });
4194 return it != dex_caches_.end() ? &it->second : nullptr;
4195}
4196
Alex Light725da8f2020-02-19 14:46:33 -08004197const ClassLinker::DexCacheData* ClassLinker::FindDexCacheDataLocked(const DexFile& dex_file) {
David Srbecky6fbcc292021-02-23 01:05:32 +00004198 auto it = dex_caches_.find(&dex_file);
4199 return it != dex_caches_.end() ? &it->second : nullptr;
Vladimir Markocd556b02017-02-03 11:47:34 +00004200}
4201
Vladimir Marko70e2a762019-07-12 16:49:00 +01004202void ClassLinker::CreatePrimitiveClass(Thread* self,
4203 Primitive::Type type,
4204 ClassRoot primitive_root) {
Vladimir Markoacb906d2018-05-30 10:23:49 +01004205 ObjPtr<mirror::Class> primitive_class =
Mathieu Chartier6beced42016-11-15 15:51:31 -08004206 AllocClass(self, mirror::Class::PrimitiveClassSize(image_pointer_size_));
Vladimir Marko70e2a762019-07-12 16:49:00 +01004207 CHECK(primitive_class != nullptr) << "OOM for primitive class " << type;
4208 // Do not hold lock on the primitive class object, the initialization of
4209 // primitive classes is done while the process is still single threaded.
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004210 primitive_class->SetAccessFlagsDuringLinking(kAccPublic | kAccFinal | kAccAbstract);
Vladimir Marko70e2a762019-07-12 16:49:00 +01004211 primitive_class->SetPrimitiveType(type);
4212 primitive_class->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Vladimir Marko70e2a762019-07-12 16:49:00 +01004213 DCHECK_EQ(primitive_class->NumMethods(), 0u);
Vladimir Markobf121912019-06-04 13:49:05 +01004214 // Primitive classes are initialized during single threaded startup, so visibly initialized.
4215 primitive_class->SetStatusForPrimitiveOrArray(ClassStatus::kVisiblyInitialized);
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004216 const char* descriptor = Primitive::Descriptor(type);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004217 ObjPtr<mirror::Class> existing = InsertClass(descriptor,
Vladimir Marko70e2a762019-07-12 16:49:00 +01004218 primitive_class,
Mathieu Chartier6beced42016-11-15 15:51:31 -08004219 ComputeModifiedUtf8Hash(descriptor));
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004220 CHECK(existing == nullptr) << "InitPrimitiveClass(" << type << ") failed";
Vladimir Marko70e2a762019-07-12 16:49:00 +01004221 SetClassRoot(primitive_root, primitive_class);
Carl Shapiro565f5072011-07-10 13:39:43 -07004222}
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004223
Vladimir Marko02610552018-06-04 14:38:00 +01004224inline ObjPtr<mirror::IfTable> ClassLinker::GetArrayIfTable() {
4225 return GetClassRoot<mirror::ObjectArray<mirror::Object>>(this)->GetIfTable();
4226}
4227
Brian Carlstrombe977852011-07-19 14:54:54 -07004228// Create an array class (i.e. the class object for the array, not the
4229// array itself). "descriptor" looks like "[C" or "[[[[B" or
4230// "[Ljava/lang/String;".
4231//
4232// If "descriptor" refers to an array of primitives, look up the
4233// primitive type's internally-generated class object.
4234//
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004235// "class_loader" is the class loader of the class that's referring to
4236// us. It's used to ensure that we're looking for the element type in
4237// the right context. It does NOT become the class loader for the
4238// array class; that always comes from the base element class.
Brian Carlstrombe977852011-07-19 14:54:54 -07004239//
Mathieu Chartier2cebb242015-04-21 16:50:40 -07004240// Returns null with an exception raised on failure.
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004241ObjPtr<mirror::Class> ClassLinker::CreateArrayClass(Thread* self,
4242 const char* descriptor,
4243 size_t hash,
4244 Handle<mirror::ClassLoader> class_loader) {
Brian Carlstrom5b8e4c82011-09-18 01:38:59 -07004245 // Identify the underlying component type
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004246 CHECK_EQ('[', descriptor[0]);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004247 StackHandleScope<2> hs(self);
Alex Lighte9f61032018-09-24 16:04:51 -07004248
4249 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4250 // code to be executed. We put it up here so we can avoid all the allocations associated with
4251 // creating the class. This can happen with (eg) jit threads.
4252 if (!self->CanLoadClasses()) {
4253 // Make sure we don't try to load anything, potentially causing an infinite loop.
4254 ObjPtr<mirror::Throwable> pre_allocated =
4255 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4256 self->SetException(pre_allocated);
4257 return nullptr;
4258 }
4259
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07004260 MutableHandle<mirror::Class> component_type(hs.NewHandle(FindClass(self, descriptor + 1,
4261 class_loader)));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004262 if (component_type == nullptr) {
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004263 DCHECK(self->IsExceptionPending());
Nicolas Geoffray74b230a2022-08-01 16:20:06 +01004264 // We need to accept erroneous classes as component types. Under AOT, we
4265 // don't accept them as we cannot encode the erroneous class in an image.
Mathieu Chartiere7c9a8c2014-11-06 16:35:45 -08004266 const size_t component_hash = ComputeModifiedUtf8Hash(descriptor + 1);
4267 component_type.Assign(LookupClass(self, descriptor + 1, component_hash, class_loader.Get()));
Nicolas Geoffray74b230a2022-08-01 16:20:06 +01004268 if (component_type == nullptr || Runtime::Current()->IsAotCompiler()) {
Andreas Gampedc13d7d2014-07-23 20:18:36 -07004269 DCHECK(self->IsExceptionPending());
4270 return nullptr;
4271 } else {
4272 self->ClearException();
4273 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004274 }
Ian Rogers2d10b202014-05-12 19:15:18 -07004275 if (UNLIKELY(component_type->IsPrimitiveVoid())) {
4276 ThrowNoClassDefFoundError("Attempt to create array of void primitive type");
4277 return nullptr;
4278 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004279 // See if the component type is already loaded. Array classes are
4280 // always associated with the class loader of their underlying
4281 // element type -- an array of Strings goes with the loader for
4282 // java/lang/String -- so we need to look for it there. (The
4283 // caller should have checked for the existence of the class
4284 // before calling here, but they did so with *their* class loader,
4285 // not the component type's loader.)
4286 //
4287 // If we find it, the caller adds "loader" to the class' initiating
4288 // loader list, which should prevent us from going through this again.
4289 //
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07004290 // This call is unnecessary if "loader" and "component_type->GetClassLoader()"
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004291 // are the same, because our caller (FindClass) just did the
4292 // lookup. (Even if we get this wrong we still have correct behavior,
4293 // because we effectively do this lookup again when we add the new
4294 // class to the hash table --- necessary because of possible races with
4295 // other threads.)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004296 if (class_loader.Get() != component_type->GetClassLoader()) {
Vladimir Marko6ad2f6d2017-01-18 15:22:59 +00004297 ObjPtr<mirror::Class> new_class =
4298 LookupClass(self, descriptor, hash, component_type->GetClassLoader());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004299 if (new_class != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004300 return new_class;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004301 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004302 }
Vladimir Marko70e2a762019-07-12 16:49:00 +01004303 // Core array classes, i.e. Object[], Class[], String[] and primitive
4304 // arrays, have special initialization and they should be found above.
Santiago Aboy Solanes6cdabe12022-02-18 15:27:43 +00004305 DCHECK_IMPLIES(component_type->IsObjectClass(),
4306 // Guard from false positives for errors before setting superclass.
4307 component_type->IsErroneousUnresolved());
Vladimir Marko70e2a762019-07-12 16:49:00 +01004308 DCHECK(!component_type->IsStringClass());
4309 DCHECK(!component_type->IsClassClass());
4310 DCHECK(!component_type->IsPrimitive());
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004311
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004312 // Fill out the fields in the Class.
4313 //
4314 // It is possible to execute some methods against arrays, because
4315 // all arrays are subclasses of java_lang_Object_, so we need to set
4316 // up a vtable. We can just point at the one in java_lang_Object_.
4317 //
4318 // Array classes are simple enough that we don't need to do a full
4319 // link step.
Vladimir Marko70e2a762019-07-12 16:49:00 +01004320 size_t array_class_size = mirror::Array::ClassSize(image_pointer_size_);
4321 auto visitor = [this, array_class_size, component_type](ObjPtr<mirror::Object> obj,
4322 size_t usable_size)
4323 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markob68bb7a2020-03-17 10:55:25 +00004324 ScopedAssertNoNewTransactionRecords sanntr("CreateArrayClass");
Vladimir Marko70e2a762019-07-12 16:49:00 +01004325 mirror::Class::InitializeClassVisitor init_class(array_class_size);
4326 init_class(obj, usable_size);
4327 ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
4328 klass->SetComponentType(component_type.Get());
4329 // Do not hold lock for initialization, the fence issued after the visitor
4330 // returns ensures memory visibility together with the implicit consume
4331 // semantics (for all supported architectures) for any thread that loads
4332 // the array class reference from any memory locations afterwards.
4333 FinishArrayClassSetup(klass);
4334 };
4335 auto new_class = hs.NewHandle<mirror::Class>(
4336 AllocClass(self, GetClassRoot<mirror::Class>(this), array_class_size, visitor));
Andreas Gampefa4333d2017-02-14 11:10:34 -08004337 if (new_class == nullptr) {
Vladimir Marko70e2a762019-07-12 16:49:00 +01004338 self->AssertPendingOOMException();
4339 return nullptr;
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004340 }
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004341
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004342 ObjPtr<mirror::Class> existing = InsertClass(descriptor, new_class.Get(), hash);
Mathieu Chartierc0a9ea42014-02-03 16:36:49 -08004343 if (existing == nullptr) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004344 // We postpone ClassLoad and ClassPrepare events to this point in time to avoid
4345 // duplicate events in case of races. Array classes don't really follow dedicated
4346 // load and prepare, anyways.
4347 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(new_class);
4348 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(new_class, new_class);
4349
Tamas Berghammer160e6df2016-01-05 14:29:02 +00004350 jit::Jit::NewTypeLoadedIfUsingJit(new_class.Get());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07004351 return new_class.Get();
Brian Carlstrom74eb46a2011-08-02 20:10:14 -07004352 }
4353 // Another thread must have loaded the class after we
4354 // started but before we finished. Abandon what we've
4355 // done.
4356 //
4357 // (Yes, this happens.)
4358
Vladimir Markobcf17522018-06-01 13:14:32 +01004359 return existing;
Brian Carlstroma331b3c2011-07-18 17:47:56 -07004360}
4361
Vladimir Marko9186b182018-11-06 14:55:54 +00004362ObjPtr<mirror::Class> ClassLinker::LookupPrimitiveClass(char type) {
4363 ClassRoot class_root;
Ian Rogers62f05122014-03-21 11:21:29 -07004364 switch (type) {
Vladimir Marko9186b182018-11-06 14:55:54 +00004365 case 'B': class_root = ClassRoot::kPrimitiveByte; break;
4366 case 'C': class_root = ClassRoot::kPrimitiveChar; break;
4367 case 'D': class_root = ClassRoot::kPrimitiveDouble; break;
4368 case 'F': class_root = ClassRoot::kPrimitiveFloat; break;
4369 case 'I': class_root = ClassRoot::kPrimitiveInt; break;
4370 case 'J': class_root = ClassRoot::kPrimitiveLong; break;
4371 case 'S': class_root = ClassRoot::kPrimitiveShort; break;
4372 case 'Z': class_root = ClassRoot::kPrimitiveBoolean; break;
4373 case 'V': class_root = ClassRoot::kPrimitiveVoid; break;
Ian Rogers62f05122014-03-21 11:21:29 -07004374 default:
Vladimir Marko9186b182018-11-06 14:55:54 +00004375 return nullptr;
Carl Shapiro744ad052011-08-06 15:53:36 -07004376 }
Vladimir Marko9186b182018-11-06 14:55:54 +00004377 return GetClassRoot(class_root, this);
4378}
4379
4380ObjPtr<mirror::Class> ClassLinker::FindPrimitiveClass(char type) {
4381 ObjPtr<mirror::Class> result = LookupPrimitiveClass(type);
4382 if (UNLIKELY(result == nullptr)) {
4383 std::string printable_type(PrintableChar(type));
4384 ThrowNoClassDefFoundError("Not a primitive type: %s", printable_type.c_str());
4385 }
4386 return result;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004387}
4388
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004389ObjPtr<mirror::Class> ClassLinker::InsertClass(const char* descriptor,
4390 ObjPtr<mirror::Class> klass,
4391 size_t hash) {
Alex Lighte9f61032018-09-24 16:04:51 -07004392 DCHECK(Thread::Current()->CanLoadClasses());
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -08004393 if (VLOG_IS_ON(class_linker)) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004394 ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
Brian Carlstromae826982011-11-09 01:33:42 -08004395 std::string source;
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004396 if (dex_cache != nullptr) {
Brian Carlstromae826982011-11-09 01:33:42 -08004397 source += " from ";
4398 source += dex_cache->GetLocation()->ToModifiedUtf8();
4399 }
4400 LOG(INFO) << "Loaded class " << descriptor << source;
4401 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004402 {
4403 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Vladimir Marko0984e482019-03-27 16:41:41 +00004404 const ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
Mathieu Chartier65975772016-08-05 10:46:36 -07004405 ClassTable* const class_table = InsertClassTableForClassLoader(class_loader);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004406 ObjPtr<mirror::Class> existing = class_table->Lookup(descriptor, hash);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004407 if (existing != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004408 return existing;
Mathieu Chartier65975772016-08-05 10:46:36 -07004409 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004410 VerifyObject(klass);
4411 class_table->InsertWithHash(klass, hash);
4412 if (class_loader != nullptr) {
4413 // This is necessary because we need to have the card dirtied for remembered sets.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004414 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier65975772016-08-05 10:46:36 -07004415 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004416 if (log_new_roots_) {
Mathieu Chartier65975772016-08-05 10:46:36 -07004417 new_class_roots_.push_back(GcRoot<mirror::Class>(klass));
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004418 }
4419 }
Mathieu Chartier65975772016-08-05 10:46:36 -07004420 if (kIsDebugBuild) {
4421 // Test that copied methods correctly can find their holder.
4422 for (ArtMethod& method : klass->GetCopiedMethods(image_pointer_size_)) {
4423 CHECK_EQ(GetHoldingClassOfCopiedMethod(&method), klass);
4424 }
Mathieu Chartier893263b2014-03-04 11:07:42 -08004425 }
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004426 return nullptr;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07004427}
4428
Vladimir Marko1998cd02017-01-13 13:02:58 +00004429void ClassLinker::WriteBarrierForBootOatFileBssRoots(const OatFile* oat_file) {
Mathieu Chartiera1467d02017-02-22 09:22:50 -08004430 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
4431 DCHECK(!oat_file->GetBssGcRoots().empty()) << oat_file->GetLocation();
4432 if (log_new_roots_ && !ContainsElement(new_bss_roots_boot_oat_files_, oat_file)) {
4433 new_bss_roots_boot_oat_files_.push_back(oat_file);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004434 }
4435}
4436
Alex Lighte64300b2015-12-15 15:02:47 -08004437// TODO This should really be in mirror::Class.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004438void ClassLinker::UpdateClassMethods(ObjPtr<mirror::Class> klass,
Alex Lighte64300b2015-12-15 15:02:47 -08004439 LengthPrefixedArray<ArtMethod>* new_methods) {
4440 klass->SetMethodsPtrUnchecked(new_methods,
4441 klass->NumDirectMethods(),
4442 klass->NumDeclaredVirtualMethods());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004443 // Need to mark the card so that the remembered sets and mod union tables get updated.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07004444 WriteBarrier::ForEveryFieldWrite(klass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004445}
4446
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004447ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4448 const char* descriptor,
4449 ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2ff3b972017-06-05 18:14:53 -07004450 return LookupClass(self, descriptor, ComputeModifiedUtf8Hash(descriptor), class_loader);
4451}
4452
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004453ObjPtr<mirror::Class> ClassLinker::LookupClass(Thread* self,
4454 const char* descriptor,
4455 size_t hash,
4456 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko1a1de672016-10-13 12:53:15 +01004457 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
4458 ClassTable* const class_table = ClassTableForClassLoader(class_loader);
4459 if (class_table != nullptr) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004460 ObjPtr<mirror::Class> result = class_table->Lookup(descriptor, hash);
Vladimir Marko1a1de672016-10-13 12:53:15 +01004461 if (result != nullptr) {
Vladimir Markobcf17522018-06-01 13:14:32 +01004462 return result;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004463 }
Sameer Abu Asal2c6de222013-05-02 17:38:59 -07004464 }
Vladimir Marko1a1de672016-10-13 12:53:15 +01004465 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004466}
4467
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004468class MoveClassTableToPreZygoteVisitor : public ClassLoaderVisitor {
4469 public:
Igor Murashkin2ffb7032017-11-08 13:35:21 -08004470 MoveClassTableToPreZygoteVisitor() {}
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004471
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004472 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004473 REQUIRES(Locks::classlinker_classes_lock_)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004474 REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004475 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -07004476 if (class_table != nullptr) {
4477 class_table->FreezeSnapshot();
4478 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07004479 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004480};
4481
4482void ClassLinker::MoveClassTableToPreZygote() {
4483 WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Andreas Gampe2af99022017-04-25 08:32:59 -07004484 boot_class_table_->FreezeSnapshot();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004485 MoveClassTableToPreZygoteVisitor visitor;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004486 VisitClassLoaders(&visitor);
Mathieu Chartierc2e20622014-11-03 11:41:47 -08004487}
4488
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004489// Look up classes by hash and descriptor and put all matching ones in the result array.
4490class LookupClassesVisitor : public ClassLoaderVisitor {
4491 public:
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004492 LookupClassesVisitor(const char* descriptor,
4493 size_t hash,
4494 std::vector<ObjPtr<mirror::Class>>* result)
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004495 : descriptor_(descriptor),
4496 hash_(hash),
4497 result_(result) {}
4498
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004499 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01004500 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004501 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004502 ObjPtr<mirror::Class> klass = class_table->Lookup(descriptor_, hash_);
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004503 // Add `klass` only if `class_loader` is its defining (not just initiating) class loader.
4504 if (klass != nullptr && klass->GetClassLoader() == class_loader) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004505 result_->push_back(klass);
4506 }
4507 }
4508
4509 private:
4510 const char* const descriptor_;
4511 const size_t hash_;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004512 std::vector<ObjPtr<mirror::Class>>* const result_;
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004513};
4514
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004515void ClassLinker::LookupClasses(const char* descriptor,
4516 std::vector<ObjPtr<mirror::Class>>& result) {
Ian Rogers7dfb28c2013-08-22 08:18:36 -07004517 result.clear();
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004518 Thread* const self = Thread::Current();
4519 ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004520 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Andreas Gampe2af99022017-04-25 08:32:59 -07004521 ObjPtr<mirror::Class> klass = boot_class_table_->Lookup(descriptor, hash);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004522 if (klass != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +00004523 DCHECK(klass->GetClassLoader() == nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07004524 result.push_back(klass);
4525 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -07004526 LookupClassesVisitor visitor(descriptor, hash, &result);
4527 VisitClassLoaders(&visitor);
Elliott Hughes6fa602d2011-12-02 17:54:25 -08004528}
4529
// Try to verify `supertype` (a superclass or superinterface of `klass`).
// Returns true if `supertype` ended up verified, or soft-failed at AOT time
// (kShouldVerifyAtRuntime / kVerifiedNeedsAccessChecks). Returns false on a
// hard failure; in that case `klass` is marked kErrorResolved, a VerifyError
// (chaining the original cause, if any) is left pending on `self`, and the
// AOT compiler callbacks are notified of the rejection.
bool ClassLinker::AttemptSupertypeVerification(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               Handle<mirror::Class> supertype) {
  DCHECK(self != nullptr);
  DCHECK(klass != nullptr);
  DCHECK(supertype != nullptr);

  // Only run the verifier if the supertype's outcome is still undecided.
  if (!supertype->IsVerified() && !supertype->IsErroneous()) {
    VerifyClass(self, verifier_deps, supertype);
  }

  if (supertype->IsVerified()
      || supertype->ShouldVerifyAtRuntime()
      || supertype->IsVerifiedNeedsAccessChecks()) {
    // The supertype is either verified, or we soft failed at AOT time.
    DCHECK(supertype->IsVerified() || Runtime::Current()->IsAotCompiler());
    return true;
  }
  // If we got this far then we have a hard failure.
  std::string error_msg =
      StringPrintf("Rejecting class %s that attempts to sub-type erroneous class %s",
                   klass->PrettyDescriptor().c_str(),
                   supertype->PrettyDescriptor().c_str());
  LOG(WARNING) << error_msg << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();
  StackHandleScope<1> hs(self);
  // Preserve the exception thrown while verifying the supertype so it can be
  // re-attached as the cause of the VerifyError we throw for `klass`.
  Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
  if (cause != nullptr) {
    // Set during VerifyClass call (if at all).
    self->ClearException();
  }
  // Change into a verify error.
  ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
  if (cause != nullptr) {
    self->GetException()->SetCause(cause.Get());
  }
  ClassReference ref(klass->GetDexCache()->GetDexFile(), klass->GetDexClassDefIndex());
  if (Runtime::Current()->IsAotCompiler()) {
    Runtime::Current()->GetCompilerCallbacks()->ClassRejected(ref);
  }
  // Need to grab the lock to change status.
  ObjectLock<mirror::Class> super_lock(self, klass);
  mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
  return false;
}
4575
// Verify `klass`, driving its ClassStatus transitions
// (kResolved -> kVerifying -> terminal status) under the class object lock.
// Waits if another thread is already verifying the class, short-circuits on
// already-decided statuses, recursively verifies the superclass and any
// default-method superinterfaces, then either reuses the oat/vdex
// verification result or runs the verifier. Returns the resulting
// verifier::FailureKind; on kHardFailure a VerifyError is pending on `self`.
verifier::FailureKind ClassLinker::VerifyClass(Thread* self,
                                               verifier::VerifierDeps* verifier_deps,
                                               Handle<mirror::Class> klass,
                                               verifier::HardFailLogMode log_level) {
  {
    // TODO: assert that the monitor on the Class is held
    ObjectLock<mirror::Class> lock(self, klass);

    // Is somebody verifying this now?
    ClassStatus old_status = klass->GetStatus();
    while (old_status == ClassStatus::kVerifying) {
      lock.WaitIgnoringInterrupts();
      // WaitIgnoringInterrupts can still receive an interrupt and return early, in this
      // case we may see the same status again. b/62912904. This is why the check is
      // greater or equal.
      CHECK(klass->IsErroneous() || (klass->GetStatus() >= old_status))
          << "Class '" << klass->PrettyClass()
          << "' performed an illegal verification state transition from " << old_status
          << " to " << klass->GetStatus();
      old_status = klass->GetStatus();
    }

    // The class might already be erroneous, for example at compile time if we attempted to verify
    // this class as a parent to another.
    if (klass->IsErroneous()) {
      ThrowEarlierClassFailure(klass.Get());
      return verifier::FailureKind::kHardFailure;
    }

    // Don't attempt to re-verify if already verified.
    if (klass->IsVerified()) {
      if (verifier_deps != nullptr &&
          verifier_deps->ContainsDexFile(klass->GetDexFile()) &&
          !verifier_deps->HasRecordedVerifiedStatus(klass->GetDexFile(), *klass->GetClassDef()) &&
          !Runtime::Current()->IsAotCompiler()) {
        // If the klass is verified, but `verifier_deps` did not record it, this
        // means we are running background verification of a secondary dex file.
        // Re-run the verifier to populate `verifier_deps`.
        // No need to run the verification when running on the AOT Compiler, as
        // the driver handles those multithreaded cases already.
        std::string error_msg;
        verifier::FailureKind failure =
            PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
        // We could have soft failures, so just check that we don't have a hard
        // failure.
        DCHECK_NE(failure, verifier::FailureKind::kHardFailure) << error_msg;
      }
      return verifier::FailureKind::kNoFailure;
    }

    if (klass->IsVerifiedNeedsAccessChecks()) {
      if (!Runtime::Current()->IsAotCompiler()) {
        // Mark the class as having a verification attempt to avoid re-running
        // the verifier.
        mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      }
      return verifier::FailureKind::kAccessChecksFailure;
    }

    // For AOT, don't attempt to re-verify if we have already found we should
    // verify at runtime.
    if (klass->ShouldVerifyAtRuntime()) {
      CHECK(Runtime::Current()->IsAotCompiler());
      return verifier::FailureKind::kSoftFailure;
    }

    // Claim the class for this thread; other verifiers will now wait in the
    // kVerifying loop above.
    DCHECK_EQ(klass->GetStatus(), ClassStatus::kResolved);
    mirror::Class::SetStatus(klass, ClassStatus::kVerifying, self);

    // Skip verification if disabled.
    if (!Runtime::Current()->IsVerificationEnabled()) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
      UpdateClassAfterVerification(klass, image_pointer_size_, verifier::FailureKind::kNoFailure);
      return verifier::FailureKind::kNoFailure;
    }
  }
  // NOTE: the class lock is released here; recursive supertype verification
  // below must not hold it.

  VLOG(class_linker) << "Beginning verification for class: "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8();

  // Verify super class.
  StackHandleScope<2> hs(self);
  MutableHandle<mirror::Class> supertype(hs.NewHandle(klass->GetSuperClass()));
  // If we have a superclass and we get a hard verification failure we can return immediately.
  if (supertype != nullptr &&
      !AttemptSupertypeVerification(self, verifier_deps, klass, supertype)) {
    CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
    return verifier::FailureKind::kHardFailure;
  }

  // Verify all default super-interfaces.
  //
  // (1) Don't bother if the superclass has already had a soft verification failure.
  //
  // (2) Interfaces shouldn't bother to do this recursive verification because they cannot cause
  //     recursive initialization by themselves. This is because when an interface is initialized
  //     directly it must not initialize its superinterfaces. We are allowed to verify regardless
  //     but choose not to for an optimization. If the interfaces is being verified due to a class
  //     initialization (which would need all the default interfaces to be verified) the class code
  //     will trigger the recursive verification anyway.
  if ((supertype == nullptr || supertype->IsVerified())  // See (1)
      && !klass->IsInterface()) {  // See (2)
    int32_t iftable_count = klass->GetIfTableCount();
    MutableHandle<mirror::Class> iface(hs.NewHandle<mirror::Class>(nullptr));
    // Loop through all interfaces this class has defined. It doesn't matter the order.
    for (int32_t i = 0; i < iftable_count; i++) {
      iface.Assign(klass->GetIfTable()->GetInterface(i));
      DCHECK(iface != nullptr);
      // We only care if we have default interfaces and can skip if we are already verified...
      if (LIKELY(!iface->HasDefaultMethods() || iface->IsVerified())) {
        continue;
      } else if (UNLIKELY(!AttemptSupertypeVerification(self, verifier_deps, klass, iface))) {
        // We had a hard failure while verifying this interface. Just return immediately.
        CHECK(self->IsExceptionPending()) << "Verification error should be pending.";
        return verifier::FailureKind::kHardFailure;
      } else if (UNLIKELY(!iface->IsVerified())) {
        // We softly failed to verify the iface. Stop checking and clean up.
        // Put the iface into the supertype handle so we know what caused us to fail.
        supertype.Assign(iface.Get());
        break;
      }
    }
  }

  // At this point if verification failed, then supertype is the "first" supertype that failed
  // verification (without a specific order). If verification succeeded, then supertype is either
  // null or the original superclass of klass and is verified.
  DCHECK(supertype == nullptr ||
         supertype.Get() == klass->GetSuperClass() ||
         !supertype->IsVerified());

  // Try to use verification information from the oat file, otherwise do runtime verification.
  const DexFile& dex_file = *klass->GetDexCache()->GetDexFile();
  ClassStatus oat_file_class_status(ClassStatus::kNotReady);
  bool preverified = VerifyClassUsingOatFile(self, dex_file, klass, oat_file_class_status);

  VLOG(class_linker) << "Class preverified status for class "
                     << klass->PrettyDescriptor()
                     << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                     << ": "
                     << preverified
                     << "( " << oat_file_class_status << ")";

  // If the oat file says the class had an error, re-run the verifier. That way we will either:
  // 1) Be successful at runtime, or
  // 2) Get a precise error message.
  DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);

  std::string error_msg;
  verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
  if (!preverified) {
    verifier_failure = PerformClassVerification(self, verifier_deps, klass, log_level, &error_msg);
  } else if (oat_file_class_status == ClassStatus::kVerifiedNeedsAccessChecks) {
    verifier_failure = verifier::FailureKind::kAccessChecksFailure;
  }

  // Verification is done, grab the lock again.
  ObjectLock<mirror::Class> lock(self, klass);
  self->AssertNoPendingException();

  if (verifier_failure == verifier::FailureKind::kHardFailure) {
    VLOG(verifier) << "Verification failed on class " << klass->PrettyDescriptor()
                  << " in " << klass->GetDexCache()->GetLocation()->ToModifiedUtf8()
                  << " because: " << error_msg;
    ThrowVerifyError(klass.Get(), "%s", error_msg.c_str());
    mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
    return verifier_failure;
  }

  // Make sure all classes referenced by catch blocks are resolved.
  ResolveClassExceptionHandlerTypes(klass);

  if (Runtime::Current()->IsAotCompiler()) {
    if (supertype != nullptr && supertype->ShouldVerifyAtRuntime()) {
      // Regardless of our own verification result, we need to verify the class
      // at runtime if the super class is not verified. This is required in case
      // we generate an app/boot image.
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else if (verifier_failure == verifier::FailureKind::kNoFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
    } else if (verifier_failure == verifier::FailureKind::kSoftFailure ||
               verifier_failure == verifier::FailureKind::kTypeChecksFailure) {
      mirror::Class::SetStatus(klass, ClassStatus::kRetryVerificationAtRuntime, self);
    } else {
      mirror::Class::SetStatus(klass, ClassStatus::kVerifiedNeedsAccessChecks, self);
    }
    // Notify the compiler about the verification status, in case the class
    // was verified implicitly (eg super class of a compiled class). When the
    // compiler unloads dex file after compilation, we still want to keep
    // verification states.
    Runtime::Current()->GetCompilerCallbacks()->UpdateClassState(
        ClassReference(&klass->GetDexFile(), klass->GetDexClassDefIndex()), klass->GetStatus());
  } else {
    mirror::Class::SetStatus(klass, ClassStatus::kVerified, self);
  }

  UpdateClassAfterVerification(klass, image_pointer_size_, verifier_failure);
  return verifier_failure;
}
4776
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004777verifier::FailureKind ClassLinker::PerformClassVerification(Thread* self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004778 verifier::VerifierDeps* verifier_deps,
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004779 Handle<mirror::Class> klass,
4780 verifier::HardFailLogMode log_level,
4781 std::string* error_msg) {
4782 Runtime* const runtime = Runtime::Current();
Nicolas Geoffray7744b692021-07-06 16:19:32 +01004783 StackHandleScope<2> hs(self);
4784 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
4785 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004786 return verifier::ClassVerifier::VerifyClass(self,
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00004787 verifier_deps,
Nicolas Geoffray7744b692021-07-06 16:19:32 +01004788 dex_cache->GetDexFile(),
4789 klass,
4790 dex_cache,
4791 class_loader,
4792 *klass->GetClassDef(),
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004793 runtime->GetCompilerCallbacks(),
Andreas Gampea43ba3d2019-03-13 15:49:20 -07004794 log_level,
4795 Runtime::Current()->GetTargetSdkVersion(),
4796 error_msg);
Mathieu Chartier9e050df2017-08-09 10:05:47 -07004797}
4798
// Try to decide `klass`'s verification outcome from ahead-of-time data (the
// oat class status, falling back to the vdex file) instead of running the
// verifier. Returns true and sets `oat_file_class_status` when the recorded
// status is trustworthy (>= kVerifiedNeedsAccessChecks); returns false when
// the caller must verify at runtime. Dies on a status no .oat file should
// ever contain.
bool ClassLinker::VerifyClassUsingOatFile(Thread* self,
                                          const DexFile& dex_file,
                                          Handle<mirror::Class> klass,
                                          ClassStatus& oat_file_class_status) {
  // If we're compiling, we can only verify the class using the oat file if
  // we are not compiling the image or if the class we're verifying is not part of
  // the compilation unit (app - dependencies). We will let the compiler callback
  // tell us about the latter.
  if (Runtime::Current()->IsAotCompiler()) {
    CompilerCallbacks* callbacks = Runtime::Current()->GetCompilerCallbacks();
    // We are compiling an app (not the image).
    if (!callbacks->CanUseOatStatusForVerification(klass.Get())) {
      return false;
    }
  }

  const OatDexFile* oat_dex_file = dex_file.GetOatDexFile();
  // In case we run without an image there won't be a backing oat file.
  if (oat_dex_file == nullptr || oat_dex_file->GetOatFile() == nullptr) {
    return false;
  }

  uint16_t class_def_index = klass->GetDexClassDefIndex();
  oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
  if (oat_file_class_status >= ClassStatus::kVerified) {
    return true;
  }
  if (oat_file_class_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
    // We return that the class has already been verified, and the caller should
    // check the class status to ensure we run with access checks.
    return true;
  }

  // Check the class status with the vdex file.
  const OatFile* oat_file = oat_dex_file->GetOatFile();
  if (oat_file != nullptr) {
    ClassStatus vdex_status = oat_file->GetVdexFile()->ComputeClassStatus(self, klass);
    if (vdex_status >= ClassStatus::kVerifiedNeedsAccessChecks) {
      VLOG(verifier) << "Vdex verification success for " << klass->PrettyClass();
      oat_file_class_status = vdex_status;
      return true;
    }
  }

  // If we only verified a subset of the classes at compile time, we can end up with classes that
  // were resolved by the verifier.
  if (oat_file_class_status == ClassStatus::kResolved) {
    return false;
  }
  // We never expect a .oat file to have kRetryVerificationAtRuntime statuses.
  CHECK_NE(oat_file_class_status, ClassStatus::kRetryVerificationAtRuntime)
      << klass->PrettyClass() << " " << dex_file.GetLocation();

  if (mirror::Class::IsErroneous(oat_file_class_status)) {
    // Compile time verification failed with a hard error. We'll re-run
    // verification, which might be successful at runtime.
    return false;
  }
  if (oat_file_class_status == ClassStatus::kNotReady) {
    // Status is uninitialized if we couldn't determine the status at compile time, for example,
    // not loading the class.
    // TODO: when the verifier doesn't rely on Class-es failing to resolve/load the type hierarchy
    // isn't a problem and this case shouldn't occur
    return false;
  }
  // Any other status is a bug: all legal oat statuses are handled above.
  std::string temp;
  LOG(FATAL) << "Unexpected class status: " << oat_file_class_status
             << " " << dex_file.GetLocation() << " " << klass->PrettyClass() << " "
             << klass->GetDescriptor(&temp);
  UNREACHABLE();
}
4870
Alex Light5a559862016-01-29 12:24:48 -08004871void ClassLinker::ResolveClassExceptionHandlerTypes(Handle<mirror::Class> klass) {
Alex Light51a64d52015-12-17 13:55:59 -08004872 for (ArtMethod& method : klass->GetMethods(image_pointer_size_)) {
Alex Light5a559862016-01-29 12:24:48 -08004873 ResolveMethodExceptionHandlerTypes(&method);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004874 }
4875}
4876
Alex Light5a559862016-01-29 12:24:48 -08004877void ClassLinker::ResolveMethodExceptionHandlerTypes(ArtMethod* method) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004878 // similar to DexVerifier::ScanTryCatchBlocks and dex2oat's ResolveExceptionsForMethod.
David Sehr0225f8e2018-01-31 08:52:24 +00004879 CodeItemDataAccessor accessor(method->DexInstructionData());
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004880 if (!accessor.HasCodeItem()) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004881 return; // native or abstract method
4882 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004883 if (accessor.TriesSize() == 0) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004884 return; // nothing to process
4885 }
Mathieu Chartier808c7a52017-12-15 11:19:33 -08004886 const uint8_t* handlers_ptr = accessor.GetCatchHandlerData(0);
Nicolas Geoffrayade0ca62022-10-10 13:35:11 +01004887 CHECK(method->GetDexFile()->IsInDataSection(handlers_ptr))
4888 << method->PrettyMethod()
4889 << "@" << method->GetDexFile()->GetLocation()
4890 << "@" << reinterpret_cast<const void*>(handlers_ptr)
4891 << " is_compact_dex=" << method->GetDexFile()->IsCompactDexFile();
4892
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004893 uint32_t handlers_size = DecodeUnsignedLeb128(&handlers_ptr);
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004894 for (uint32_t idx = 0; idx < handlers_size; idx++) {
4895 CatchHandlerIterator iterator(handlers_ptr);
4896 for (; iterator.HasNext(); iterator.Next()) {
4897 // Ensure exception types are resolved so that they don't need resolution to be delivered,
4898 // unresolved exception types will be ignored by exception delivery
Andreas Gampea5b09a62016-11-17 15:21:22 -08004899 if (iterator.GetHandlerTypeIndex().IsValid()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07004900 ObjPtr<mirror::Class> exception_type = ResolveType(iterator.GetHandlerTypeIndex(), method);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004901 if (exception_type == nullptr) {
Brian Carlstrome7d856b2012-01-11 18:10:55 -08004902 DCHECK(Thread::Current()->IsExceptionPending());
4903 Thread::Current()->ClearException();
4904 }
4905 }
4906 }
4907 handlers_ptr = iterator.EndDataPointer();
4908 }
4909}
4910
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01004911ObjPtr<mirror::Class> ClassLinker::CreateProxyClass(ScopedObjectAccessAlreadyRunnable& soa,
4912 jstring name,
4913 jobjectArray interfaces,
4914 jobject loader,
4915 jobjectArray methods,
4916 jobjectArray throws) {
Mathieu Chartier590fee92013-09-13 13:46:47 -07004917 Thread* self = soa.Self();
Alex Lighte9f61032018-09-24 16:04:51 -07004918
4919 // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
4920 // code to be executed. We put it up here so we can avoid all the allocations associated with
4921 // creating the class. This can happen with (eg) jit-threads.
4922 if (!self->CanLoadClasses()) {
4923 // Make sure we don't try to load anything, potentially causing an infinite loop.
4924 ObjPtr<mirror::Throwable> pre_allocated =
4925 Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
4926 self->SetException(pre_allocated);
4927 return nullptr;
4928 }
4929
Nicolas Geoffrayc9d59492022-11-17 15:03:09 +00004930 StackHandleScope<12> hs(self);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004931 MutableHandle<mirror::Class> temp_klass(hs.NewHandle(
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004932 AllocClass(self, GetClassRoot<mirror::Class>(this), sizeof(mirror::Class))));
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004933 if (temp_klass == nullptr) {
Ian Rogersa436fde2013-08-27 23:34:06 -07004934 CHECK(self->IsExceptionPending()); // OOME.
Andreas Gampe2ed8def2014-08-28 14:41:02 -07004935 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07004936 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004937 DCHECK(temp_klass->GetClass() != nullptr);
4938 temp_klass->SetObjectSize(sizeof(mirror::Proxy));
Igor Murashkindf707e42016-02-02 16:56:50 -08004939 // Set the class access flags incl. VerificationAttempted, so we do not try to set the flag on
4940 // the methods.
Nicolas Geoffray66934ef2021-07-07 14:56:23 +01004941 temp_klass->SetAccessFlagsDuringLinking(kAccClassIsProxy | kAccPublic | kAccFinal);
Nicolas Geoffrayc9d59492022-11-17 15:03:09 +00004942 temp_klass->SetClassLoader(soa.Decode<mirror::ClassLoader>(loader));
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004943 DCHECK_EQ(temp_klass->GetPrimitiveType(), Primitive::kPrimNot);
4944 temp_klass->SetName(soa.Decode<mirror::String>(name));
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004945 temp_klass->SetDexCache(GetClassRoot<mirror::Proxy>(this)->GetDexCache());
Mathieu Chartier6beced42016-11-15 15:51:31 -08004946 // Object has an empty iftable, copy it for that reason.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01004947 temp_klass->SetIfTable(GetClassRoot<mirror::Object>(this)->GetIfTable());
Vladimir Marko2c64a832018-01-04 11:31:56 +00004948 mirror::Class::SetStatus(temp_klass, ClassStatus::kIdx, self);
Vladimir Marko3892e622019-03-15 15:22:18 +00004949 std::string storage;
4950 const char* descriptor = temp_klass->GetDescriptor(&storage);
4951 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004952
Mathieu Chartierd57d4542015-10-14 10:55:30 -07004953 // Needs to be before we insert the class so that the allocator field is set.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004954 LinearAlloc* const allocator = GetOrCreateAllocatorForClassLoader(temp_klass->GetClassLoader());
Mathieu Chartierd57d4542015-10-14 10:55:30 -07004955
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004956 // Insert the class before loading the fields as the field roots
4957 // (ArtField::declaring_class_) are only visited from the class
4958 // table. There can't be any suspend points between inserting the
4959 // class and setting the field arrays below.
Vladimir Marko3892e622019-03-15 15:22:18 +00004960 ObjPtr<mirror::Class> existing = InsertClass(descriptor, temp_klass.Get(), hash);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07004961 CHECK(existing == nullptr);
Ian Rogersc2b44472011-12-14 21:17:17 -08004962
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004963 // Instance fields are inherited, but we add a couple of static fields...
Mathieu Chartierc7853442015-03-27 14:35:38 -07004964 const size_t num_fields = 2;
Mathieu Chartier951ec2c2015-09-22 08:50:05 -07004965 LengthPrefixedArray<ArtField>* sfields = AllocArtFieldArray(self, allocator, num_fields);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004966 temp_klass->SetSFieldsPtr(sfields);
Mathieu Chartierc7853442015-03-27 14:35:38 -07004967
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004968 // 1. Create a static field 'interfaces' that holds the _declared_ interfaces implemented by
4969 // our proxy, so Class.getInterfaces doesn't return the flattened set.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004970 ArtField& interfaces_sfield = sfields->At(0);
4971 interfaces_sfield.SetDexFieldIndex(0);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004972 interfaces_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004973 interfaces_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Mathieu Chartierc7853442015-03-27 14:35:38 -07004974
Elliott Hughes2ed52c42012-03-21 16:56:56 -07004975 // 2. Create a static field 'throws' that holds exceptions thrown by our methods.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004976 ArtField& throws_sfield = sfields->At(1);
4977 throws_sfield.SetDexFieldIndex(1);
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07004978 throws_sfield.SetDeclaringClass(temp_klass.Get());
Mathieu Chartier54d220e2015-07-30 16:20:06 -07004979 throws_sfield.SetAccessFlags(kAccStatic | kAccPublic | kAccFinal);
Jesse Wilson95caa792011-10-12 18:14:17 -04004980
Ian Rogers466bb252011-10-14 03:29:56 -07004981 // Proxies have 1 direct method, the constructor
Alex Lighte64300b2015-12-15 15:02:47 -08004982 const size_t num_direct_methods = 1;
Jesse Wilson95caa792011-10-12 18:14:17 -04004983
Alex Light133987d2020-03-26 19:22:12 +00004984 // The array we get passed contains all methods, including private and static
4985 // ones that aren't proxied. We need to filter those out since only interface
4986 // methods (non-private & virtual) are actually proxied.
4987 Handle<mirror::ObjectArray<mirror::Method>> h_methods =
4988 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Method>>(methods));
Vladimir Marko679730e2018-05-25 15:06:48 +01004989 DCHECK_EQ(h_methods->GetClass(), GetClassRoot<mirror::ObjectArray<mirror::Method>>())
David Sehr709b0702016-10-13 09:12:37 -07004990 << mirror::Class::PrettyClass(h_methods->GetClass());
Alex Light133987d2020-03-26 19:22:12 +00004991 // List of the actual virtual methods this class will have.
4992 std::vector<ArtMethod*> proxied_methods;
4993 std::vector<size_t> proxied_throws_idx;
4994 proxied_methods.reserve(h_methods->GetLength());
4995 proxied_throws_idx.reserve(h_methods->GetLength());
4996 // Filter out to only the non-private virtual methods.
4997 for (auto [mirror, idx] : ZipCount(h_methods.Iterate<mirror::Method>())) {
4998 ArtMethod* m = mirror->GetArtMethod();
4999 if (!m->IsPrivate() && !m->IsStatic()) {
5000 proxied_methods.push_back(m);
5001 proxied_throws_idx.push_back(idx);
5002 }
5003 }
5004 const size_t num_virtual_methods = proxied_methods.size();
Alex Lightbc115092020-03-27 11:25:16 -07005005 // We also need to filter out the 'throws'. The 'throws' are a Class[][] that
5006 // contains an array of all the classes each function is declared to throw.
5007 // This is used to wrap unexpected exceptions in a
5008 // UndeclaredThrowableException exception. This array is in the same order as
5009 // the methods array and like the methods array must be filtered to remove any
5010 // non-proxied methods.
Alex Light133987d2020-03-26 19:22:12 +00005011 const bool has_filtered_methods =
5012 static_cast<int32_t>(num_virtual_methods) != h_methods->GetLength();
5013 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> original_proxied_throws(
5014 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(throws)));
5015 MutableHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>> proxied_throws(
5016 hs.NewHandle<mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>>(
5017 (has_filtered_methods)
5018 ? mirror::ObjectArray<mirror::ObjectArray<mirror::Class>>::Alloc(
5019 self, original_proxied_throws->GetClass(), num_virtual_methods)
5020 : original_proxied_throws.Get()));
Alex Lightbc115092020-03-27 11:25:16 -07005021 if (proxied_throws.IsNull() && !original_proxied_throws.IsNull()) {
5022 self->AssertPendingOOMException();
5023 return nullptr;
5024 }
Alex Light133987d2020-03-26 19:22:12 +00005025 if (has_filtered_methods) {
5026 for (auto [orig_idx, new_idx] : ZipCount(MakeIterationRange(proxied_throws_idx))) {
5027 DCHECK_LE(new_idx, orig_idx);
5028 proxied_throws->Set(new_idx, original_proxied_throws->Get(orig_idx));
5029 }
5030 }
Alex Lighte64300b2015-12-15 15:02:47 -08005031
5032 // Create the methods array.
5033 LengthPrefixedArray<ArtMethod>* proxy_class_methods = AllocArtMethodArray(
5034 self, allocator, num_direct_methods + num_virtual_methods);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005035 // Currently AllocArtMethodArray cannot return null, but the OOM logic is left there in case we
5036 // want to throw OOM in the future.
Alex Lighte64300b2015-12-15 15:02:47 -08005037 if (UNLIKELY(proxy_class_methods == nullptr)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005038 self->AssertPendingOOMException();
5039 return nullptr;
Ian Rogersa436fde2013-08-27 23:34:06 -07005040 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005041 temp_klass->SetMethodsPtr(proxy_class_methods, num_direct_methods, num_virtual_methods);
Alex Lighte64300b2015-12-15 15:02:47 -08005042
5043 // Create the single direct method.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005044 CreateProxyConstructor(temp_klass, temp_klass->GetDirectMethodUnchecked(0, image_pointer_size_));
Alex Lighte64300b2015-12-15 15:02:47 -08005045
5046 // Create virtual method using specified prototypes.
5047 // TODO These should really use the iterators.
Jesse Wilson95caa792011-10-12 18:14:17 -04005048 for (size_t i = 0; i < num_virtual_methods; ++i) {
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005049 auto* virtual_method = temp_klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005050 auto* prototype = proxied_methods[i];
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005051 CreateProxyMethod(temp_klass, prototype, virtual_method);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005052 DCHECK(virtual_method->GetDeclaringClass() != nullptr);
5053 DCHECK(prototype->GetDeclaringClass() != nullptr);
Jesse Wilson95caa792011-10-12 18:14:17 -04005054 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005055
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005056 // The super class is java.lang.reflect.Proxy
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005057 temp_klass->SetSuperClass(GetClassRoot<mirror::Proxy>(this));
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005058 // Now effectively in the loaded state.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005059 mirror::Class::SetStatus(temp_klass, ClassStatus::kLoaded, self);
Ian Rogers62d6c772013-02-27 08:32:07 -08005060 self->AssertNoPendingException();
Ian Rogersc2b44472011-12-14 21:17:17 -08005061
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005062 // At this point the class is loaded. Publish a ClassLoad event.
5063 // Note: this may be a temporary class. It is a listener's responsibility to handle this.
5064 Runtime::Current()->GetRuntimeCallbacks()->ClassLoad(temp_klass);
5065
5066 MutableHandle<mirror::Class> klass = hs.NewHandle<mirror::Class>(nullptr);
Ian Rogersc8982582012-09-07 16:53:25 -07005067 {
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005068 // Must hold lock on object when resolved.
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005069 ObjectLock<mirror::Class> resolution_lock(self, temp_klass);
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005070 // Link the fields and virtual methods, creating vtable and iftables.
5071 // The new class will replace the old one in the class table.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005072 Handle<mirror::ObjectArray<mirror::Class>> h_interfaces(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005073 hs.NewHandle(soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces)));
Vladimir Marko3892e622019-03-15 15:22:18 +00005074 if (!LinkClass(self, descriptor, temp_klass, h_interfaces, &klass)) {
Vladimir Markoa4d28dd2021-06-30 11:28:06 +01005075 if (!temp_klass->IsErroneous()) {
5076 mirror::Class::SetStatus(temp_klass, ClassStatus::kErrorUnresolved, self);
5077 }
Mathieu Chartierc528dba2013-11-26 12:00:11 -08005078 return nullptr;
Ian Rogers7dfb28c2013-08-22 08:18:36 -07005079 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005080 }
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005081 CHECK(temp_klass->IsRetired());
5082 CHECK_NE(temp_klass.Get(), klass.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005083
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005084 CHECK_EQ(interfaces_sfield.GetDeclaringClass(), klass.Get());
Mathieu Chartier0795f232016-09-27 18:43:30 -07005085 interfaces_sfield.SetObject<false>(
5086 klass.Get(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005087 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005088 CHECK_EQ(throws_sfield.GetDeclaringClass(), klass.Get());
5089 throws_sfield.SetObject<false>(
Mathieu Chartier0795f232016-09-27 18:43:30 -07005090 klass.Get(),
Alex Light133987d2020-03-26 19:22:12 +00005091 proxied_throws.Get());
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005092
Andreas Gampe6cfd4c92017-04-06 08:03:32 -07005093 Runtime::Current()->GetRuntimeCallbacks()->ClassPrepare(temp_klass, klass);
5094
Vladimir Marko305c38b2018-02-14 11:50:07 +00005095 // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
5096 // See also ClassLinker::EnsureInitialized().
5097 if (kBitstringSubtypeCheckEnabled) {
5098 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
5099 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(klass.Get());
5100 // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck for j.l.r.Proxy is already assigned.
5101 }
5102
Vladimir Markobf121912019-06-04 13:49:05 +01005103 VisiblyInitializedCallback* callback = nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005104 {
5105 // Lock on klass is released. Lock new class object.
5106 ObjectLock<mirror::Class> initialization_lock(self, klass);
Vladimir Markobf121912019-06-04 13:49:05 +01005107 // Conservatively go through the ClassStatus::kInitialized state.
5108 callback = MarkClassInitialized(self, klass);
5109 }
5110 if (callback != nullptr) {
5111 callback->MakeVisible(self);
Ian Rogersc8982582012-09-07 16:53:25 -07005112 }
Ian Rogersc2b44472011-12-14 21:17:17 -08005113
David Srbecky346fd962020-07-27 16:51:00 +01005114 // Consistency checks.
Elliott Hughes67d92002012-03-26 15:08:51 -07005115 if (kIsDebugBuild) {
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005116 CHECK(klass->GetIFieldsPtr() == nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005117 CheckProxyConstructor(klass->GetDirectMethod(0, image_pointer_size_));
5118
Ian Rogersc2b44472011-12-14 21:17:17 -08005119 for (size_t i = 0; i < num_virtual_methods; ++i) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005120 auto* virtual_method = klass->GetVirtualMethodUnchecked(i, image_pointer_size_);
Alex Light133987d2020-03-26 19:22:12 +00005121 CheckProxyMethod(virtual_method, proxied_methods[i]);
Ian Rogersc2b44472011-12-14 21:17:17 -08005122 }
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005123
Hiroshi Yamauchi679b1cf2015-05-21 12:05:27 -07005124 StackHandleScope<1> hs2(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07005125 Handle<mirror::String> decoded_name = hs2.NewHandle(soa.Decode<mirror::String>(name));
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005126 std::string interfaces_field_name(StringPrintf("java.lang.Class[] %s.interfaces",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005127 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005128 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(0)), interfaces_field_name);
Elliott Hughes2ed52c42012-03-21 16:56:56 -07005129
5130 std::string throws_field_name(StringPrintf("java.lang.Class[][] %s.throws",
Mathieu Chartier590fee92013-09-13 13:46:47 -07005131 decoded_name->ToModifiedUtf8().c_str()));
David Sehr709b0702016-10-13 09:12:37 -07005132 CHECK_EQ(ArtField::PrettyField(klass->GetStaticField(1)), throws_field_name);
Ian Rogersc2b44472011-12-14 21:17:17 -08005133
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005134 CHECK_EQ(klass.Get()->GetProxyInterfaces(),
Mathieu Chartierf8ac97f2016-10-05 15:56:52 -07005135 soa.Decode<mirror::ObjectArray<mirror::Class>>(interfaces));
Narayan Kamath6b2dc312017-03-14 13:26:12 +00005136 CHECK_EQ(klass.Get()->GetProxyThrows(),
Alex Light133987d2020-03-26 19:22:12 +00005137 proxied_throws.Get());
Ian Rogersc2b44472011-12-14 21:17:17 -08005138 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005139 return klass.Get();
Jesse Wilson95caa792011-10-12 18:14:17 -04005140}
5141
Mathieu Chartiere401d142015-04-22 13:56:20 -07005142void ClassLinker::CreateProxyConstructor(Handle<mirror::Class> klass, ArtMethod* out) {
5143 // Create constructor for Proxy that must initialize the method.
Vladimir Markob4eb1b12018-05-24 11:09:38 +01005144 ObjPtr<mirror::Class> proxy_class = GetClassRoot<mirror::Proxy>(this);
5145 CHECK_EQ(proxy_class->NumDirectMethods(), 21u);
Przemyslaw Szczepaniakf11cd292016-08-17 17:46:38 +01005146
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005147 // Find the <init>(InvocationHandler)V method. The exact method offset varies depending
5148 // on which front-end compiler was used to build the libcore DEX files.
Vladimir Markoaddc2d12022-11-29 08:47:02 +00005149 ArtMethod* proxy_constructor = WellKnownClasses::java_lang_reflect_Proxy_init;
Igor Murashkin9d3d7522017-02-27 10:39:49 -08005150 DCHECK(proxy_constructor != nullptr)
5151 << "Could not find <init> method in java.lang.reflect.Proxy";
5152
Jeff Haodb8a6642014-08-14 17:18:52 -07005153 // Clone the existing constructor of Proxy (our constructor would just invoke it so steal its
5154 // code_ too)
Mathieu Chartiere401d142015-04-22 13:56:20 -07005155 DCHECK(out != nullptr);
5156 out->CopyFrom(proxy_constructor, image_pointer_size_);
Vladimir Markoba118822017-06-12 15:41:56 +01005157 // Make this constructor public and fix the class to be our Proxy version.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005158 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
Vladimir Markoba118822017-06-12 15:41:56 +01005159 // Note that the compiler calls a ResolveMethod() overload that does not handle a Proxy referrer.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005160 out->SetAccessFlags((out->GetAccessFlags() & ~kAccProtected) |
5161 kAccPublic |
5162 kAccCompileDontBother);
Mathieu Chartiere401d142015-04-22 13:56:20 -07005163 out->SetDeclaringClass(klass.Get());
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005164
5165 // Set the original constructor method.
5166 out->SetDataPtrSize(proxy_constructor, image_pointer_size_);
Ian Rogersc2b44472011-12-14 21:17:17 -08005167}
5168
Mathieu Chartiere401d142015-04-22 13:56:20 -07005169void ClassLinker::CheckProxyConstructor(ArtMethod* constructor) const {
Ian Rogers466bb252011-10-14 03:29:56 -07005170 CHECK(constructor->IsConstructor());
Mathieu Chartiere401d142015-04-22 13:56:20 -07005171 auto* np = constructor->GetInterfaceMethodIfProxy(image_pointer_size_);
5172 CHECK_STREQ(np->GetName(), "<init>");
5173 CHECK_STREQ(np->GetSignature().ToString().c_str(), "(Ljava/lang/reflect/InvocationHandler;)V");
Ian Rogers466bb252011-10-14 03:29:56 -07005174 DCHECK(constructor->IsPublic());
Jesse Wilson95caa792011-10-12 18:14:17 -04005175}
5176
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005177void ClassLinker::CreateProxyMethod(Handle<mirror::Class> klass, ArtMethod* prototype,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005178 ArtMethod* out) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005179 // We steal everything from the prototype (such as DexCache, invoke stub, etc.) then specialize
Ian Rogers466bb252011-10-14 03:29:56 -07005180 // as necessary
Mathieu Chartiere401d142015-04-22 13:56:20 -07005181 DCHECK(out != nullptr);
5182 out->CopyFrom(prototype, image_pointer_size_);
Ian Rogers466bb252011-10-14 03:29:56 -07005183
Alex Lighte9dd04f2016-03-16 16:09:45 -07005184 // Set class to be the concrete proxy class.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005185 out->SetDeclaringClass(klass.Get());
Vladimir Markode0d0de2021-03-18 14:12:35 +00005186 // Clear the abstract and default flags to ensure that defaults aren't picked in
Alex Lighte9dd04f2016-03-16 16:09:45 -07005187 // preference to the invocation handler.
Vladimir Markode0d0de2021-03-18 14:12:35 +00005188 const uint32_t kRemoveFlags = kAccAbstract | kAccDefault;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005189 // Make the method final.
Mathieu Chartier201e2972017-06-05 18:34:53 -07005190 // Mark kAccCompileDontBother so that we don't take JIT samples for the method. b/62349349
5191 const uint32_t kAddFlags = kAccFinal | kAccCompileDontBother;
Alex Lighte9dd04f2016-03-16 16:09:45 -07005192 out->SetAccessFlags((out->GetAccessFlags() & ~kRemoveFlags) | kAddFlags);
5193
Vladimir Markod1ee20f2017-08-17 09:21:16 +00005194 // Set the original interface method.
5195 out->SetDataPtrSize(prototype, image_pointer_size_);
5196
Ian Rogers466bb252011-10-14 03:29:56 -07005197 // At runtime the method looks like a reference and argument saving method, clone the code
5198 // related parameters from this method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005199 out->SetEntryPointFromQuickCompiledCode(GetQuickProxyInvokeHandler());
Ian Rogersc2b44472011-12-14 21:17:17 -08005200}
Jesse Wilson95caa792011-10-12 18:14:17 -04005201
Mathieu Chartiere401d142015-04-22 13:56:20 -07005202void ClassLinker::CheckProxyMethod(ArtMethod* method, ArtMethod* prototype) const {
David Srbecky346fd962020-07-27 16:51:00 +01005203 // Basic consistency checks.
Ian Rogers6d4d9fc2011-11-30 16:24:48 -08005204 CHECK(!prototype->IsFinal());
5205 CHECK(method->IsFinal());
Alex Light9139e002015-10-09 15:59:48 -07005206 CHECK(method->IsInvokable());
Ian Rogers19846512012-02-24 11:42:47 -08005207
5208 // The proxy method doesn't have its own dex cache or dex file and so it steals those of its
5209 // interface prototype. The exception to this are Constructors and the Class of the Proxy itself.
Ian Rogers19846512012-02-24 11:42:47 -08005210 CHECK_EQ(prototype->GetDexMethodIndex(), method->GetDexMethodIndex());
Vladimir Marko5c3e9d12017-08-30 16:43:54 +01005211 CHECK_EQ(prototype, method->GetInterfaceMethodIfProxy(image_pointer_size_));
Jesse Wilson95caa792011-10-12 18:14:17 -04005212}
5213
Nicolas Geoffray4a10dcc2022-08-12 19:18:43 +01005214bool ClassLinker::CanWeInitializeClass(ObjPtr<mirror::Class> klass,
5215 bool can_init_statics,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005216 bool can_init_parents) {
Brian Carlstrom610e49f2013-11-04 17:07:22 -08005217 if (can_init_statics && can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005218 return true;
5219 }
Nicolas Geoffray4a10dcc2022-08-12 19:18:43 +01005220 DCHECK(Runtime::Current()->IsAotCompiler());
5221
5222 // We currently don't support initializing at AOT time classes that need access
5223 // checks.
5224 if (klass->IsVerifiedNeedsAccessChecks()) {
5225 return false;
5226 }
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005227 if (!can_init_statics) {
5228 // Check if there's a class initializer.
Mathieu Chartiere401d142015-04-22 13:56:20 -07005229 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005230 if (clinit != nullptr) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005231 return false;
5232 }
5233 // Check if there are encoded static values needing initialization.
5234 if (klass->NumStaticFields() != 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005235 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005236 DCHECK(dex_class_def != nullptr);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005237 if (dex_class_def->static_values_off_ != 0) {
5238 return false;
5239 }
5240 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005241 }
5242 // If we are a class we need to initialize all interfaces with default methods when we are
5243 // initialized. Check all of them.
5244 if (!klass->IsInterface()) {
5245 size_t num_interfaces = klass->GetIfTableCount();
5246 for (size_t i = 0; i < num_interfaces; i++) {
5247 ObjPtr<mirror::Class> iface = klass->GetIfTable()->GetInterface(i);
5248 if (iface->HasDefaultMethods() && !iface->IsInitialized()) {
5249 if (!can_init_parents || !CanWeInitializeClass(iface, can_init_statics, can_init_parents)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005250 return false;
5251 }
5252 }
5253 }
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005254 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07005255 if (klass->IsInterface() || !klass->HasSuperClass()) {
5256 return true;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005257 }
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005258 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Vladimir Marko889b72d2019-11-12 11:01:13 +00005259 if (super_class->IsInitialized()) {
5260 return true;
Mathieu Chartiere401d142015-04-22 13:56:20 -07005261 }
Vladimir Marko889b72d2019-11-12 11:01:13 +00005262 return can_init_parents && CanWeInitializeClass(super_class, can_init_statics, can_init_parents);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005263}
5264
Mathieu Chartier23369542020-03-04 08:24:11 -08005265bool ClassLinker::InitializeClass(Thread* self,
5266 Handle<mirror::Class> klass,
5267 bool can_init_statics,
5268 bool can_init_parents) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005269 // see JLS 3rd edition, 12.4.2 "Detailed Initialization Procedure" for the locking protocol
5270
5271 // Are we already initialized and therefore done?
5272 // Note: we differ from the JLS here as we don't do this under the lock, this is benign as
5273 // an initialized class will never change its state.
5274 if (klass->IsInitialized()) {
5275 return true;
5276 }
5277
5278 // Fast fail if initialization requires a full runtime. Not part of the JLS.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005279 if (!CanWeInitializeClass(klass.Get(), can_init_statics, can_init_parents)) {
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005280 return false;
5281 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005282
Ian Rogers7b078e82014-09-10 14:44:24 -07005283 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005284 Runtime* const runtime = Runtime::Current();
5285 const bool stats_enabled = runtime->HasStatsEnabled();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005286 uint64_t t0;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005287 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005288 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005289
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005290 // Re-check under the lock in case another thread initialized ahead of us.
5291 if (klass->IsInitialized()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005292 return true;
5293 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005294
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005295 // Was the class already found to be erroneous? Done under the lock to match the JLS.
Brian Carlstromd1422f82011-09-28 11:37:09 -07005296 if (klass->IsErroneous()) {
Andreas Gampe7b3063b2019-01-07 14:12:52 -08005297 ThrowEarlierClassFailure(klass.Get(), true, /* log= */ true);
Brian Carlstromb23eab12014-10-08 17:55:21 -07005298 VlogClassInitializationFailure(klass);
Brian Carlstromd1422f82011-09-28 11:37:09 -07005299 return false;
5300 }
5301
Vladimir Marko72ab6842017-01-20 19:32:50 +00005302 CHECK(klass->IsResolved() && !klass->IsErroneousResolved())
5303 << klass->PrettyClass() << ": state=" << klass->GetStatus();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005304
5305 if (!klass->IsVerified()) {
Nicolas Geoffray5b0b2e12021-03-19 14:48:40 +00005306 VerifyClass(self, /*verifier_deps= */ nullptr, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005307 if (!klass->IsVerified()) {
5308 // We failed to verify, expect either the klass to be erroneous or verification failed at
5309 // compile time.
5310 if (klass->IsErroneous()) {
Andreas Gampefc49fa02016-04-21 12:21:55 -07005311 // The class is erroneous. This may be a verifier error, or another thread attempted
5312 // verification and/or initialization and failed. We can distinguish those cases by
5313 // whether an exception is already pending.
5314 if (self->IsExceptionPending()) {
5315 // Check that it's a VerifyError.
Nicolas Geoffray4dc65892021-07-05 17:43:35 +01005316 DCHECK(IsVerifyError(self->GetException()));
Andreas Gampefc49fa02016-04-21 12:21:55 -07005317 } else {
5318 // Check that another thread attempted initialization.
5319 DCHECK_NE(0, klass->GetClinitThreadId());
5320 DCHECK_NE(self->GetTid(), klass->GetClinitThreadId());
5321 // Need to rethrow the previous failure now.
5322 ThrowEarlierClassFailure(klass.Get(), true);
5323 }
Brian Carlstromb23eab12014-10-08 17:55:21 -07005324 VlogClassInitializationFailure(klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005325 } else {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08005326 CHECK(Runtime::Current()->IsAotCompiler());
Nicolas Geoffray1715efa2020-06-05 18:34:49 +01005327 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerifiedNeedsAccessChecks());
Vladimir Markod79b37b2018-11-02 13:06:22 +00005328 self->AssertNoPendingException();
5329 self->SetException(Runtime::Current()->GetPreAllocatedNoClassDefFoundError());
jeffhaoa9b3bf42012-06-06 17:18:39 -07005330 }
Vladimir Markod79b37b2018-11-02 13:06:22 +00005331 self->AssertPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005332 return false;
Mathieu Chartier524507a2014-08-27 15:28:28 -07005333 } else {
5334 self->AssertNoPendingException();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005335 }
Andreas Gampefc49fa02016-04-21 12:21:55 -07005336
5337 // A separate thread could have moved us all the way to initialized. A "simple" example
5338 // involves a subclass of the current class being initialized at the same time (which
5339 // will implicitly initialize the superclass, if scheduled that way). b/28254258
Vladimir Marko72ab6842017-01-20 19:32:50 +00005340 DCHECK(!klass->IsErroneous()) << klass->GetStatus();
Andreas Gampefc49fa02016-04-21 12:21:55 -07005341 if (klass->IsInitialized()) {
5342 return true;
5343 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005344 }
5345
Vladimir Marko2c64a832018-01-04 11:31:56 +00005346 // If the class is ClassStatus::kInitializing, either this thread is
Brian Carlstromd1422f82011-09-28 11:37:09 -07005347 // initializing higher up the stack or another thread has beat us
5348 // to initializing and we need to wait. Either way, this
5349 // invocation of InitializeClass will not be responsible for
5350 // running <clinit> and will return.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005351 if (klass->GetStatus() == ClassStatus::kInitializing) {
Mathieu Chartier524507a2014-08-27 15:28:28 -07005352 // Could have got an exception during verification.
5353 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005354 VlogClassInitializationFailure(klass);
Mathieu Chartier524507a2014-08-27 15:28:28 -07005355 return false;
5356 }
Elliott Hughes005ab2e2011-09-11 17:15:31 -07005357 // We caught somebody else in the act; was it us?
Elliott Hughesdcc24742011-09-07 14:02:44 -07005358 if (klass->GetClinitThreadId() == self->GetTid()) {
Brian Carlstromd1422f82011-09-28 11:37:09 -07005359 // Yes. That's fine. Return so we can continue initializing.
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005360 return true;
5361 }
Brian Carlstromd1422f82011-09-28 11:37:09 -07005362 // No. That's fine. Wait for another thread to finish initializing.
Igor Murashkinb1d8c312015-08-04 11:18:43 -07005363 return WaitForInitializeClass(klass, self, lock);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005364 }
5365
Jeff Haoe2e40342017-07-19 10:45:18 -07005366 // Try to get the oat class's status for this class if the oat file is present. The compiler
5367 // tries to validate superclass descriptors, and writes the result into the oat file.
5368 // Runtime correctness is guaranteed by classpath checks done on loading. If the classpath
5369 // is different at runtime than it was at compile time, the oat file is rejected. So if the
5370 // oat file is present, the classpaths must match, and the runtime time check can be skipped.
Jeff Hao0cb17282017-07-12 14:51:49 -07005371 bool has_oat_class = false;
Jeff Haoe2e40342017-07-19 10:45:18 -07005372 const OatFile::OatClass oat_class = (runtime->IsStarted() && !runtime->IsAotCompiler())
5373 ? OatFile::FindOatClass(klass->GetDexFile(), klass->GetDexClassDefIndex(), &has_oat_class)
5374 : OatFile::OatClass::Invalid();
Vladimir Marko2c64a832018-01-04 11:31:56 +00005375 if (oat_class.GetStatus() < ClassStatus::kSuperclassValidated &&
Jeff Hao0cb17282017-07-12 14:51:49 -07005376 !ValidateSuperClassDescriptors(klass)) {
Vladimir Marko2c64a832018-01-04 11:31:56 +00005377 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005378 return false;
5379 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005380 self->AllowThreadSuspension();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005381
Vladimir Marko2c64a832018-01-04 11:31:56 +00005382 CHECK_EQ(klass->GetStatus(), ClassStatus::kVerified) << klass->PrettyClass()
Andreas Gampe9510ccd2016-04-20 09:55:25 -07005383 << " self.tid=" << self->GetTid() << " clinit.tid=" << klass->GetClinitThreadId();
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005384
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005385 // From here out other threads may observe that we're initializing and so changes of state
5386 // require the a notification.
Elliott Hughesdcc24742011-09-07 14:02:44 -07005387 klass->SetClinitThreadId(self->GetTid());
Vladimir Marko2c64a832018-01-04 11:31:56 +00005388 mirror::Class::SetStatus(klass, ClassStatus::kInitializing, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005389
Mathieu Chartier23369542020-03-04 08:24:11 -08005390 t0 = stats_enabled ? NanoTime() : 0u;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005391 }
5392
Andreas Gampeaf864702019-07-23 14:05:35 -07005393 uint64_t t_sub = 0;
5394
Brian Carlstrom6d3f72c2013-08-21 18:06:34 -07005395 // Initialize super classes, must be done while initializing for the JLS.
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005396 if (!klass->IsInterface() && klass->HasSuperClass()) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005397 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005398 if (!super_class->IsInitialized()) {
5399 CHECK(!super_class->IsInterface());
5400 CHECK(can_init_parents);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005401 StackHandleScope<1> hs(self);
5402 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class));
Mathieu Chartier23369542020-03-04 08:24:11 -08005403 uint64_t super_t0 = stats_enabled ? NanoTime() : 0u;
Ian Rogers7b078e82014-09-10 14:44:24 -07005404 bool super_initialized = InitializeClass(self, handle_scope_super, can_init_statics, true);
Mathieu Chartier23369542020-03-04 08:24:11 -08005405 uint64_t super_t1 = stats_enabled ? NanoTime() : 0u;
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005406 if (!super_initialized) {
5407 // The super class was verified ahead of entering initializing, we should only be here if
5408 // the super class became erroneous due to initialization.
Chang Xingadbb91c2017-07-17 11:23:55 -07005409 // For the case of aot compiler, the super class might also be initializing but we don't
5410 // want to process circular dependencies in pre-compile.
5411 CHECK(self->IsExceptionPending())
Brian Carlstromf3632832014-05-20 15:36:53 -07005412 << "Super class initialization failed for "
David Sehr709b0702016-10-13 09:12:37 -07005413 << handle_scope_super->PrettyDescriptor()
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005414 << " that has unexpected status " << handle_scope_super->GetStatus()
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005415 << "\nPending exception:\n"
Nicolas Geoffray14691c52015-03-05 10:40:17 +00005416 << (self->GetException() != nullptr ? self->GetException()->Dump() : "");
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005417 ObjectLock<mirror::Class> lock(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005418 // Initialization failed because the super-class is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005419 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005420 return false;
5421 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005422 t_sub = super_t1 - super_t0;
Ian Rogers1bddec32012-02-04 12:27:34 -08005423 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005424 }
5425
Alex Lighteb7c1442015-08-31 13:17:42 -07005426 if (!klass->IsInterface()) {
5427 // Initialize interfaces with default methods for the JLS.
5428 size_t num_direct_interfaces = klass->NumDirectInterfaces();
Alex Light56a40f52015-10-14 11:07:41 -07005429 // Only setup the (expensive) handle scope if we actually need to.
5430 if (UNLIKELY(num_direct_interfaces > 0)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07005431 StackHandleScope<1> hs_iface(self);
Alex Light56a40f52015-10-14 11:07:41 -07005432 MutableHandle<mirror::Class> handle_scope_iface(hs_iface.NewHandle<mirror::Class>(nullptr));
5433 for (size_t i = 0; i < num_direct_interfaces; i++) {
Vladimir Markob10668c2021-06-10 09:52:53 +01005434 handle_scope_iface.Assign(klass->GetDirectInterface(i));
Vladimir Marko8d6768d2017-03-14 10:13:21 +00005435 CHECK(handle_scope_iface != nullptr) << klass->PrettyDescriptor() << " iface #" << i;
Alex Light56a40f52015-10-14 11:07:41 -07005436 CHECK(handle_scope_iface->IsInterface());
5437 if (handle_scope_iface->HasBeenRecursivelyInitialized()) {
5438 // We have already done this for this interface. Skip it.
5439 continue;
5440 }
5441 // We cannot just call initialize class directly because we need to ensure that ALL
5442 // interfaces with default methods are initialized. Non-default interface initialization
5443 // will not affect other non-default super-interfaces.
Mathieu Chartier23369542020-03-04 08:24:11 -08005444 // This is not very precise, misses all walking.
5445 uint64_t inf_t0 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005446 bool iface_initialized = InitializeDefaultInterfaceRecursive(self,
5447 handle_scope_iface,
5448 can_init_statics,
5449 can_init_parents);
Mathieu Chartier23369542020-03-04 08:24:11 -08005450 uint64_t inf_t1 = stats_enabled ? NanoTime() : 0u;
Alex Light56a40f52015-10-14 11:07:41 -07005451 if (!iface_initialized) {
5452 ObjectLock<mirror::Class> lock(self, klass);
5453 // Initialization failed because one of our interfaces with default methods is erroneous.
Vladimir Marko2c64a832018-01-04 11:31:56 +00005454 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Alex Light56a40f52015-10-14 11:07:41 -07005455 return false;
5456 }
Andreas Gampeaf864702019-07-23 14:05:35 -07005457 t_sub += inf_t1 - inf_t0;
Alex Lighteb7c1442015-08-31 13:17:42 -07005458 }
5459 }
5460 }
5461
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005462 const size_t num_static_fields = klass->NumStaticFields();
5463 if (num_static_fields > 0) {
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005464 const dex::ClassDef* dex_class_def = klass->GetClassDef();
Andreas Gampe2ed8def2014-08-28 14:41:02 -07005465 CHECK(dex_class_def != nullptr);
Hiroshi Yamauchi67ef46a2014-08-21 15:59:43 -07005466 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07005467 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(klass->GetClassLoader()));
Mathieu Chartierf8322842014-05-16 10:59:25 -07005468 Handle<mirror::DexCache> dex_cache(hs.NewHandle(klass->GetDexCache()));
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005469
5470 // Eagerly fill in static fields so that the we don't have to do as many expensive
5471 // Class::FindStaticField in ResolveField.
5472 for (size_t i = 0; i < num_static_fields; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07005473 ArtField* field = klass->GetStaticField(i);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005474 const uint32_t field_idx = field->GetDexFieldIndex();
David Srbecky5de5efe2021-02-15 21:23:00 +00005475 ArtField* resolved_field = dex_cache->GetResolvedField(field_idx);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005476 if (resolved_field == nullptr) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01005477 // Populating cache of a dex file which defines `klass` should always be allowed.
David Brazdilf50ac102018-10-17 18:00:06 +01005478 DCHECK(!hiddenapi::ShouldDenyAccessToMember(
5479 field,
5480 hiddenapi::AccessContext(class_loader.Get(), dex_cache.Get()),
5481 hiddenapi::AccessMethod::kNone));
David Srbecky5de5efe2021-02-15 21:23:00 +00005482 dex_cache->SetResolvedField(field_idx, field);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07005483 } else {
5484 DCHECK_EQ(field, resolved_field);
Mathieu Chartier05d89ee2014-10-28 13:57:04 -07005485 }
5486 }
5487
Vladimir Markoe11dd502017-12-08 14:09:45 +00005488 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache,
5489 class_loader,
David Sehr9323e6e2016-09-13 08:58:35 -07005490 this,
5491 *dex_class_def);
Vladimir Markoe11dd502017-12-08 14:09:45 +00005492 const DexFile& dex_file = *dex_cache->GetDexFile();
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005493
Hiroshi Yamauchi88500112014-08-22 12:12:56 -07005494 if (value_it.HasNext()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005495 ClassAccessor accessor(dex_file, *dex_class_def);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005496 CHECK(can_init_statics);
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005497 for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
5498 if (!value_it.HasNext()) {
5499 break;
5500 }
5501 ArtField* art_field = ResolveField(field.GetIndex(),
5502 dex_cache,
5503 class_loader,
Andreas Gampe98ea9d92018-10-19 14:06:15 -07005504 /* is_static= */ true);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005505 if (Runtime::Current()->IsActiveTransaction()) {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005506 value_it.ReadValueToField<true>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005507 } else {
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005508 value_it.ReadValueToField<false>(art_field);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +01005509 }
Mathieu Chartierda595be2016-08-10 13:57:39 -07005510 if (self->IsExceptionPending()) {
5511 break;
5512 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005513 value_it.Next();
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005514 }
Mathieu Chartier1f1cb9f2018-06-04 09:22:46 -07005515 DCHECK(self->IsExceptionPending() || !value_it.HasNext());
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005516 }
5517 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005518
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005519
Mathieu Chartierda595be2016-08-10 13:57:39 -07005520 if (!self->IsExceptionPending()) {
5521 ArtMethod* clinit = klass->FindClassInitializer(image_pointer_size_);
5522 if (clinit != nullptr) {
5523 CHECK(can_init_statics);
5524 JValue result;
5525 clinit->Invoke(self, nullptr, 0, &result, "V");
5526 }
5527 }
Ian Rogers7b078e82014-09-10 14:44:24 -07005528 self->AllowThreadSuspension();
Mathieu Chartier23369542020-03-04 08:24:11 -08005529 uint64_t t1 = stats_enabled ? NanoTime() : 0u;
Elliott Hughes83df2ac2011-10-11 16:37:54 -07005530
Vladimir Markobf121912019-06-04 13:49:05 +01005531 VisiblyInitializedCallback* callback = nullptr;
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005532 bool success = true;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005533 {
Mathieu Chartierdb2633c2014-05-16 09:59:29 -07005534 ObjectLock<mirror::Class> lock(self, klass);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005535
5536 if (self->IsExceptionPending()) {
Brian Carlstromb23eab12014-10-08 17:55:21 -07005537 WrapExceptionInInitializer(klass);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005538 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005539 success = false;
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005540 } else if (Runtime::Current()->IsTransactionAborted()) {
5541 // The exception thrown when the transaction aborted has been caught and cleared
5542 // so we need to throw it again now.
David Sehr709b0702016-10-13 09:12:37 -07005543 VLOG(compiler) << "Return from class initializer of "
5544 << mirror::Class::PrettyDescriptor(klass.Get())
Sebastien Hertzbd9cf9f2015-03-03 12:16:13 +01005545 << " without exception while transaction was aborted: re-throw it now.";
Mathieu Chartier23369542020-03-04 08:24:11 -08005546 runtime->ThrowTransactionAbortError(self);
Vladimir Marko2c64a832018-01-04 11:31:56 +00005547 mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
Sebastien Hertz1c80bec2015-02-03 11:58:06 +01005548 success = false;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005549 } else {
Mathieu Chartier23369542020-03-04 08:24:11 -08005550 if (stats_enabled) {
5551 RuntimeStats* global_stats = runtime->GetStats();
5552 RuntimeStats* thread_stats = self->GetStats();
5553 ++global_stats->class_init_count;
5554 ++thread_stats->class_init_count;
5555 global_stats->class_init_time_ns += (t1 - t0 - t_sub);
5556 thread_stats->class_init_time_ns += (t1 - t0 - t_sub);
5557 }
Ian Rogerse6bb3b22013-08-19 21:51:45 -07005558 // Set the class as initialized except if failed to initialize static fields.
Vladimir Markobf121912019-06-04 13:49:05 +01005559 callback = MarkClassInitialized(self, klass);
Ian Rogers8f3c9ae2013-08-20 17:26:41 -07005560 if (VLOG_IS_ON(class_linker)) {
Ian Rogers1ff3c982014-08-12 02:30:58 -07005561 std::string temp;
5562 LOG(INFO) << "Initialized class " << klass->GetDescriptor(&temp) << " from " <<
Mathieu Chartierf8322842014-05-16 10:59:25 -07005563 klass->GetLocation();
Brian Carlstromae826982011-11-09 01:33:42 -08005564 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005565 }
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005566 }
Vladimir Markobf121912019-06-04 13:49:05 +01005567 if (callback != nullptr) {
5568 callback->MakeVisible(self);
5569 }
Ian Rogersbdfb1a52012-01-12 14:05:22 -08005570 return success;
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07005571}
5572
// We recursively run down the tree of interfaces. We need to do this in the order they are declared
// and perform the initialization only on those interfaces that contain default methods.
//
// Returns true if `iface` and all of its super-interfaces requiring initialization were
// successfully initialized; false if any EnsureInitialized call failed (the recursion stops
// at the first failure). On success, the interface is marked recursively-initialized so that
// later class initializations can skip this whole traversal.
bool ClassLinker::InitializeDefaultInterfaceRecursive(Thread* self,
                                                      Handle<mirror::Class> iface,
                                                      bool can_init_statics,
                                                      bool can_init_parents) {
  CHECK(iface->IsInterface());
  size_t num_direct_ifaces = iface->NumDirectInterfaces();
  // Only create the (expensive) handle scope if we need it.
  if (UNLIKELY(num_direct_ifaces > 0)) {
    StackHandleScope<1> hs(self);
    // A single mutable handle is reused across loop iterations to keep the scope at size 1.
    MutableHandle<mirror::Class> handle_super_iface(hs.NewHandle<mirror::Class>(nullptr));
    // First we initialize all of iface's super-interfaces recursively.
    for (size_t i = 0; i < num_direct_ifaces; i++) {
      ObjPtr<mirror::Class> super_iface = iface->GetDirectInterface(i);
      CHECK(super_iface != nullptr) << iface->PrettyDescriptor() << " iface #" << i;
      if (!super_iface->HasBeenRecursivelyInitialized()) {
        // Recursive step: this super-interface has not been visited yet.
        handle_super_iface.Assign(super_iface);
        if (!InitializeDefaultInterfaceRecursive(self,
                                                 handle_super_iface,
                                                 can_init_statics,
                                                 can_init_parents)) {
          return false;
        }
      }
    }
  }

  bool result = true;
  // Then we initialize 'iface' if it has default methods. We do not need to (and in fact must not)
  // initialize if we don't have default methods.
  if (iface->HasDefaultMethods()) {
    result = EnsureInitialized(self, iface, can_init_statics, can_init_parents);
  }

  // Mark that this interface has undergone recursive default interface initialization so we know we
  // can skip it on any later class initializations. We do this even if we are not a default
  // interface since we can still avoid the traversal. This is purely a performance optimization.
  if (result) {
    // TODO This should be done in a better way
    // Note: Use a try-lock to avoid blocking when someone else is holding the lock on this
    //       interface. It is bad (Java) style, but not impossible. Marking the recursive
    //       initialization is a performance optimization (to avoid another idempotent visit
    //       for other implementing classes/interfaces), and can be revisited later.
    //       If the lock is contended we simply skip the marking; correctness is unaffected.
    ObjectTryLock<mirror::Class> lock(self, iface);
    if (lock.Acquired()) {
      iface->SetRecursivelyInitialized();
    }
  }
  return result;
}
5625
// Blocks until another thread completes (or fails) the initialization of `klass`.
// The caller must already hold `lock` on the class object. Returns true once the class
// is observed initialized; returns false when initialization failed in the other thread.
// In the failure paths an error status and/or exception is set, except for the
// AOT-compiler compile-time failure path, which returns false without throwing.
bool ClassLinker::WaitForInitializeClass(Handle<mirror::Class> klass,
                                         Thread* self,
                                         ObjectLock<mirror::Class>& lock)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  while (true) {
    self->AssertNoPendingException();
    CHECK(!klass->IsInitialized());
    lock.WaitIgnoringInterrupts();

    // When we wake up, repeat the test for init-in-progress. If
    // there's an exception pending (only possible if
    // we were not using WaitIgnoringInterrupts), bail out.
    if (self->IsExceptionPending()) {
      WrapExceptionInInitializer(klass);
      mirror::Class::SetStatus(klass, ClassStatus::kErrorResolved, self);
      return false;
    }
    // Spurious wakeup? Go back to waiting.
    if (klass->GetStatus() == ClassStatus::kInitializing) {
      continue;
    }
    if (klass->GetStatus() == ClassStatus::kVerified &&
        Runtime::Current()->IsAotCompiler()) {
      // Compile time initialization failed.
      return false;
    }
    if (klass->IsErroneous()) {
      // The caller wants an exception, but it was thrown in a
      // different thread. Synthesize one here.
      ThrowNoClassDefFoundError("<clinit> failed for class %s; see exception in other thread",
                                klass->PrettyDescriptor().c_str());
      VlogClassInitializationFailure(klass);
      return false;
    }
    if (klass->IsInitialized()) {
      return true;
    }
    // Any other status at this point indicates a broken class-status state machine.
    LOG(FATAL) << "Unexpected class status. " << klass->PrettyClass() << " is "
        << klass->GetStatus();
  }
  UNREACHABLE();
}
5668
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005669static void ThrowSignatureCheckResolveReturnTypeException(Handle<mirror::Class> klass,
5670 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005671 ArtMethod* method,
5672 ArtMethod* m)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005673 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005674 DCHECK(Thread::Current()->IsExceptionPending());
5675 DCHECK(!m->IsProxyMethod());
5676 const DexFile* dex_file = m->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08005677 const dex::MethodId& method_id = dex_file->GetMethodId(m->GetDexMethodIndex());
5678 const dex::ProtoId& proto_id = dex_file->GetMethodPrototype(method_id);
Andreas Gampea5b09a62016-11-17 15:21:22 -08005679 dex::TypeIndex return_type_idx = proto_id.return_type_idx_;
David Sehr709b0702016-10-13 09:12:37 -07005680 std::string return_type = dex_file->PrettyType(return_type_idx);
5681 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005682 ThrowWrappedLinkageError(klass.Get(),
5683 "While checking class %s method %s signature against %s %s: "
5684 "Failed to resolve return type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005685 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5686 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005687 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005688 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005689 return_type.c_str(), class_loader.c_str());
5690}
5691
5692static void ThrowSignatureCheckResolveArgException(Handle<mirror::Class> klass,
5693 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005694 ArtMethod* method,
5695 ArtMethod* m,
Mathieu Chartierc77f3ab2015-09-03 19:41:50 -07005696 uint32_t index,
Andreas Gampea5b09a62016-11-17 15:21:22 -08005697 dex::TypeIndex arg_type_idx)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005698 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005699 DCHECK(Thread::Current()->IsExceptionPending());
5700 DCHECK(!m->IsProxyMethod());
5701 const DexFile* dex_file = m->GetDexFile();
David Sehr709b0702016-10-13 09:12:37 -07005702 std::string arg_type = dex_file->PrettyType(arg_type_idx);
5703 std::string class_loader = mirror::Object::PrettyTypeOf(m->GetDeclaringClass()->GetClassLoader());
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005704 ThrowWrappedLinkageError(klass.Get(),
5705 "While checking class %s method %s signature against %s %s: "
5706 "Failed to resolve arg %u type %s with %s",
David Sehr709b0702016-10-13 09:12:37 -07005707 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5708 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005709 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005710 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005711 index, arg_type.c_str(), class_loader.c_str());
5712}
5713
5714static void ThrowSignatureMismatch(Handle<mirror::Class> klass,
5715 Handle<mirror::Class> super_klass,
Mathieu Chartiere401d142015-04-22 13:56:20 -07005716 ArtMethod* method,
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005717 const std::string& error_msg)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005718 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005719 ThrowLinkageError(klass.Get(),
5720 "Class %s method %s resolves differently in %s %s: %s",
David Sehr709b0702016-10-13 09:12:37 -07005721 mirror::Class::PrettyDescriptor(klass.Get()).c_str(),
5722 ArtMethod::PrettyMethod(method).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005723 super_klass->IsInterface() ? "interface" : "superclass",
David Sehr709b0702016-10-13 09:12:37 -07005724 mirror::Class::PrettyDescriptor(super_klass.Get()).c_str(),
Vladimir Markod5e5a0e2015-05-08 12:26:59 +01005725 error_msg.c_str());
5726}
5727
// Checks that `method1` and `method2` (the same vtable/interface-table slot seen through
// different class loaders) resolve their return type and every parameter type to the same
// classes. On any mismatch or resolution failure, throws an appropriate LinkageError
// (possibly wrapping the pending resolution exception) and returns false.
static bool HasSameSignatureWithDifferentClassLoaders(Thread* self,
                                                      Handle<mirror::Class> klass,
                                                      Handle<mirror::Class> super_klass,
                                                      ArtMethod* method1,
                                                      ArtMethod* method2)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  {
    StackHandleScope<1> hs(self);
    // Keep method1's return type in a handle: it must stay valid across the second
    // ResolveReturnType() call below.
    Handle<mirror::Class> return_type(hs.NewHandle(method1->ResolveReturnType()));
    if (UNLIKELY(return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method1);
      return false;
    }
    ObjPtr<mirror::Class> other_return_type = method2->ResolveReturnType();
    if (UNLIKELY(other_return_type == nullptr)) {
      ThrowSignatureCheckResolveReturnTypeException(klass, super_klass, method1, method2);
      return false;
    }
    // The two loaders must agree on the exact same return type class.
    if (UNLIKELY(other_return_type != return_type.Get())) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Return types mismatch: %s(%p) vs %s(%p)",
                                          return_type->PrettyClassAndClassLoader().c_str(),
                                          return_type.Get(),
                                          other_return_type->PrettyClassAndClassLoader().c_str(),
                                          other_return_type.Ptr()));
      return false;
    }
  }
  // A null type list means "no parameters"; treat null and an empty list as equivalent.
  const dex::TypeList* types1 = method1->GetParameterTypeList();
  const dex::TypeList* types2 = method2->GetParameterTypeList();
  if (types1 == nullptr) {
    if (types2 != nullptr && types2->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  } else if (UNLIKELY(types2 == nullptr)) {
    if (types1->Size() != 0) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Type list mismatch with %s",
                                          method2->PrettyMethod(true).c_str()));
      return false;
    }
    return true;
  }
  uint32_t num_types = types1->Size();
  if (UNLIKELY(num_types != types2->Size())) {
    ThrowSignatureMismatch(klass, super_klass, method1,
                           StringPrintf("Type list mismatch with %s",
                                        method2->PrettyMethod(true).c_str()));
    return false;
  }
  // Compare parameter types pairwise; each pair must resolve to the identical class.
  for (uint32_t i = 0; i < num_types; ++i) {
    StackHandleScope<1> hs(self);
    dex::TypeIndex param_type_idx = types1->GetTypeItem(i).type_idx_;
    // As with the return type, hold the first resolution result in a handle across
    // the second ResolveClassFromTypeIndex() call.
    Handle<mirror::Class> param_type(hs.NewHandle(
        method1->ResolveClassFromTypeIndex(param_type_idx)));
    if (UNLIKELY(param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method1, i, param_type_idx);
      return false;
    }
    dex::TypeIndex other_param_type_idx = types2->GetTypeItem(i).type_idx_;
    ObjPtr<mirror::Class> other_param_type =
        method2->ResolveClassFromTypeIndex(other_param_type_idx);
    if (UNLIKELY(other_param_type == nullptr)) {
      ThrowSignatureCheckResolveArgException(klass, super_klass, method1,
                                             method2, i, other_param_type_idx);
      return false;
    }
    if (UNLIKELY(param_type.Get() != other_param_type)) {
      ThrowSignatureMismatch(klass, super_klass, method1,
                             StringPrintf("Parameter %u type mismatch: %s(%p) vs %s(%p)",
                                          i,
                                          param_type->PrettyClassAndClassLoader().c_str(),
                                          param_type.Get(),
                                          other_param_type->PrettyClassAndClassLoader().c_str(),
                                          other_param_type.Ptr()));
      return false;
    }
  }
  return true;
}
5813
5814
// Validates that every method `klass` shares with its superclass or interfaces across a
// class-loader boundary resolves its signature types identically on both sides.
// Interfaces are trivially valid. Returns false with a pending exception (set by
// HasSameSignatureWithDifferentClassLoaders) on the first mismatch.
bool ClassLinker::ValidateSuperClassDescriptors(Handle<mirror::Class> klass) {
  if (klass->IsInterface()) {
    return true;
  }
  // Begin with the methods local to the superclass.
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  // One mutable handle, reused for the superclass and then each interface.
  MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(nullptr));
  if (klass->HasSuperClass() &&
      klass->GetClassLoader() != klass->GetSuperClass()->GetClassLoader()) {
    super_klass.Assign(klass->GetSuperClass());
    // Walk the inherited part of the vtable; entries that differ from the superclass's
    // are overrides and must be signature-checked against the overridden method.
    for (int i = klass->GetSuperClass()->GetVTableLength() - 1; i >= 0; --i) {
      auto* m = klass->GetVTableEntry(i, image_pointer_size_);
      auto* super_m = klass->GetSuperClass()->GetVTableEntry(i, image_pointer_size_);
      if (m != super_m) {
        if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                klass,
                                                                super_klass,
                                                                m,
                                                                super_m))) {
          self->AssertPendingException();
          return false;
        }
      }
    }
  }
  // Repeat the check for every interface implemented through a different class loader.
  for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
    super_klass.Assign(klass->GetIfTable()->GetInterface(i));
    if (klass->GetClassLoader() != super_klass->GetClassLoader()) {
      uint32_t num_methods = super_klass->NumVirtualMethods();
      for (uint32_t j = 0; j < num_methods; ++j) {
        auto* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>(
            j, image_pointer_size_);
        auto* super_m = super_klass->GetVirtualMethod(j, image_pointer_size_);
        if (m != super_m) {
          if (UNLIKELY(!HasSameSignatureWithDifferentClassLoaders(self,
                                                                  klass,
                                                                  super_klass,
                                                                  m,
                                                                  super_m))) {
            self->AssertPendingException();
            return false;
          }
        }
      }
    }
  }
  return true;
}
5864
// Ensures that class `c` is initialized, running InitializeClass if needed.
// `can_init_fields`/`can_init_parents` gate whether static fields may be initialized and
// whether super classes/interfaces may be initialized (both false during some AOT phases).
// Returns true if the class is (already or newly) initialized; on failure returns false,
// with an exception pending only when both can_init_* flags were set.
bool ClassLinker::EnsureInitialized(Thread* self,
                                    Handle<mirror::Class> c,
                                    bool can_init_fields,
                                    bool can_init_parents) {
  DCHECK(c != nullptr);

  if (c->IsInitialized()) {
    // If we've seen an initialized but not visibly initialized class
    // many times, request visible initialization.
    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
      // Thanks to the x86 memory model classes skip the initialized status.
      DCHECK(c->IsVisiblyInitialized());
    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
      if (self->IncrementMakeVisiblyInitializedCounter()) {
        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
      }
    }
    return true;
  }
  // SubtypeCheckInfo::Initialized must happen-before any new-instance for that type.
  //
  // Ensure the bitstring is initialized before any of the class initialization
  // logic occurs. Once a class initializer starts running, objects can
  // escape into the heap and use the subtype checking code.
  //
  // Note: A class whose SubtypeCheckInfo is at least Initialized means it
  // can be used as a source for the IsSubClass check, and that all ancestors
  // of the class are Assigned (can be used as a target for IsSubClass check)
  // or Overflowed (can be used as a source for IsSubClass check).
  if (kBitstringSubtypeCheckEnabled) {
    MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
    SubtypeCheck<ObjPtr<mirror::Class>>::EnsureInitialized(c.Get());
    // TODO: Avoid taking subtype_check_lock_ if SubtypeCheck is already initialized.
  }
  const bool success = InitializeClass(self, c, can_init_fields, can_init_parents);
  if (!success) {
    if (can_init_fields && can_init_parents) {
      CHECK(self->IsExceptionPending()) << c->PrettyClass();
    } else {
      // There may or may not be an exception pending. If there is, clear it.
      // We propagate the exception only if we can initialize fields and parents.
      self->ClearException();
    }
  } else {
    self->AssertNoPendingException();
  }
  return success;
}
5913
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005914void ClassLinker::FixupTemporaryDeclaringClass(ObjPtr<mirror::Class> temp_class,
5915 ObjPtr<mirror::Class> new_class) {
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005916 DCHECK_EQ(temp_class->NumInstanceFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005917 for (ArtField& field : new_class->GetIFields()) {
5918 if (field.GetDeclaringClass() == temp_class) {
5919 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005920 }
5921 }
5922
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005923 DCHECK_EQ(temp_class->NumStaticFields(), 0u);
Mathieu Chartier54d220e2015-07-30 16:20:06 -07005924 for (ArtField& field : new_class->GetSFields()) {
5925 if (field.GetDeclaringClass() == temp_class) {
5926 field.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005927 }
5928 }
5929
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005930 DCHECK_EQ(temp_class->NumDirectMethods(), 0u);
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005931 DCHECK_EQ(temp_class->NumVirtualMethods(), 0u);
Alex Lighte64300b2015-12-15 15:02:47 -08005932 for (auto& method : new_class->GetMethods(image_pointer_size_)) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07005933 if (method.GetDeclaringClass() == temp_class) {
5934 method.SetDeclaringClass(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005935 }
5936 }
Mathieu Chartiereb837eb2015-07-29 17:25:41 -07005937
5938 // Make sure the remembered set and mod-union tables know that we updated some of the native
5939 // roots.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -07005940 WriteBarrier::ForEveryFieldWrite(new_class);
Mingyao Yang98d1cc82014-05-15 17:02:16 -07005941}
5942
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005943void ClassLinker::RegisterClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005944 CHECK(class_loader->GetAllocator() == nullptr);
5945 CHECK(class_loader->GetClassTable() == nullptr);
5946 Thread* const self = Thread::Current();
5947 ClassLoaderData data;
Ian Rogers55256cb2017-12-21 17:07:11 -08005948 data.weak_root = self->GetJniEnv()->GetVm()->AddWeakGlobalRef(self, class_loader);
Mathieu Chartier5b830502016-03-02 10:30:23 -08005949 // Create and set the class table.
5950 data.class_table = new ClassTable;
5951 class_loader->SetClassTable(data.class_table);
5952 // Create and set the linear allocator.
5953 data.allocator = Runtime::Current()->CreateLinearAlloc();
5954 class_loader->SetAllocator(data.allocator);
5955 // Add to the list so that we know to free the data later.
5956 class_loaders_.push_back(data);
5957}
5958
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005959ClassTable* ClassLinker::InsertClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier6b069532015-08-05 15:08:12 -07005960 if (class_loader == nullptr) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005961 return boot_class_table_.get();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005962 }
Mathieu Chartier6b069532015-08-05 15:08:12 -07005963 ClassTable* class_table = class_loader->GetClassTable();
5964 if (class_table == nullptr) {
Mathieu Chartier5b830502016-03-02 10:30:23 -08005965 RegisterClassLoader(class_loader);
5966 class_table = class_loader->GetClassTable();
5967 DCHECK(class_table != nullptr);
Mathieu Chartier6b069532015-08-05 15:08:12 -07005968 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005969 return class_table;
5970}
5971
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005972ClassTable* ClassLinker::ClassTableForClassLoader(ObjPtr<mirror::ClassLoader> class_loader) {
Andreas Gampe2af99022017-04-25 08:32:59 -07005973 return class_loader == nullptr ? boot_class_table_.get() : class_loader->GetClassTable();
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -07005974}
5975
Mathieu Chartier28357fa2016-10-18 16:27:40 -07005976static ImTable* FindSuperImt(ObjPtr<mirror::Class> klass, PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07005977 REQUIRES_SHARED(Locks::mutator_lock_) {
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005978 while (klass->HasSuperClass()) {
5979 klass = klass->GetSuperClass();
5980 if (klass->ShouldHaveImt()) {
5981 return klass->GetImt(pointer_size);
5982 }
5983 }
5984 return nullptr;
5985}
5986
// Links a loaded class: wires up the superclass, the method tables (including
// the IMT), and the instance/static field layout, then transitions the class
// to ClassStatus::kResolved. If the class was allocated as a temporary (its
// object size does not match the final size with embedded tables), it is
// retired and replaced by a correctly sized copy. The class to use from now on
// is stored in `*h_new_class_out`. Returns false with a pending exception on
// failure. A null `descriptor` skips class-table and CHA updates.
bool ClassLinker::LinkClass(Thread* self,
                            const char* descriptor,
                            Handle<mirror::Class> klass,
                            Handle<mirror::ObjectArray<mirror::Class>> interfaces,
                            MutableHandle<mirror::Class>* h_new_class_out) {
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  if (!LinkSuperClass(klass)) {
    return false;
  }
  ArtMethod* imt_data[ImTable::kSize];
  // If there are any new conflicts compared to super class.
  bool new_conflict = false;
  // Start with every IMT slot marked unimplemented; LinkMethods fills it in.
  std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod());
  if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) {
    return false;
  }
  if (!LinkInstanceFields(self, klass)) {
    return false;
  }
  // Total object size for the class, computed during static field linking.
  size_t class_size;
  if (!LinkStaticFields(self, klass, &class_size)) {
    return false;
  }
  CreateReferenceInstanceOffsets(klass);
  CHECK_EQ(ClassStatus::kLoaded, klass->GetStatus());

  ImTable* imt = nullptr;
  if (klass->ShouldHaveImt()) {
    // If there are any new conflicts compared to the super class we can not make a copy. There
    // can be cases where both will have a conflict method at the same slot without having the same
    // set of conflicts. In this case, we can not share the IMT since the conflict table slow path
    // will possibly create a table that is incorrect for either of the classes.
    // Same IMT with new_conflict does not happen very often.
    if (!new_conflict) {
      ImTable* super_imt = FindSuperImt(klass.Get(), image_pointer_size_);
      if (super_imt != nullptr) {
        bool imt_equals = true;
        for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) {
          imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]);
        }
        if (imt_equals) {
          // Identical slot-for-slot: share the superclass IMT.
          imt = super_imt;
        }
      }
    }
    if (imt == nullptr) {
      // Allocate a fresh IMT from the loader's linear allocator.
      LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader());
      imt = reinterpret_cast<ImTable*>(
          allocator->Alloc(self,
                           ImTable::SizeInBytes(image_pointer_size_),
                           LinearAllocKind::kNoGCRoots));
      if (imt == nullptr) {
        return false;
      }
      imt->Populate(imt_data, image_pointer_size_);
    }
  }

  if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) {
    // We don't need to retire this class as it has no embedded tables or it was created the
    // correct size during class linker initialization.
    CHECK_EQ(klass->GetClassSize(), class_size) << klass->PrettyDescriptor();

    if (klass->ShouldHaveEmbeddedVTable()) {
      klass->PopulateEmbeddedVTable(image_pointer_size_);
    }
    if (klass->ShouldHaveImt()) {
      klass->SetImt(imt, image_pointer_size_);
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(klass);
    }

    // This will notify waiters on klass that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kResolved, self);
    h_new_class_out->Assign(klass.Get());
  } else {
    CHECK(!klass->IsResolved());
    // Retire the temporary class and create the correctly sized resolved class.
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_new_class =
        hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
    // Set arrays to null since we don't want to have multiple classes with the same ArtField or
    // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
    // may not see any references to the target space and clean the card for a class if another
    // class had the same array pointer.
    klass->SetMethodsPtrUnchecked(nullptr, 0, 0);
    klass->SetSFieldsPtrUnchecked(nullptr);
    klass->SetIFieldsPtrUnchecked(nullptr);
    if (UNLIKELY(h_new_class == nullptr)) {
      // CopyOf failed; mark the temporary class erroneous and bail out.
      self->AssertPendingOOMException();
      mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
      return false;
    }

    CHECK_EQ(h_new_class->GetClassSize(), class_size);
    ObjectLock<mirror::Class> lock(self, h_new_class);
    FixupTemporaryDeclaringClass(klass.Get(), h_new_class.Get());

    if (LIKELY(descriptor != nullptr)) {
      // Swap the temporary class for the final one in the loader's class table.
      WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
      const ObjPtr<mirror::ClassLoader> class_loader = h_new_class.Get()->GetClassLoader();
      ClassTable* const table = InsertClassTableForClassLoader(class_loader);
      const ObjPtr<mirror::Class> existing =
          table->UpdateClass(descriptor, h_new_class.Get(), ComputeModifiedUtf8Hash(descriptor));
      if (class_loader != nullptr) {
        // We updated the class in the class table, perform the write barrier so that the GC knows
        // about the change.
        WriteBarrier::ForEveryFieldWrite(class_loader);
      }
      CHECK_EQ(existing, klass.Get());
      if (log_new_roots_) {
        new_class_roots_.push_back(GcRoot<mirror::Class>(h_new_class.Get()));
      }
    }

    // Update CHA info based on whether we override methods.
    // Have to do this before setting the class as resolved which allows
    // instantiation of klass.
    if (LIKELY(descriptor != nullptr) && cha_ != nullptr) {
      cha_->UpdateAfterLoadingOf(h_new_class);
    }

    // This will notify waiters on temp class that saw the not yet resolved class in the
    // class_table_ during EnsureResolved.
    mirror::Class::SetStatus(klass, ClassStatus::kRetired, self);

    CHECK_EQ(h_new_class->GetStatus(), ClassStatus::kResolving);
    // This will notify waiters on new_class that saw the not yet resolved
    // class in the class_table_ during EnsureResolved.
    mirror::Class::SetStatus(h_new_class, ClassStatus::kResolved, self);
    // Return the new class.
    h_new_class_out->Assign(h_new_class.Get());
  }
  return true;
}
6129
// Resolves and records the superclass and directly implemented interfaces of
// `klass` from its dex class def, performing access checks along the way. On
// success the class status advances from kIdx to kLoaded; on failure false is
// returned with an exception pending.
bool ClassLinker::LoadSuperAndInterfaces(Handle<mirror::Class> klass, const DexFile& dex_file) {
  CHECK_EQ(ClassStatus::kIdx, klass->GetStatus());
  const dex::ClassDef& class_def = dex_file.GetClassDef(klass->GetDexClassDefIndex());
  dex::TypeIndex super_class_idx = class_def.superclass_idx_;
  if (super_class_idx.IsValid()) {
    // Check that a class does not inherit from itself directly.
    //
    // TODO: This is a cheap check to detect the straightforward case
    // of a class extending itself (b/28685551), but we should do a
    // proper cycle detection on loaded classes, to detect all cases
    // of class circularity errors (b/28830038).
    if (super_class_idx == class_def.class_idx_) {
      ThrowClassCircularityError(klass.Get(),
                                 "Class %s extends itself",
                                 klass->PrettyDescriptor().c_str());
      return false;
    }

    ObjPtr<mirror::Class> super_class = ResolveType(super_class_idx, klass.Get());
    if (super_class == nullptr) {
      // Resolution failed; the resolver is expected to have raised the error.
      DCHECK(Thread::Current()->IsExceptionPending());
      return false;
    }
    // Verify
    if (!klass->CanAccess(super_class)) {
      ThrowIllegalAccessError(klass.Get(), "Class %s extended by class %s is inaccessible",
                              super_class->PrettyDescriptor().c_str(),
                              klass->PrettyDescriptor().c_str());
      return false;
    }
    CHECK(super_class->IsResolved());
    klass->SetSuperClass(super_class);
  }
  const dex::TypeList* interfaces = dex_file.GetInterfacesList(class_def);
  if (interfaces != nullptr) {
    // Resolve and access-check each directly implemented interface. Note that
    // only accessibility is validated here; the resolved interfaces are not
    // stored by this function.
    for (size_t i = 0; i < interfaces->Size(); i++) {
      dex::TypeIndex idx = interfaces->GetTypeItem(i).type_idx_;
      ObjPtr<mirror::Class> interface = ResolveType(idx, klass.Get());
      if (interface == nullptr) {
        DCHECK(Thread::Current()->IsExceptionPending());
        return false;
      }
      // Verify
      if (!klass->CanAccess(interface)) {
        // TODO: the RI seemed to ignore this in my testing.
        ThrowIllegalAccessError(klass.Get(),
                                "Interface %s implemented by class %s is inaccessible",
                                interface->PrettyDescriptor().c_str(),
                                klass->PrettyDescriptor().c_str());
        return false;
      }
    }
  }
  // Mark the class as loaded.
  mirror::Class::SetStatus(klass, ClassStatus::kLoaded, nullptr);
  return true;
}
6187
// Validates the already-resolved superclass of `klass` (only java.lang.Object
// may lack one; interfaces must extend Object; the superclass must not be
// final, not an interface, and must be accessible) and inherits relevant
// flags from it (finalizable, class-loader, reference flags). Also forbids
// direct user subclasses of java.lang.ref.Reference once the class linker is
// initialized. Returns false with a pending error on any violation.
bool ClassLinker::LinkSuperClass(Handle<mirror::Class> klass) {
  CHECK(!klass->IsPrimitive());
  ObjPtr<mirror::Class> super = klass->GetSuperClass();
  ObjPtr<mirror::Class> object_class = GetClassRoot<mirror::Object>(this);
  if (klass.Get() == object_class) {
    // java.lang.Object is the unique root: it must have no superclass.
    if (super != nullptr) {
      ThrowClassFormatError(klass.Get(), "java.lang.Object must not have a superclass");
      return false;
    }
    return true;
  }
  if (super == nullptr) {
    ThrowLinkageError(klass.Get(), "No superclass defined for class %s",
                      klass->PrettyDescriptor().c_str());
    return false;
  }
  // Verify
  if (klass->IsInterface() && super != object_class) {
    ThrowClassFormatError(klass.Get(), "Interfaces must have java.lang.Object as superclass");
    return false;
  }
  if (super->IsFinal()) {
    ThrowVerifyError(klass.Get(),
                     "Superclass %s of %s is declared final",
                     super->PrettyDescriptor().c_str(),
                     klass->PrettyDescriptor().c_str());
    return false;
  }
  if (super->IsInterface()) {
    ThrowIncompatibleClassChangeError(klass.Get(),
                                      "Superclass %s of %s is an interface",
                                      super->PrettyDescriptor().c_str(),
                                      klass->PrettyDescriptor().c_str());
    return false;
  }
  if (!klass->CanAccess(super)) {
    ThrowIllegalAccessError(klass.Get(), "Superclass %s is inaccessible to class %s",
                            super->PrettyDescriptor().c_str(),
                            klass->PrettyDescriptor().c_str());
    return false;
  }

  // Inherit kAccClassIsFinalizable from the superclass in case this
  // class doesn't override finalize.
  if (super->IsFinalizable()) {
    klass->SetFinalizable();
  }

  // Inherit class loader flag form super class.
  if (super->IsClassLoaderClass()) {
    klass->SetClassLoaderClass();
  }

  // Inherit reference flags (if any) from the superclass.
  uint32_t reference_flags = (super->GetClassFlags() & mirror::kClassFlagReference);
  if (reference_flags != 0) {
    CHECK_EQ(klass->GetClassFlags(), 0u);
    klass->SetClassFlags(klass->GetClassFlags() | reference_flags);
  }
  // Disallow custom direct subclasses of java.lang.ref.Reference.
  if (init_done_ && super == GetClassRoot<mirror::Reference>(this)) {
    ThrowLinkageError(klass.Get(),
                      "Class %s attempts to subclass java.lang.ref.Reference, which is not allowed",
                      klass->PrettyDescriptor().c_str());
    return false;
  }

  if (kIsDebugBuild) {
    // Ensure super classes are fully resolved prior to resolving fields..
    while (super != nullptr) {
      CHECK(super->IsResolved());
      super = super->GetSuperClass();
    }
  }
  return true;
}
6264
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006265// Comparator for name and signature of a method, used in finding overriding methods. Implementation
6266// avoids the use of handles, if it didn't then rather than compare dex files we could compare dex
6267// caches in the implementation below.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01006268class MethodNameAndSignatureComparator final : public ValueObject {
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006269 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07006270 explicit MethodNameAndSignatureComparator(ArtMethod* method)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006271 REQUIRES_SHARED(Locks::mutator_lock_) :
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006272 dex_file_(method->GetDexFile()), mid_(&dex_file_->GetMethodId(method->GetDexMethodIndex())),
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006273 name_view_() {
David Sehr709b0702016-10-13 09:12:37 -07006274 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006275 }
6276
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006277 ALWAYS_INLINE std::string_view GetNameView() {
6278 if (name_view_.empty()) {
6279 name_view_ = dex_file_->StringViewByIdx(mid_->name_idx_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -07006280 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006281 return name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006282 }
6283
Mathieu Chartiere401d142015-04-22 13:56:20 -07006284 bool HasSameNameAndSignature(ArtMethod* other)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006285 REQUIRES_SHARED(Locks::mutator_lock_) {
David Sehr709b0702016-10-13 09:12:37 -07006286 DCHECK(!other->IsProxyMethod()) << other->PrettyMethod();
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006287 const DexFile* other_dex_file = other->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006288 const dex::MethodId& other_mid = other_dex_file->GetMethodId(other->GetDexMethodIndex());
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006289 if (dex_file_ == other_dex_file) {
6290 return mid_->name_idx_ == other_mid.name_idx_ && mid_->proto_idx_ == other_mid.proto_idx_;
6291 }
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006292 return GetNameView() == other_dex_file->StringViewByIdx(other_mid.name_idx_) &&
6293 dex_file_->GetMethodSignature(*mid_) == other_dex_file->GetMethodSignature(other_mid);
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006294 }
6295
6296 private:
6297 // Dex file for the method to compare against.
6298 const DexFile* const dex_file_;
6299 // MethodId for the method to compare against.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08006300 const dex::MethodId* const mid_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006301 // Lazily computed name from the dex file's strings.
Vladimir Markob4bd92f2021-07-05 12:18:26 +01006302 std::string_view name_view_;
Ian Rogers03b6eaf2014-10-28 09:34:57 -07006303};
6304
// Grows the IMT conflict table owned by `conflict_method` with one new
// (interface_method, method) entry. Returns the method that owns the enlarged
// table: a freshly created conflict method if `conflict_method` was the
// runtime's shared one (which must not be mutated), otherwise `conflict_method`
// itself. On allocation failure the original `conflict_method` is returned
// unchanged after logging an error.
ArtMethod* ClassLinker::AddMethodToConflictTable(ObjPtr<mirror::Class> klass,
                                                 ArtMethod* conflict_method,
                                                 ArtMethod* interface_method,
                                                 ArtMethod* method) {
  ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
  Runtime* const runtime = Runtime::Current();
  // Tables live in the linear allocator of the class's own loader.
  LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());

  // Create a new entry if the existing one is the shared conflict method.
  ArtMethod* new_conflict_method = (conflict_method == runtime->GetImtConflictMethod())
      ? runtime->CreateImtConflictMethod(linear_alloc)
      : conflict_method;

  // Allocate a new table. Note that we will leak this table at the next conflict,
  // but that's a tradeoff compared to making the table fixed size.
  void* data = linear_alloc->Alloc(
      Thread::Current(),
      ImtConflictTable::ComputeSizeWithOneMoreEntry(current_table, image_pointer_size_),
      LinearAllocKind::kNoGCRoots);
  if (data == nullptr) {
    LOG(ERROR) << "Failed to allocate conflict table";
    return conflict_method;
  }
  // Copy-construct the old entries plus the new pair into the new storage.
  ImtConflictTable* new_table = new (data) ImtConflictTable(current_table,
                                                            interface_method,
                                                            method,
                                                            image_pointer_size_);

  // Do a fence to ensure threads see the data in the table before it is assigned
  // to the conflict method.
  // Note that there is a race in the presence of multiple threads and we may leak
  // memory from the LinearAlloc, but that's a tradeoff compared to using
  // atomic operations.
  std::atomic_thread_fence(std::memory_order_release);
  new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
  return new_conflict_method;
}
6342
6343void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method,
6344 ArtMethod* imt_conflict_method,
6345 ArtMethod* current_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006346 /*out*/bool* new_conflict,
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006347 /*out*/ArtMethod** imt_ref) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006348 // Place method in imt if entry is empty, place conflict otherwise.
6349 if (*imt_ref == unimplemented_method) {
6350 *imt_ref = current_method;
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006351 } else if (!(*imt_ref)->IsRuntimeMethod()) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006352 // If we are not a conflict and we have the same signature and name as the imt
6353 // entry, it must be that we overwrote a superclass vtable entry.
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006354 // Note that we have checked IsRuntimeMethod, as there may be multiple different
6355 // conflict methods.
Alex Lighteb7c1442015-08-31 13:17:42 -07006356 MethodNameAndSignatureComparator imt_comparator(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006357 (*imt_ref)->GetInterfaceMethodIfProxy(image_pointer_size_));
Alex Lighteb7c1442015-08-31 13:17:42 -07006358 if (imt_comparator.HasSameNameAndSignature(
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006359 current_method->GetInterfaceMethodIfProxy(image_pointer_size_))) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006360 *imt_ref = current_method;
6361 } else {
Alex Light9139e002015-10-09 15:59:48 -07006362 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006363 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006364 }
Nicolas Geoffray796d6302016-03-13 22:22:31 +00006365 } else {
6366 // Place the default conflict method. Note that there may be an existing conflict
6367 // method in the IMT, but it could be one tailored to the super class, with a
6368 // specific ImtConflictTable.
6369 *imt_ref = imt_conflict_method;
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006370 *new_conflict = true;
Alex Lighteb7c1442015-08-31 13:17:42 -07006371 }
6372}
6373
// Builds the IMT for a non-temporary class that should have one, creating
// conflict tables where multiple interface methods land in the same slot.
// If the resulting table is equivalent to the superclass IMT (slot-for-slot
// equal, with conflict tables compared by content), the superclass table is
// shared instead of populating this class's own copy.
void ClassLinker::FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) {
  DCHECK(klass->ShouldHaveImt()) << klass->PrettyClass();
  DCHECK(!klass->IsTemp()) << klass->PrettyClass();
  ArtMethod* imt_data[ImTable::kSize];
  Runtime* const runtime = Runtime::Current();
  ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod();
  ArtMethod* const conflict_method = runtime->GetImtConflictMethod();
  // All slots start out unimplemented; the iftable walk below fills them in.
  std::fill_n(imt_data, arraysize(imt_data), unimplemented_method);
  if (klass->GetIfTable() != nullptr) {
    bool new_conflict = false;
    FillIMTFromIfTable(klass->GetIfTable(),
                       unimplemented_method,
                       conflict_method,
                       klass,
                       /*create_conflict_tables=*/true,
                       /*ignore_copied_methods=*/false,
                       &new_conflict,
                       &imt_data[0]);
  }
  // Compare the IMT with the super class including the conflict methods. If they are equivalent,
  // we can just use the same pointer.
  ImTable* imt = nullptr;
  ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
  if (super_class != nullptr && super_class->ShouldHaveImt()) {
    ImTable* super_imt = super_class->GetImt(image_pointer_size_);
    bool same = true;
    for (size_t i = 0; same && i < ImTable::kSize; ++i) {
      ArtMethod* method = imt_data[i];
      ArtMethod* super_method = super_imt->Get(i, image_pointer_size_);
      if (method != super_method) {
        // Differing slots can still be "equal" if both hold per-class conflict
        // tables (runtime methods other than the shared sentinels) with the
        // same contents.
        bool is_conflict_table = method->IsRuntimeMethod() &&
                                 method != unimplemented_method &&
                                 method != conflict_method;
        // Verify conflict contents.
        bool super_conflict_table = super_method->IsRuntimeMethod() &&
                                    super_method != unimplemented_method &&
                                    super_method != conflict_method;
        if (!is_conflict_table || !super_conflict_table) {
          same = false;
        } else {
          ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_);
          ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_);
          same = same && table1->Equals(table2, image_pointer_size_);
        }
      }
    }
    if (same) {
      imt = super_imt;
    }
  }
  if (imt == nullptr) {
    // Not shareable: populate the class's own (already allocated) IMT.
    imt = klass->GetImt(image_pointer_size_);
    DCHECK(imt != nullptr);
    imt->Populate(imt_data, image_pointer_size_);
  } else {
    // Shareable: point the class at the superclass IMT.
    klass->SetImt(imt, image_pointer_size_);
  }
}
6432
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006433ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count,
6434 LinearAlloc* linear_alloc,
Andreas Gampe542451c2016-07-26 09:02:02 -07006435 PointerSize image_pointer_size) {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006436 void* data = linear_alloc->Alloc(Thread::Current(),
Lokesh Gidra606bd942022-05-23 19:00:09 +00006437 ImtConflictTable::ComputeSize(count, image_pointer_size),
6438 LinearAllocKind::kNoGCRoots);
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006439 return (data != nullptr) ? new (data) ImtConflictTable(count, image_pointer_size) : nullptr;
6440}
6441
6442ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, LinearAlloc* linear_alloc) {
6443 return CreateImtConflictTable(count, linear_alloc, image_pointer_size_);
6444}
6445
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006446void ClassLinker::FillIMTFromIfTable(ObjPtr<mirror::IfTable> if_table,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006447 ArtMethod* unimplemented_method,
6448 ArtMethod* imt_conflict_method,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006449 ObjPtr<mirror::Class> klass,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006450 bool create_conflict_tables,
6451 bool ignore_copied_methods,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006452 /*out*/bool* new_conflict,
6453 /*out*/ArtMethod** imt) {
6454 uint32_t conflict_counts[ImTable::kSize] = {};
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006455 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006456 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006457 const size_t num_virtuals = interface->NumVirtualMethods();
6458 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6459 // Virtual methods can be larger than the if table methods if there are default methods.
6460 DCHECK_GE(num_virtuals, method_array_count);
6461 if (kIsDebugBuild) {
6462 if (klass->IsInterface()) {
6463 DCHECK_EQ(method_array_count, 0u);
6464 } else {
6465 DCHECK_EQ(interface->NumDeclaredVirtualMethods(), method_array_count);
6466 }
6467 }
6468 if (method_array_count == 0) {
6469 continue;
6470 }
Vladimir Marko557fece2019-03-26 14:29:41 +00006471 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006472 for (size_t j = 0; j < method_array_count; ++j) {
6473 ArtMethod* implementation_method =
6474 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6475 if (ignore_copied_methods && implementation_method->IsCopied()) {
6476 continue;
6477 }
6478 DCHECK(implementation_method != nullptr);
6479 // Miranda methods cannot be used to implement an interface method, but they are safe to put
6480 // in the IMT since their entrypoint is the interface trampoline. If we put any copied methods
6481 // or interface methods in the IMT here they will not create extra conflicts since we compare
6482 // names and signatures in SetIMTRef.
6483 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00006484 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006485
6486 // There is only any conflicts if all of the interface methods for an IMT slot don't have
6487 // the same implementation method, keep track of this to avoid creating a conflict table in
6488 // this case.
6489
6490 // Conflict table size for each IMT slot.
6491 ++conflict_counts[imt_index];
6492
6493 SetIMTRef(unimplemented_method,
6494 imt_conflict_method,
6495 implementation_method,
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006496 /*out*/new_conflict,
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006497 /*out*/&imt[imt_index]);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006498 }
6499 }
6500
6501 if (create_conflict_tables) {
6502 // Create the conflict tables.
6503 LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006504 for (size_t i = 0; i < ImTable::kSize; ++i) {
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006505 size_t conflicts = conflict_counts[i];
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006506 if (imt[i] == imt_conflict_method) {
6507 ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc);
6508 if (new_table != nullptr) {
6509 ArtMethod* new_conflict_method =
6510 Runtime::Current()->CreateImtConflictMethod(linear_alloc);
6511 new_conflict_method->SetImtConflictTable(new_table, image_pointer_size_);
6512 imt[i] = new_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006513 } else {
6514 LOG(ERROR) << "Failed to allocate conflict table";
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006515 imt[i] = imt_conflict_method;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006516 }
6517 } else {
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006518 DCHECK_NE(imt[i], imt_conflict_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006519 }
6520 }
6521
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006522 for (size_t i = 0, length = if_table->Count(); i < length; ++i) {
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006523 ObjPtr<mirror::Class> interface = if_table->GetInterface(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006524 const size_t method_array_count = if_table->GetMethodArrayCount(i);
6525 // Virtual methods can be larger than the if table methods if there are default methods.
6526 if (method_array_count == 0) {
6527 continue;
6528 }
Vladimir Marko557fece2019-03-26 14:29:41 +00006529 ObjPtr<mirror::PointerArray> method_array = if_table->GetMethodArray(i);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006530 for (size_t j = 0; j < method_array_count; ++j) {
6531 ArtMethod* implementation_method =
6532 method_array->GetElementPtrSize<ArtMethod*>(j, image_pointer_size_);
6533 if (ignore_copied_methods && implementation_method->IsCopied()) {
6534 continue;
6535 }
6536 DCHECK(implementation_method != nullptr);
6537 ArtMethod* interface_method = interface->GetVirtualMethod(j, image_pointer_size_);
David Srbeckye36e7f22018-11-14 14:21:23 +00006538 const uint32_t imt_index = interface_method->GetImtIndex();
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006539 if (!imt[imt_index]->IsRuntimeMethod() ||
6540 imt[imt_index] == unimplemented_method ||
6541 imt[imt_index] == imt_conflict_method) {
6542 continue;
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006543 }
Mathieu Chartiercdca4762016-04-28 09:44:54 -07006544 ImtConflictTable* table = imt[imt_index]->GetImtConflictTable(image_pointer_size_);
6545 const size_t num_entries = table->NumEntries(image_pointer_size_);
6546 table->SetInterfaceMethod(num_entries, image_pointer_size_, interface_method);
6547 table->SetImplementationMethod(num_entries, image_pointer_size_, implementation_method);
Mathieu Chartier49b5ced2016-04-14 10:49:19 -07006548 }
6549 }
6550 }
6551}
6552
Vladimir Marko78f62d82022-01-10 16:25:19 +00006553namespace {
6554
Alex Lighteb7c1442015-08-31 13:17:42 -07006555// Simple helper function that checks that no subtypes of 'val' are contained within the 'classes'
6556// set.
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006557static bool NotSubinterfaceOfAny(
Vladimir Marko78f62d82022-01-10 16:25:19 +00006558 const ScopedArenaHashSet<mirror::Class*>& classes,
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006559 ObjPtr<mirror::Class> val)
Alex Lighteb7c1442015-08-31 13:17:42 -07006560 REQUIRES(Roles::uninterruptible_)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006561 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006562 DCHECK(val != nullptr);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006563 for (ObjPtr<mirror::Class> c : classes) {
6564 if (val->IsAssignableFrom(c)) {
Alex Lighteb7c1442015-08-31 13:17:42 -07006565 return false;
6566 }
6567 }
6568 return true;
6569}
6570
// We record new interfaces by the index of the direct interface and the index in the
// direct interface's `IfTable`, or `dex::kDexNoIndex` if it's the direct interface itself.
struct NewInterfaceReference {
  // Index of the direct interface in the class's declared interface list.
  uint32_t direct_interface_index;
  // Index into that direct interface's IfTable identifying an inherited
  // interface, or `dex::kDexNoIndex` when the reference denotes the direct
  // interface itself.
  uint32_t direct_interface_iftable_index;
};
6577
6578class ProxyInterfacesAccessor {
6579 public:
6580 explicit ProxyInterfacesAccessor(Handle<mirror::ObjectArray<mirror::Class>> interfaces)
6581 REQUIRES_SHARED(Locks::mutator_lock_)
6582 : interfaces_(interfaces) {}
6583
6584 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6585 return interfaces_->GetLength();
6586 }
6587
6588 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6589 DCHECK_LT(index, GetLength());
6590 return interfaces_->GetWithoutChecks(index);
6591 }
6592
6593 private:
6594 Handle<mirror::ObjectArray<mirror::Class>> interfaces_;
6595};
6596
6597class NonProxyInterfacesAccessor {
6598 public:
6599 NonProxyInterfacesAccessor(ClassLinker* class_linker, Handle<mirror::Class> klass)
6600 REQUIRES_SHARED(Locks::mutator_lock_)
6601 : interfaces_(klass->GetInterfaceTypeList()),
6602 class_linker_(class_linker),
6603 klass_(klass) {
6604 DCHECK(!klass->IsProxyClass());
6605 }
6606
6607 size_t GetLength() REQUIRES_SHARED(Locks::mutator_lock_) {
6608 return (interfaces_ != nullptr) ? interfaces_->Size() : 0u;
6609 }
6610
6611 ObjPtr<mirror::Class> GetInterface(size_t index) REQUIRES_SHARED(Locks::mutator_lock_) {
6612 DCHECK_LT(index, GetLength());
6613 dex::TypeIndex type_index = interfaces_->GetTypeItem(index).type_idx_;
6614 return class_linker_->LookupResolvedType(type_index, klass_.Get());
6615 }
6616
6617 private:
6618 const dex::TypeList* interfaces_;
6619 ClassLinker* class_linker_;
6620 Handle<mirror::Class> klass_;
6621};
6622
// Finds new interfaces to add to the interface table in addition to superclass interfaces.
//
// Interfaces in the interface table must satisfy the following constraint:
//     all I, J: Interface | I <: J implies J precedes I
// (note A <: B means that A is a subtype of B). We order this backwards so that we do not need
// to reorder superclass interfaces when new interfaces are added in subclass's interface tables.
//
// This function returns a list of references for all interfaces in the transitive
// closure of the direct interfaces that are not in the superclass interfaces.
// The entries in the list are ordered to satisfy the interface table ordering
// constraint and therefore the interface table formed by appending them to the
// superclass interface table shall also satisfy that constraint.
//
// Params:
//   super_iftable - superclass interface table; its first `super_ifcount`
//       entries are treated as already present.
//   allocator - scoped arena backing the temporary hash set and any overflow
//       result storage.
//   interfaces - accessor for the direct interfaces (proxy or dex-backed).
//   initial_storage - caller-provided (typically stack) result buffer.
//   supplemental_storage - out: arena vector used if `initial_storage` fills up.
// Returns a view into whichever storage ended up holding the results; it is
// valid only while that storage is alive.
template <typename InterfaceAccessor>
ALWAYS_INLINE
static ArrayRef<const NewInterfaceReference> FindNewIfTableInterfaces(
    ObjPtr<mirror::IfTable> super_iftable,
    size_t super_ifcount,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces,
    ArrayRef<NewInterfaceReference> initial_storage,
    /*out*/ScopedArenaVector<NewInterfaceReference>* supplemental_storage)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // No suspension allowed: raw mirror::Class* pointers are held in the hash
  // set below and would be invalidated by a moving GC.
  ScopedAssertNoThreadSuspension nts(__FUNCTION__);

  // This is the set of all classes already in the iftable. Used to make checking
  // if a class has already been added quicker.
  constexpr size_t kBufferSize = 32;  // 256 bytes on 64-bit architectures.
  mirror::Class* buffer[kBufferSize];
  ScopedArenaHashSet<mirror::Class*> classes_in_iftable(buffer, kBufferSize, allocator->Adapter());
  // The first super_ifcount elements are from the superclass. We note that they are already added.
  for (size_t i = 0; i < super_ifcount; i++) {
    ObjPtr<mirror::Class> iface = super_iftable->GetInterface(i);
    DCHECK(NotSubinterfaceOfAny(classes_in_iftable, iface)) << "Bad ordering.";
    classes_in_iftable.Put(iface.Ptr());
  }

  // Appends one result entry, growing into `supplemental_storage` (and copying
  // over the existing entries on the first growth) when the current buffer —
  // initially `initial_storage` — is exhausted.
  ArrayRef<NewInterfaceReference> current_storage = initial_storage;
  DCHECK_NE(current_storage.size(), 0u);
  size_t num_new_interfaces = 0u;
  auto insert_reference = [&](uint32_t direct_interface_index,
                              uint32_t direct_interface_iface_index) {
    if (UNLIKELY(num_new_interfaces == current_storage.size())) {
      bool copy = current_storage.data() != supplemental_storage->data();
      supplemental_storage->resize(2u * num_new_interfaces);
      if (copy) {
        std::copy_n(current_storage.data(), num_new_interfaces, supplemental_storage->data());
      }
      current_storage = ArrayRef<NewInterfaceReference>(*supplemental_storage);
    }
    current_storage[num_new_interfaces] = {direct_interface_index, direct_interface_iface_index};
    ++num_new_interfaces;
  };

  for (size_t i = 0, num_interfaces = interfaces.GetLength(); i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);

    // Let us call the first filled_ifcount elements of iftable the current-iface-list.
    // At this point in the loop current-iface-list has the invariant that:
    //    for every pair of interfaces I,J within it:
    //      if index_of(I) < index_of(J) then I is not a subtype of J

    // If we have already seen this element then all of its super-interfaces must already be in the
    // current-iface-list so we can skip adding it.
    if (classes_in_iftable.find(interface.Ptr()) == classes_in_iftable.end()) {
      // We haven't seen this interface so add all of its super-interfaces onto the
      // current-iface-list, skipping those already on it.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        if (classes_in_iftable.find(super_interface.Ptr()) == classes_in_iftable.end()) {
          DCHECK(NotSubinterfaceOfAny(classes_in_iftable, super_interface)) << "Bad ordering.";
          classes_in_iftable.Put(super_interface.Ptr());
          insert_reference(i, j);
        }
      }
      // Add this interface reference after all of its super-interfaces.
      DCHECK(NotSubinterfaceOfAny(classes_in_iftable, interface)) << "Bad ordering";
      classes_in_iftable.Put(interface.Ptr());
      insert_reference(i, dex::kDexNoIndex);
    } else if (kIsDebugBuild) {
      // Check all super-interfaces are already in the list.
      int32_t ifcount = interface->GetIfTableCount();
      for (int32_t j = 0; j < ifcount; j++) {
        ObjPtr<mirror::Class> super_interface = interface->GetIfTable()->GetInterface(j);
        DCHECK(classes_in_iftable.find(super_interface.Ptr()) != classes_in_iftable.end())
            << "Iftable does not contain " << mirror::Class::PrettyClass(super_interface)
            << ", a superinterface of " << interface->PrettyClass();
      }
    }
  }
  return ArrayRef<const NewInterfaceReference>(current_storage.data(), num_new_interfaces);
}
6715
// Builds the interface table (IfTable) for `klass`, reusing the superclass
// table whenever no new interfaces are introduced. Returns nullptr with a
// pending exception on error (non-interface in the implements list, or OOM).
template <typename InterfaceAccessor>
static ObjPtr<mirror::IfTable> SetupInterfaceLookupTable(
    Thread* self,
    Handle<mirror::Class> klass,
    ScopedArenaAllocator* allocator,
    InterfaceAccessor&& interfaces)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(klass->HasSuperClass());
  ObjPtr<mirror::IfTable> super_iftable = klass->GetSuperClass()->GetIfTable();
  DCHECK(super_iftable != nullptr);
  const size_t num_interfaces = interfaces.GetLength();

  // If there are no new interfaces, return the interface table from superclass.
  // If any implementation methods are overridden, we shall copy the table and
  // the method arrays that contain any differences (copy-on-write).
  if (num_interfaces == 0) {
    return super_iftable;
  }

  // Check that every class being implemented is an interface.
  for (size_t i = 0; i != num_interfaces; ++i) {
    ObjPtr<mirror::Class> interface = interfaces.GetInterface(i);
    DCHECK(interface != nullptr);
    if (UNLIKELY(!interface->IsInterface())) {
      ThrowIncompatibleClassChangeError(klass.Get(),
                                        "Class %s implements non-interface class %s",
                                        klass->PrettyDescriptor().c_str(),
                                        interface->PrettyDescriptor().c_str());
      return nullptr;
    }
  }

  // Collect the transitive closure of declared interfaces that are not already
  // covered by the superclass interface table. Results usually fit on the stack.
  static constexpr size_t kMaxStackReferences = 16;
  NewInterfaceReference initial_storage[kMaxStackReferences];
  ScopedArenaVector<NewInterfaceReference> supplemental_storage(allocator->Adapter());
  const size_t super_ifcount = super_iftable->Count();
  ArrayRef<const NewInterfaceReference> new_interface_references =
      FindNewIfTableInterfaces(
          super_iftable,
          super_ifcount,
          allocator,
          interfaces,
          ArrayRef<NewInterfaceReference>(initial_storage),
          &supplemental_storage);

  // If all declared interfaces were already present in superclass interface table,
  // return the interface table from superclass. See above.
  if (UNLIKELY(new_interface_references.empty())) {
    return super_iftable;
  }

  // Create the interface table.
  size_t ifcount = super_ifcount + new_interface_references.size();
  ObjPtr<mirror::IfTable> iftable = AllocIfTable(self, ifcount, super_iftable->GetClass());
  if (UNLIKELY(iftable == nullptr)) {
    self->AssertPendingOOMException();
    return nullptr;
  }
  // Fill in table with superclass's iftable.
  if (super_ifcount != 0) {
    // Reload `super_iftable` as it may have been clobbered by the allocation.
    super_iftable = klass->GetSuperClass()->GetIfTable();
    for (size_t i = 0; i != super_ifcount; i++) {
      ObjPtr<mirror::Class> super_interface = super_iftable->GetInterface(i);
      DCHECK(super_interface != nullptr);
      iftable->SetInterface(i, super_interface);
      // Method arrays are shared copy-on-write with the superclass table.
      ObjPtr<mirror::PointerArray> method_array = super_iftable->GetMethodArrayOrNull(i);
      if (method_array != nullptr) {
        iftable->SetMethodArray(i, method_array);
      }
    }
  }
  // Fill in the table with additional interfaces.
  size_t current_index = super_ifcount;
  for (NewInterfaceReference ref : new_interface_references) {
    ObjPtr<mirror::Class> direct_interface = interfaces.GetInterface(ref.direct_interface_index);
    // `kDexNoIndex` marks the direct interface itself; any other value indexes
    // into the direct interface's own IfTable (an inherited interface).
    ObjPtr<mirror::Class> new_interface = (ref.direct_interface_iftable_index != dex::kDexNoIndex)
        ? direct_interface->GetIfTable()->GetInterface(ref.direct_interface_iftable_index)
        : direct_interface;
    iftable->SetInterface(current_index, new_interface);
    ++current_index;
  }
  DCHECK_EQ(current_index, ifcount);

  if (kIsDebugBuild) {
    // Check that the iftable is ordered correctly.
    for (size_t i = 0; i < ifcount; i++) {
      ObjPtr<mirror::Class> if_a = iftable->GetInterface(i);
      for (size_t j = i + 1; j < ifcount; j++) {
        ObjPtr<mirror::Class> if_b = iftable->GetInterface(j);
        // !(if_a <: if_b)
        CHECK(!if_b->IsAssignableFrom(if_a))
            << "Bad interface order: " << mirror::Class::PrettyClass(if_a) << " (index " << i
            << ") extends "
            << if_b->PrettyClass() << " (index " << j << ") and so should be after it in the "
            << "interface list.";
      }
    }
  }

  return iftable;
}
6818
Alex Light1f3925d2016-09-07 12:04:20 -07006819// Check that all vtable entries are present in this class's virtuals or are the same as a
6820// superclasses vtable entry.
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006821void CheckClassOwnsVTableEntries(Thread* self,
6822 Handle<mirror::Class> klass,
6823 PointerSize pointer_size)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07006824 REQUIRES_SHARED(Locks::mutator_lock_) {
Alex Light1f3925d2016-09-07 12:04:20 -07006825 StackHandleScope<2> hs(self);
6826 Handle<mirror::PointerArray> check_vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
Mathieu Chartier28357fa2016-10-18 16:27:40 -07006827 ObjPtr<mirror::Class> super_temp = (klass->HasSuperClass()) ? klass->GetSuperClass() : nullptr;
Alex Light1f3925d2016-09-07 12:04:20 -07006828 Handle<mirror::Class> superclass(hs.NewHandle(super_temp));
Andreas Gampefa4333d2017-02-14 11:10:34 -08006829 int32_t super_vtable_length = (superclass != nullptr) ? superclass->GetVTableLength() : 0;
Alex Lighte64300b2015-12-15 15:02:47 -08006830 for (int32_t i = 0; i < check_vtable->GetLength(); ++i) {
6831 ArtMethod* m = check_vtable->GetElementPtrSize<ArtMethod*>(i, pointer_size);
6832 CHECK(m != nullptr);
6833
Alex Lighta41a30782017-03-29 11:33:19 -07006834 if (m->GetMethodIndexDuringLinking() != i) {
6835 LOG(WARNING) << m->PrettyMethod()
6836 << " has an unexpected method index for its spot in the vtable for class"
6837 << klass->PrettyClass();
6838 }
Alex Lighte64300b2015-12-15 15:02:47 -08006839 ArraySlice<ArtMethod> virtuals = klass->GetVirtualMethodsSliceUnchecked(pointer_size);
6840 auto is_same_method = [m] (const ArtMethod& meth) {
6841 return &meth == m;
6842 };
Alex Light3f980532017-03-17 15:10:32 -07006843 if (!((super_vtable_length > i && superclass->GetVTableEntry(i, pointer_size) == m) ||
6844 std::find_if(virtuals.begin(), virtuals.end(), is_same_method) != virtuals.end())) {
6845 LOG(WARNING) << m->PrettyMethod() << " does not seem to be owned by current class "
6846 << klass->PrettyClass() << " or any of its superclasses!";
6847 }
Alex Lighte64300b2015-12-15 15:02:47 -08006848 }
6849}
6850
Alex Light1f3925d2016-09-07 12:04:20 -07006851// Check to make sure the vtable does not have duplicates. Duplicates could cause problems when a
6852// method is overridden in a subclass.
Andreas Gampea2fed082019-02-01 09:34:43 -08006853template <PointerSize kPointerSize>
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006854void CheckVTableHasNoDuplicates(Thread* self, Handle<mirror::Class> klass)
Alex Light1f3925d2016-09-07 12:04:20 -07006855 REQUIRES_SHARED(Locks::mutator_lock_) {
6856 StackHandleScope<1> hs(self);
6857 Handle<mirror::PointerArray> vtable(hs.NewHandle(klass->GetVTableDuringLinking()));
6858 int32_t num_entries = vtable->GetLength();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006859
6860 // Observations:
6861 // * The older implementation was O(n^2) and got too expensive for apps with larger classes.
6862 // * Many classes do not override Object functions (e.g., equals/hashCode/toString). Thus,
6863 // for many classes outside of libcore a cross-dexfile check has to be run anyways.
6864 // * In the cross-dexfile case, with the O(n^2), in the best case O(n) cross checks would have
6865 // to be done. It is thus OK in a single-pass algorithm to read all data, anyways.
6866 // * The single-pass algorithm will trade memory for speed, but that is OK.
6867
6868 CHECK_GT(num_entries, 0);
6869
6870 auto log_fn = [&vtable, &klass](int32_t i, int32_t j) REQUIRES_SHARED(Locks::mutator_lock_) {
6871 ArtMethod* m1 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(i);
6872 ArtMethod* m2 = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
6873 LOG(WARNING) << "vtable entries " << i << " and " << j << " are identical for "
6874 << klass->PrettyClass() << " in method " << m1->PrettyMethod()
6875 << " (0x" << std::hex << reinterpret_cast<uintptr_t>(m2) << ") and "
6876 << m2->PrettyMethod() << " (0x" << std::hex
6877 << reinterpret_cast<uintptr_t>(m2) << ")";
6878 };
6879 struct BaseHashType {
6880 static size_t HashCombine(size_t seed, size_t val) {
6881 return seed ^ (val + 0x9e3779b9 + (seed << 6) + (seed >> 2));
6882 }
6883 };
6884
6885 // Check assuming all entries come from the same dex file.
6886 {
6887 // Find the first interesting method and its dex file.
6888 int32_t start = 0;
6889 for (; start < num_entries; ++start) {
6890 ArtMethod* vtable_entry = vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start);
6891 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6892 // maybe).
6893 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6894 vtable_entry->GetAccessFlags())) {
6895 continue;
6896 }
6897 break;
6898 }
6899 if (start == num_entries) {
6900 return;
6901 }
6902 const DexFile* dex_file =
6903 vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(start)->
6904 GetInterfaceMethodIfProxy(kPointerSize)->GetDexFile();
6905
6906 // Helper function to avoid logging if we have to run the cross-file checks.
6907 auto check_fn = [&](bool log_warn) REQUIRES_SHARED(Locks::mutator_lock_) {
6908 // Use a map to store seen entries, as the storage space is too large for a bitvector.
6909 using PairType = std::pair<uint32_t, uint16_t>;
6910 struct PairHash : BaseHashType {
6911 size_t operator()(const PairType& key) const {
6912 return BaseHashType::HashCombine(BaseHashType::HashCombine(0, key.first), key.second);
6913 }
6914 };
Vladimir Marko782fb712020-12-23 12:47:31 +00006915 HashMap<PairType, int32_t, DefaultMapEmptyFn<PairType, int32_t>, PairHash> seen;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006916 seen.reserve(2 * num_entries);
6917 bool need_slow_path = false;
6918 bool found_dup = false;
6919 for (int i = start; i < num_entries; ++i) {
6920 // Can use Unchecked here as the start loop already ensured that the arrays are correct
6921 // wrt/ kPointerSize.
6922 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6923 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6924 vtable_entry->GetAccessFlags())) {
6925 continue;
6926 }
6927 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
6928 if (dex_file != m->GetDexFile()) {
6929 need_slow_path = true;
6930 break;
6931 }
6932 const dex::MethodId* m_mid = &dex_file->GetMethodId(m->GetDexMethodIndex());
6933 PairType pair = std::make_pair(m_mid->name_idx_.index_, m_mid->proto_idx_.index_);
6934 auto it = seen.find(pair);
6935 if (it != seen.end()) {
6936 found_dup = true;
6937 if (log_warn) {
6938 log_fn(it->second, i);
6939 }
6940 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00006941 seen.insert(std::make_pair(pair, i));
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006942 }
6943 }
6944 return std::make_pair(need_slow_path, found_dup);
6945 };
6946 std::pair<bool, bool> result = check_fn(/* log_warn= */ false);
6947 if (!result.first) {
6948 if (result.second) {
6949 check_fn(/* log_warn= */ true);
6950 }
6951 return;
6952 }
6953 }
6954
6955 // Need to check across dex files.
6956 struct Entry {
6957 size_t cached_hash = 0;
Vladimir Markoaa027b82021-01-06 20:34:20 +00006958 uint32_t name_len = 0;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006959 const char* name = nullptr;
6960 Signature signature = Signature::NoSignature();
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006961
Vladimir Marko782fb712020-12-23 12:47:31 +00006962 Entry() = default;
6963 Entry(const Entry& other) = default;
6964 Entry& operator=(const Entry& other) = default;
6965
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006966 Entry(const DexFile* dex_file, const dex::MethodId& mid)
Vladimir Markoaa027b82021-01-06 20:34:20 +00006967 : name_len(0), // Explicit to enforce ordering with -Werror,-Wreorder-ctor.
6968 // This call writes `name_len` and it is therefore necessary that the
6969 // initializer for `name_len` comes before it, otherwise the value
6970 // from the call would be overwritten by that initializer.
6971 name(dex_file->StringDataAndUtf16LengthByIdx(mid.name_idx_, &name_len)),
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006972 signature(dex_file->GetMethodSignature(mid)) {
Vladimir Markoaa027b82021-01-06 20:34:20 +00006973 // The `name_len` has been initialized to the UTF16 length. Calculate length in bytes.
6974 if (name[name_len] != 0) {
6975 name_len += strlen(name + name_len);
6976 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006977 }
6978
6979 bool operator==(const Entry& other) const {
Vladimir Marko782fb712020-12-23 12:47:31 +00006980 return name_len == other.name_len &&
6981 memcmp(name, other.name, name_len) == 0 &&
6982 signature == other.signature;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006983 }
6984 };
6985 struct EntryHash {
6986 size_t operator()(const Entry& key) const {
6987 return key.cached_hash;
6988 }
6989 };
Vladimir Marko782fb712020-12-23 12:47:31 +00006990 HashMap<Entry, int32_t, DefaultMapEmptyFn<Entry, int32_t>, EntryHash> map;
Andreas Gampe9f3928f2019-02-04 11:19:31 -08006991 for (int32_t i = 0; i < num_entries; ++i) {
6992 // Can use Unchecked here as the first loop already ensured that the arrays are correct
6993 // wrt/ kPointerSize.
6994 ArtMethod* vtable_entry = vtable->GetElementPtrSizeUnchecked<ArtMethod*, kPointerSize>(i);
6995 // Don't bother if we cannot 'see' the vtable entry (i.e. it is a package-private member
6996 // maybe).
Alex Light1f3925d2016-09-07 12:04:20 -07006997 if (!klass->CanAccessMember(vtable_entry->GetDeclaringClass(),
6998 vtable_entry->GetAccessFlags())) {
6999 continue;
7000 }
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007001 ArtMethod* m = vtable_entry->GetInterfaceMethodIfProxy(kPointerSize);
7002 const DexFile* dex_file = m->GetDexFile();
7003 const dex::MethodId& mid = dex_file->GetMethodId(m->GetDexMethodIndex());
7004
7005 Entry e(dex_file, mid);
7006
7007 size_t string_hash = std::hash<std::string_view>()(std::string_view(e.name, e.name_len));
7008 size_t sig_hash = std::hash<std::string>()(e.signature.ToString());
7009 e.cached_hash = BaseHashType::HashCombine(BaseHashType::HashCombine(0u, string_hash),
7010 sig_hash);
7011
7012 auto it = map.find(e);
7013 if (it != map.end()) {
7014 log_fn(it->second, i);
7015 } else {
Vladimir Marko782fb712020-12-23 12:47:31 +00007016 map.insert(std::make_pair(e, i));
Alex Light1f3925d2016-09-07 12:04:20 -07007017 }
7018 }
7019}
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007020
7021void CheckVTableHasNoDuplicates(Thread* self,
7022 Handle<mirror::Class> klass,
7023 PointerSize pointer_size)
Andreas Gampea2fed082019-02-01 09:34:43 -08007024 REQUIRES_SHARED(Locks::mutator_lock_) {
7025 switch (pointer_size) {
7026 case PointerSize::k64:
7027 CheckVTableHasNoDuplicates<PointerSize::k64>(self, klass);
7028 break;
7029 case PointerSize::k32:
7030 CheckVTableHasNoDuplicates<PointerSize::k32>(self, klass);
7031 break;
7032 }
7033}
Alex Light1f3925d2016-09-07 12:04:20 -07007034
Orion Hodson5880c772020-07-28 20:12:08 +01007035static void CheckVTable(Thread* self, Handle<mirror::Class> klass, PointerSize pointer_size)
Alex Light1f3925d2016-09-07 12:04:20 -07007036 REQUIRES_SHARED(Locks::mutator_lock_) {
7037 CheckClassOwnsVTableEntries(self, klass, pointer_size);
7038 CheckVTableHasNoDuplicates(self, klass, pointer_size);
7039}
7040
Andreas Gampe9f3928f2019-02-04 11:19:31 -08007041} // namespace
7042
Vladimir Markob91402f2021-12-21 15:55:06 +00007043template <PointerSize kPointerSize>
Vladimir Markobc893672021-11-10 15:25:46 +00007044class ClassLinker::LinkMethodsHelper {
Vladimir Marko921094a2017-01-12 18:37:06 +00007045 public:
Vladimir Markobc893672021-11-10 15:25:46 +00007046 LinkMethodsHelper(ClassLinker* class_linker,
7047 Handle<mirror::Class> klass,
7048 Thread* self,
7049 Runtime* runtime)
Vladimir Marko921094a2017-01-12 18:37:06 +00007050 : class_linker_(class_linker),
7051 klass_(klass),
Vladimir Marko921094a2017-01-12 18:37:06 +00007052 self_(self),
Vladimir Marko78f62d82022-01-10 16:25:19 +00007053 runtime_(runtime),
Lokesh Gidra606bd942022-05-23 19:00:09 +00007054 stack_(runtime->GetArenaPool()),
Vladimir Marko921094a2017-01-12 18:37:06 +00007055 allocator_(&stack_),
Vladimir Marko19366b82022-01-18 10:41:28 +00007056 copied_method_records_(copied_method_records_initial_buffer_,
7057 kCopiedMethodRecordInitialBufferSize,
7058 allocator_.Adapter()),
7059 num_new_copied_methods_(0u) {
Vladimir Marko921094a2017-01-12 18:37:06 +00007060 }
7061
Vladimir Marko78f62d82022-01-10 16:25:19 +00007062 // Links the virtual and interface methods for the given class.
Vladimir Markobc893672021-11-10 15:25:46 +00007063 //
7064 // Arguments:
7065 // * self - The current thread.
7066 // * klass - class, whose vtable will be filled in.
Vladimir Marko78f62d82022-01-10 16:25:19 +00007067 // * interfaces - implemented interfaces for a proxy class, otherwise null.
7068 // * out_new_conflict - whether there is a new conflict compared to the superclass.
7069 // * out_imt - interface method table to fill.
7070 bool LinkMethods(
Vladimir Markobc893672021-11-10 15:25:46 +00007071 Thread* self,
7072 Handle<mirror::Class> klass,
Vladimir Marko78f62d82022-01-10 16:25:19 +00007073 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
Vladimir Markobc893672021-11-10 15:25:46 +00007074 bool* out_new_conflict,
7075 ArtMethod** out_imt)
7076 REQUIRES_SHARED(Locks::mutator_lock_);
7077
7078 private:
Vladimir Marko0441d202022-02-18 13:55:15 +00007079 // Allocate a pointer array.
7080 static ObjPtr<mirror::PointerArray> AllocPointerArray(Thread* self, size_t length)
7081 REQUIRES_SHARED(Locks::mutator_lock_);
7082
7083 // Allocate method arrays for interfaces.
7084 bool AllocateIfTableMethodArrays(Thread* self,
7085 Handle<mirror::Class> klass,
7086 Handle<mirror::IfTable> iftable)
7087 REQUIRES_SHARED(Locks::mutator_lock_);
7088
Vladimir Marko8670e042021-12-21 17:55:48 +00007089 // Assign vtable indexes to declared virtual methods for a non-interface class other
7090 // than `java.lang.Object`. Returns the number of vtable entries on success, 0 on failure.
Vladimir Marko19366b82022-01-18 10:41:28 +00007091 // This function also assigns vtable indexes for interface methods in new interfaces
7092 // and records data for copied methods which shall be referenced by the vtable.
Vladimir Markobed84ef2022-01-21 13:57:14 +00007093 size_t AssignVTableIndexes(ObjPtr<mirror::Class> klass,
Vladimir Marko8670e042021-12-21 17:55:48 +00007094 ObjPtr<mirror::Class> super_class,
Vladimir Marko51718132022-02-07 16:31:08 +00007095 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00007096 size_t num_virtual_methods,
7097 ObjPtr<mirror::IfTable> iftable)
7098 REQUIRES_SHARED(Locks::mutator_lock_);
7099
7100 bool FindCopiedMethodsForInterface(ObjPtr<mirror::Class> klass,
7101 size_t num_virtual_methods,
7102 ObjPtr<mirror::IfTable> iftable)
Vladimir Marko8670e042021-12-21 17:55:48 +00007103 REQUIRES_SHARED(Locks::mutator_lock_);
7104
Vladimir Marko78f62d82022-01-10 16:25:19 +00007105 bool LinkJavaLangObjectMethods(Thread* self, Handle<mirror::Class> klass)
Vladimir Markob91402f2021-12-21 15:55:06 +00007106 REQUIRES_SHARED(Locks::mutator_lock_) COLD_ATTR;
7107
Vladimir Marko19366b82022-01-18 10:41:28 +00007108 void ReallocMethods(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
Vladimir Marko0441d202022-02-18 13:55:15 +00007109 bool FinalizeIfTable(Handle<mirror::Class> klass,
7110 MutableHandle<mirror::IfTable> iftable,
7111 Handle<mirror::PointerArray> vtable,
Vladimir Marko51718132022-02-07 16:31:08 +00007112 bool is_klass_abstract,
7113 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00007114 bool* out_new_conflict,
7115 ArtMethod** out_imt)
Vladimir Marko78f62d82022-01-10 16:25:19 +00007116 REQUIRES_SHARED(Locks::mutator_lock_);
7117
Vladimir Marko921094a2017-01-12 18:37:06 +00007118 void ClobberOldMethods(LengthPrefixedArray<ArtMethod>* old_methods,
7119 LengthPrefixedArray<ArtMethod>* methods) {
Vladimir Marko19366b82022-01-18 10:41:28 +00007120 if (kIsDebugBuild && old_methods != nullptr) {
Vladimir Marko921094a2017-01-12 18:37:06 +00007121 CHECK(methods != nullptr);
7122 // Put some random garbage in old methods to help find stale pointers.
Vladimir Marko19366b82022-01-18 10:41:28 +00007123 if (methods != old_methods) {
Vladimir Marko921094a2017-01-12 18:37:06 +00007124 // Need to make sure the GC is not running since it could be scanning the methods we are
7125 // about to overwrite.
Vladimir Markoddf4fd32021-11-22 16:31:57 +00007126 ScopedThreadStateChange tsc(self_, ThreadState::kSuspended);
Vladimir Marko921094a2017-01-12 18:37:06 +00007127 gc::ScopedGCCriticalSection gcs(self_,
7128 gc::kGcCauseClassLinker,
7129 gc::kCollectorTypeClassLinker);
7130 const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_methods->size(),
Vladimir Markob91402f2021-12-21 15:55:06 +00007131 kMethodSize,
7132 kMethodAlignment);
Vladimir Marko921094a2017-01-12 18:37:06 +00007133 memset(old_methods, 0xFEu, old_size);
Lokesh Gidra606bd942022-05-23 19:00:09 +00007134 // Set size to 0 to avoid visiting declaring classes.
7135 if (gUseUserfaultfd) {
7136 old_methods->SetSize(0);
7137 }
Vladimir Marko921094a2017-01-12 18:37:06 +00007138 }
7139 }
7140 }
7141
Vladimir Marko19366b82022-01-18 10:41:28 +00007142 NO_INLINE
7143 void LogNewVirtuals(LengthPrefixedArray<ArtMethod>* methods) const
7144 REQUIRES_SHARED(Locks::mutator_lock_) {
7145 ObjPtr<mirror::Class> klass = klass_.Get();
7146 size_t num_new_copied_methods = num_new_copied_methods_;
7147 size_t old_method_count = methods->size() - num_new_copied_methods;
7148 size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
7149 size_t num_miranda_methods = 0u;
7150 size_t num_overriding_default_methods = 0u;
7151 size_t num_default_methods = 0u;
7152 size_t num_overriding_default_conflict_methods = 0u;
7153 size_t num_default_conflict_methods = 0u;
7154 for (size_t i = 0; i != num_new_copied_methods; ++i) {
7155 ArtMethod& m = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
7156 if (m.IsDefault()) {
7157 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7158 ++num_overriding_default_methods;
7159 } else {
7160 ++num_default_methods;
7161 }
7162 } else if (m.IsDefaultConflicting()) {
7163 if (m.GetMethodIndexDuringLinking() < super_vtable_length) {
7164 ++num_overriding_default_conflict_methods;
7165 } else {
7166 ++num_default_conflict_methods;
7167 }
7168 } else {
7169 DCHECK(m.IsMiranda());
7170 ++num_miranda_methods;
7171 }
Vladimir Marko5cfb7bb2022-01-28 11:12:17 +00007172 }
Vladimir Marko19366b82022-01-18 10:41:28 +00007173 VLOG(class_linker) << klass->PrettyClass() << ": miranda_methods=" << num_miranda_methods
7174 << " default_methods=" << num_default_methods
7175 << " overriding_default_methods=" << num_overriding_default_methods
7176 << " default_conflict_methods=" << num_default_conflict_methods
Vladimir Marko921094a2017-01-12 18:37:06 +00007177 << " overriding_default_conflict_methods="
Vladimir Marko19366b82022-01-18 10:41:28 +00007178 << num_overriding_default_conflict_methods;
Vladimir Marko921094a2017-01-12 18:37:06 +00007179 }
7180
Vladimir Marko8670e042021-12-21 17:55:48 +00007181 class MethodIndexEmptyFn {
7182 public:
7183 void MakeEmpty(uint32_t& item) const {
7184 item = dex::kDexNoIndex;
7185 }
7186 bool IsEmpty(const uint32_t& item) const {
7187 return item == dex::kDexNoIndex;
7188 }
7189 };
7190
Vladimir Marko7ddae992022-01-18 14:27:20 +00007191 class VTableIndexCheckerDebug {
7192 protected:
7193 explicit VTableIndexCheckerDebug(size_t vtable_length)
7194 : vtable_length_(vtable_length) {}
Vladimir Marko8670e042021-12-21 17:55:48 +00007195
Vladimir Marko7ddae992022-01-18 14:27:20 +00007196 void CheckIndex(uint32_t index) const {
7197 CHECK_LT(index, vtable_length_);
Vladimir Marko8670e042021-12-21 17:55:48 +00007198 }
7199
7200 private:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007201 uint32_t vtable_length_;
Vladimir Marko8670e042021-12-21 17:55:48 +00007202 };
7203
Vladimir Marko7ddae992022-01-18 14:27:20 +00007204 class VTableIndexCheckerRelease {
7205 protected:
7206 explicit VTableIndexCheckerRelease(size_t vtable_length ATTRIBUTE_UNUSED) {}
7207 void CheckIndex(uint32_t index ATTRIBUTE_UNUSED) const {}
7208 };
Vladimir Marko8670e042021-12-21 17:55:48 +00007209
Vladimir Marko7ddae992022-01-18 14:27:20 +00007210 using VTableIndexChecker =
7211 std::conditional_t<kIsDebugBuild, VTableIndexCheckerDebug, VTableIndexCheckerRelease>;
7212
7213 class VTableAccessor : private VTableIndexChecker {
7214 public:
7215 VTableAccessor(uint8_t* raw_vtable, size_t vtable_length)
7216 REQUIRES_SHARED(Locks::mutator_lock_)
7217 : VTableIndexChecker(vtable_length),
7218 raw_vtable_(raw_vtable) {}
Vladimir Marko8670e042021-12-21 17:55:48 +00007219
7220 ArtMethod* GetVTableEntry(uint32_t index) const REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko7ddae992022-01-18 14:27:20 +00007221 this->CheckIndex(index);
7222 uint8_t* entry = raw_vtable_ + static_cast<size_t>(kPointerSize) * index;
7223 if (kPointerSize == PointerSize::k64) {
7224 return reinterpret_cast64<ArtMethod*>(*reinterpret_cast<uint64_t*>(entry));
7225 } else {
7226 return reinterpret_cast32<ArtMethod*>(*reinterpret_cast<uint32_t*>(entry));
7227 }
Vladimir Marko8670e042021-12-21 17:55:48 +00007228 }
7229
7230 private:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007231 uint8_t* raw_vtable_;
Vladimir Marko8670e042021-12-21 17:55:48 +00007232 };
7233
Vladimir Marko8670e042021-12-21 17:55:48 +00007234 class VTableSignatureHash {
7235 public:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007236 explicit VTableSignatureHash(VTableAccessor accessor)
Vladimir Marko8670e042021-12-21 17:55:48 +00007237 REQUIRES_SHARED(Locks::mutator_lock_)
7238 : accessor_(accessor) {}
7239
7240 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7241 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7242 return ComputeMethodHash(method);
7243 }
7244
7245 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7246 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7247 return ComputeMethodHash(accessor_.GetVTableEntry(index));
7248 }
7249
7250 private:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007251 VTableAccessor accessor_;
Vladimir Marko8670e042021-12-21 17:55:48 +00007252 };
7253
Vladimir Marko8670e042021-12-21 17:55:48 +00007254 class VTableSignatureEqual {
7255 public:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007256 explicit VTableSignatureEqual(VTableAccessor accessor)
Vladimir Marko8670e042021-12-21 17:55:48 +00007257 REQUIRES_SHARED(Locks::mutator_lock_)
7258 : accessor_(accessor) {}
7259
7260 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7261 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
Vladimir Markobed84ef2022-01-21 13:57:14 +00007262 return MethodSignatureEquals(accessor_.GetVTableEntry(lhs_index), rhs);
Vladimir Marko8670e042021-12-21 17:55:48 +00007263 }
7264
7265 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7266 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7267 return (*this)(lhs_index, accessor_.GetVTableEntry(rhs_index));
7268 }
7269
7270 private:
Vladimir Marko7ddae992022-01-18 14:27:20 +00007271 VTableAccessor accessor_;
Vladimir Marko8670e042021-12-21 17:55:48 +00007272 };
7273
Vladimir Marko7ddae992022-01-18 14:27:20 +00007274 using VTableSignatureSet =
7275 ScopedArenaHashSet<uint32_t, MethodIndexEmptyFn, VTableSignatureHash, VTableSignatureEqual>;
Vladimir Marko8670e042021-12-21 17:55:48 +00007276
Vladimir Marko19366b82022-01-18 10:41:28 +00007277 class DeclaredVirtualSignatureHash {
7278 public:
7279 explicit DeclaredVirtualSignatureHash(ObjPtr<mirror::Class> klass)
7280 REQUIRES_SHARED(Locks::mutator_lock_)
7281 : klass_(klass) {}
7282
7283 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7284 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7285 return ComputeMethodHash(method);
7286 }
7287
7288 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7289 size_t operator()(uint32_t index) const NO_THREAD_SAFETY_ANALYSIS {
7290 DCHECK_LT(index, klass_->NumDeclaredVirtualMethods());
7291 ArtMethod* method = klass_->GetVirtualMethodDuringLinking(index, kPointerSize);
7292 return ComputeMethodHash(method->GetInterfaceMethodIfProxy(kPointerSize));
7293 }
7294
7295 private:
7296 ObjPtr<mirror::Class> klass_;
7297 };
7298
7299 class DeclaredVirtualSignatureEqual {
7300 public:
7301 explicit DeclaredVirtualSignatureEqual(ObjPtr<mirror::Class> klass)
7302 REQUIRES_SHARED(Locks::mutator_lock_)
7303 : klass_(klass) {}
7304
7305 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7306 bool operator()(uint32_t lhs_index, ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7307 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7308 ArtMethod* lhs = klass_->GetVirtualMethodDuringLinking(lhs_index, kPointerSize);
7309 return MethodSignatureEquals(lhs->GetInterfaceMethodIfProxy(kPointerSize), rhs);
7310 }
7311
7312 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7313 bool operator()(uint32_t lhs_index, uint32_t rhs_index) const NO_THREAD_SAFETY_ANALYSIS {
7314 DCHECK_LT(lhs_index, klass_->NumDeclaredVirtualMethods());
7315 DCHECK_LT(rhs_index, klass_->NumDeclaredVirtualMethods());
7316 return lhs_index == rhs_index;
7317 }
7318
7319 private:
7320 ObjPtr<mirror::Class> klass_;
7321 };
7322
7323 using DeclaredVirtualSignatureSet = ScopedArenaHashSet<uint32_t,
7324 MethodIndexEmptyFn,
7325 DeclaredVirtualSignatureHash,
7326 DeclaredVirtualSignatureEqual>;
7327
7328 // Helper class to keep records for determining the correct copied method to create.
7329 class CopiedMethodRecord {
7330 public:
7331 enum class State : uint32_t {
7332 // Note: The `*Single` values are used when we know that there is only one interface
7333 // method with the given signature that's not masked; that method is the main method.
7334 // We use this knowledge for faster masking check, otherwise we need to search for
7335 // a masking method through methods of all interfaces that could potentially mask it.
7336 kAbstractSingle,
7337 kDefaultSingle,
7338 kAbstract,
7339 kDefault,
7340 kDefaultConflict,
7341 kUseSuperMethod,
7342 };
7343
7344 CopiedMethodRecord()
7345 : main_method_(nullptr),
7346 method_index_(0u),
7347 state_(State::kAbstractSingle) {}
7348
7349 CopiedMethodRecord(ArtMethod* main_method, size_t vtable_index)
7350 : main_method_(main_method),
7351 method_index_(vtable_index),
7352 state_(State::kAbstractSingle) {}
7353
7354 // Set main method. The new main method must be more specific implementation.
7355 void SetMainMethod(ArtMethod* main_method) {
7356 DCHECK(main_method_ != nullptr);
7357 main_method_ = main_method;
7358 }
7359
7360 // The main method is the first encountered default method if any,
7361 // otherwise the first encountered abstract method.
7362 ArtMethod* GetMainMethod() const {
7363 return main_method_;
7364 }
7365
7366 void SetMethodIndex(size_t method_index) {
7367 DCHECK_NE(method_index, dex::kDexNoIndex);
7368 method_index_ = method_index;
7369 }
7370
7371 size_t GetMethodIndex() const {
7372 DCHECK_NE(method_index_, dex::kDexNoIndex);
7373 return method_index_;
7374 }
7375
7376 void SetState(State state) {
7377 state_ = state;
7378 }
7379
7380 State GetState() const {
7381 return state_;
7382 }
7383
7384 ALWAYS_INLINE
7385 void UpdateStateForInterface(ObjPtr<mirror::Class> iface,
7386 ArtMethod* interface_method,
7387 ObjPtr<mirror::IfTable> iftable,
7388 size_t ifcount,
7389 size_t index)
7390 REQUIRES_SHARED(Locks::mutator_lock_) {
7391 DCHECK_EQ(ifcount, iftable->Count());
7392 DCHECK_LT(index, ifcount);
7393 DCHECK(iface == interface_method->GetDeclaringClass());
7394 DCHECK(iface == iftable->GetInterface(index));
7395 DCHECK(interface_method->IsDefault());
7396 if (GetState() != State::kDefaultConflict) {
7397 DCHECK(GetState() == State::kDefault);
7398 // We do not record all overriding methods, so we need to walk over all
7399 // interfaces that could mask the `interface_method`.
7400 if (ContainsOverridingMethodOf(iftable, index + 1, ifcount, iface, interface_method)) {
7401 return; // Found an overriding method that masks `interface_method`.
7402 }
7403 // We have a new default method that's not masked by any other method.
7404 SetState(State::kDefaultConflict);
7405 }
7406 }
7407
7408 ALWAYS_INLINE
7409 void UpdateState(ObjPtr<mirror::Class> iface,
7410 ArtMethod* interface_method,
7411 size_t vtable_index,
7412 ObjPtr<mirror::IfTable> iftable,
7413 size_t ifcount,
7414 size_t index)
7415 REQUIRES_SHARED(Locks::mutator_lock_) {
7416 DCHECK_EQ(ifcount, iftable->Count());
7417 DCHECK_LT(index, ifcount);
7418 if (kIsDebugBuild) {
7419 if (interface_method->IsCopied()) {
7420 // Called from `FinalizeState()` for a default method from superclass.
7421 // The `index` points to the last interface inherited from the superclass
7422 // as we need to search only the new interfaces for masking methods.
7423 DCHECK(interface_method->IsDefault());
7424 } else {
7425 DCHECK(iface == interface_method->GetDeclaringClass());
7426 DCHECK(iface == iftable->GetInterface(index));
7427 }
7428 }
7429 DCHECK_EQ(vtable_index, method_index_);
7430 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7431 return ContainsImplementingMethod(iftable, index + 1, ifcount, iface, vtable_index);
7432 };
7433 UpdateStateImpl(iface, interface_method, slow_is_masked);
7434 }
7435
7436 ALWAYS_INLINE
7437 void FinalizeState(ArtMethod* super_method,
7438 size_t vtable_index,
7439 ObjPtr<mirror::IfTable> iftable,
7440 size_t ifcount,
7441 ObjPtr<mirror::IfTable> super_iftable,
7442 size_t super_ifcount)
7443 REQUIRES_SHARED(Locks::mutator_lock_) {
7444 DCHECK(super_method->IsCopied());
7445 DCHECK_EQ(vtable_index, method_index_);
7446 DCHECK_EQ(vtable_index, super_method->GetMethodIndex());
7447 DCHECK_NE(super_ifcount, 0u);
7448 if (super_method->IsDefault()) {
7449 if (UNLIKELY(super_method->IsDefaultConflicting())) {
7450 // Some of the default methods that contributed to the conflict in the superclass
7451 // may be masked by new interfaces. Walk over all the interfaces and update state
7452 // as long as the current state is not `kDefaultConflict`.
7453 size_t i = super_ifcount;
7454 while (GetState() != State::kDefaultConflict && i != 0u) {
7455 --i;
7456 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
7457 DCHECK(iface == super_iftable->GetInterface(i));
7458 auto [found, index] =
7459 MethodArrayContains(super_iftable->GetMethodArrayOrNull(i), super_method);
7460 if (found) {
7461 ArtMethod* interface_method = iface->GetVirtualMethod(index, kPointerSize);
7462 auto slow_is_masked = [=]() REQUIRES_SHARED(Locks::mutator_lock_) {
7463 // Note: The `iftable` has method arrays in range [super_ifcount, ifcount) filled
7464 // with vtable indexes but the range [0, super_ifcount) is empty, so we need to
7465 // use the `super_iftable` filled with implementation methods for that range.
7466 return ContainsImplementingMethod(
7467 super_iftable, i + 1u, super_ifcount, iface, super_method) ||
7468 ContainsImplementingMethod(
7469 iftable, super_ifcount, ifcount, iface, vtable_index);
7470 };
7471 UpdateStateImpl(iface, interface_method, slow_is_masked);
7472 }
7473 }
7474 if (GetState() == State::kDefaultConflict) {
7475 SetState(State::kUseSuperMethod);
7476 }
7477 } else {
7478 // There was exactly one default method in superclass interfaces that was
7479 // not masked by subinterfaces. Use `UpdateState()` to process it and pass
7480 // `super_ifcount - 1` as index for checking if it's been masked by new interfaces.
7481 ObjPtr<mirror::Class> iface = super_method->GetDeclaringClass();
7482 UpdateState(
7483 iface, super_method, vtable_index, iftable, ifcount, /*index=*/ super_ifcount - 1u);
7484 if (GetMainMethod() == super_method) {
7485 DCHECK(GetState() == State::kDefault) << enum_cast<uint32_t>(GetState());
7486 SetState(State::kUseSuperMethod);
7487 }
7488 }
7489 } else {
7490 DCHECK(super_method->IsMiranda());
7491 // Any default methods with this signature in superclass interfaces have been
7492 // masked by subinterfaces. Check if we can reuse the miranda method.
7493 if (GetState() == State::kAbstractSingle || GetState() == State::kAbstract) {
7494 SetState(State::kUseSuperMethod);
7495 }
7496 }
7497 }
7498
7499 private:
7500 template <typename Predicate>
7501 ALWAYS_INLINE
7502 void UpdateStateImpl(ObjPtr<mirror::Class> iface,
7503 ArtMethod* interface_method,
7504 Predicate&& slow_is_masked)
7505 REQUIRES_SHARED(Locks::mutator_lock_) {
7506 bool have_default = false;
7507 switch (GetState()) {
7508 case State::kDefaultSingle:
7509 have_default = true;
7510 FALLTHROUGH_INTENDED;
7511 case State::kAbstractSingle:
7512 if (GetMainMethod()->GetDeclaringClass()->Implements(iface)) {
7513 return; // The main method masks the `interface_method`.
7514 }
7515 if (!interface_method->IsDefault()) {
7516 SetState(have_default ? State::kDefault : State::kAbstract);
7517 return;
7518 }
7519 break;
7520 case State::kDefault:
7521 have_default = true;
7522 FALLTHROUGH_INTENDED;
7523 case State::kAbstract:
7524 if (!interface_method->IsDefault()) {
7525 return; // Keep the same state. We do not need to check for masking.
7526 }
7527 // We do not record all overriding methods, so we need to walk over all
7528 // interfaces that could mask the `interface_method`. The provided
7529 // predicate `slow_is_masked()` does that.
7530 if (slow_is_masked()) {
7531 return; // Found an overriding method that masks `interface_method`.
7532 }
7533 break;
7534 case State::kDefaultConflict:
7535 return; // The state cannot change anymore.
7536 default:
7537 LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(GetState());
7538 UNREACHABLE();
7539 }
7540 // We have a new default method that's not masked by any other method.
7541 DCHECK(interface_method->IsDefault());
7542 if (have_default) {
7543 SetState(State::kDefaultConflict);
7544 } else {
7545 SetMainMethod(interface_method);
7546 SetState(State::kDefault);
7547 }
7548 }
7549
7550 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7551 // that declares a method with the same name and signature as 'interface_method'.
7552 //
7553 // Arguments
7554 // - iftable: The iftable we are searching for an overriding method.
7555 // - begin: The start of the range to search.
7556 // - end: The end of the range to search.
7557 // - iface: The interface we are checking to see if anything overrides.
7558 // - interface_method:
7559 // The interface method providing a name and signature we're searching for.
7560 //
7561 // Returns whether an overriding method was found in any subinterface of `iface`.
7562 static bool ContainsOverridingMethodOf(ObjPtr<mirror::IfTable> iftable,
7563 size_t begin,
7564 size_t end,
7565 ObjPtr<mirror::Class> iface,
7566 ArtMethod* interface_method)
7567 REQUIRES_SHARED(Locks::mutator_lock_) {
7568 for (size_t i = begin; i != end; ++i) {
7569 ObjPtr<mirror::Class> current_iface = iftable->GetInterface(i);
7570 for (ArtMethod& current_method : current_iface->GetDeclaredVirtualMethods(kPointerSize)) {
7571 if (MethodSignatureEquals(&current_method, interface_method)) {
7572 // Check if the i'th interface is a subtype of this one.
7573 if (current_iface->Implements(iface)) {
7574 return true;
7575 }
7576 break;
7577 }
7578 }
7579 }
7580 return false;
7581 }
7582
7583 // Determine if the given `iftable` contains in the given range a subinterface of `iface`
7584 // that declares a method implemented by 'target'. This is an optimized version of
7585 // `ContainsOverridingMethodOf()` that searches implementation method arrays instead
7586 // of comparing signatures for declared interface methods.
7587 //
7588 // Arguments
7589 // - iftable: The iftable we are searching for an overriding method.
7590 // - begin: The start of the range to search.
7591 // - end: The end of the range to search.
7592 // - iface: The interface we are checking to see if anything overrides.
7593 // - target: The implementation method we're searching for.
7594 // Note that the new `iftable` is filled with vtable indexes for new interfaces,
7595 // so this needs to be the vtable index if we're searching that range.
7596 //
7597 // Returns whether the `target` was found in a method array for any subinterface of `iface`.
7598 template <typename TargetType>
7599 static bool ContainsImplementingMethod(ObjPtr<mirror::IfTable> iftable,
7600 size_t begin,
7601 size_t end,
7602 ObjPtr<mirror::Class> iface,
7603 TargetType target)
7604 REQUIRES_SHARED(Locks::mutator_lock_) {
7605 for (size_t i = begin; i != end; ++i) {
7606 if (MethodArrayContains(iftable->GetMethodArrayOrNull(i), target).first &&
7607 iftable->GetInterface(i)->Implements(iface)) {
7608 return true;
7609 }
7610 }
7611 return false;
7612 }
7613
7614 template <typename TargetType>
7615 static std::pair<bool, size_t> MethodArrayContains(ObjPtr<mirror::PointerArray> method_array,
7616 TargetType target)
7617 REQUIRES_SHARED(Locks::mutator_lock_) {
7618 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
7619 for (size_t j = 0; j != num_methods; ++j) {
7620 if (method_array->GetElementPtrSize<TargetType, kPointerSize>(j) == target) {
7621 return {true, j};
7622 }
7623 }
7624 return {false, 0};
7625 }
7626
7627 ArtMethod* main_method_;
7628 uint32_t method_index_;
7629 State state_;
7630 };
7631
7632 class CopiedMethodRecordEmptyFn {
7633 public:
7634 void MakeEmpty(CopiedMethodRecord& item) const {
7635 item = CopiedMethodRecord();
7636 }
7637 bool IsEmpty(const CopiedMethodRecord& item) const {
7638 return item.GetMainMethod() == nullptr;
7639 }
7640 };
7641
7642 class CopiedMethodRecordHash {
7643 public:
7644 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7645 size_t operator()(ArtMethod* method) const NO_THREAD_SAFETY_ANALYSIS {
7646 DCHECK(method != nullptr);
7647 return ComputeMethodHash(method);
7648 }
7649
7650 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7651 size_t operator()(const CopiedMethodRecord& record) const NO_THREAD_SAFETY_ANALYSIS {
7652 return (*this)(record.GetMainMethod());
7653 }
7654 };
7655
7656 class CopiedMethodRecordEqual {
7657 public:
7658 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7659 bool operator()(const CopiedMethodRecord& lhs_record,
7660 ArtMethod* rhs) const NO_THREAD_SAFETY_ANALYSIS {
7661 ArtMethod* lhs = lhs_record.GetMainMethod();
7662 DCHECK(lhs != nullptr);
7663 DCHECK(rhs != nullptr);
7664 return MethodSignatureEquals(lhs, rhs);
7665 }
7666
7667 // NO_THREAD_SAFETY_ANALYSIS: This is called from unannotated `HashSet<>` functions.
7668 bool operator()(const CopiedMethodRecord& lhs_record,
7669 const CopiedMethodRecord& rhs_record) const NO_THREAD_SAFETY_ANALYSIS {
7670 return (*this)(lhs_record, rhs_record.GetMainMethod());
7671 }
7672 };
7673
7674 using CopiedMethodRecordSet = ScopedArenaHashSet<CopiedMethodRecord,
7675 CopiedMethodRecordEmptyFn,
7676 CopiedMethodRecordHash,
7677 CopiedMethodRecordEqual>;
7678
Vladimir Markob91402f2021-12-21 15:55:06 +00007679 static constexpr size_t kMethodAlignment = ArtMethod::Alignment(kPointerSize);
7680 static constexpr size_t kMethodSize = ArtMethod::Size(kPointerSize);
7681
Vladimir Marko921094a2017-01-12 18:37:06 +00007682 ClassLinker* class_linker_;
7683 Handle<mirror::Class> klass_;
Vladimir Marko921094a2017-01-12 18:37:06 +00007684 Thread* const self_;
Vladimir Marko78f62d82022-01-10 16:25:19 +00007685 Runtime* const runtime_;
Vladimir Marko921094a2017-01-12 18:37:06 +00007686
7687 // These are allocated on the heap to begin, we then transfer to linear alloc when we re-create
7688 // the virtual methods array.
7689 // Need to use low 4GB arenas for compiler or else the pointers wont fit in 32 bit method array
7690 // during cross compilation.
7691 // Use the linear alloc pool since this one is in the low 4gb for the compiler.
7692 ArenaStack stack_;
7693 ScopedArenaAllocator allocator_;
7694
Vladimir Markod5d11d92021-02-02 16:24:25 +00007695 // If there are multiple methods with the same signature in the superclass vtable
7696 // (which can happen with a new virtual method having the same signature as an
7697 // inaccessible package-private method from another package in the superclass),
7698 // we keep singly-linked lists in this single array that maps vtable index to the
7699 // next vtable index in the list, `dex::kDexNoIndex` denotes the end of a list.
7700 ArrayRef<uint32_t> same_signature_vtable_lists_;
7701
Vladimir Marko19366b82022-01-18 10:41:28 +00007702 // Avoid large allocation for a few copied method records.
Vladimir Markobc893672021-11-10 15:25:46 +00007703 // Keep the initial buffer on the stack to avoid arena allocations
7704 // if there are no special cases (the first arena allocation is costly).
Vladimir Marko19366b82022-01-18 10:41:28 +00007705 static constexpr size_t kCopiedMethodRecordInitialBufferSize = 16u;
7706 CopiedMethodRecord copied_method_records_initial_buffer_[kCopiedMethodRecordInitialBufferSize];
7707 CopiedMethodRecordSet copied_method_records_;
7708 size_t num_new_copied_methods_;
Vladimir Marko921094a2017-01-12 18:37:06 +00007709};
7710
// Grow `klass`'s method array (via linear-alloc `Realloc()`) to make room for the
// copied methods collected in `copied_method_records_` — miranda, default and
// default-conflict methods — append them at the end with the appropriate access
// flags, and install the new array in `klass`. Native allocation failure aborts.
template <PointerSize kPointerSize>
NO_INLINE
void ClassLinker::LinkMethodsHelper<kPointerSize>::ReallocMethods(ObjPtr<mirror::Class> klass) {
  // There should be no thread suspension in this function,
  // native allocations do not cause thread suspension.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  size_t num_new_copied_methods = num_new_copied_methods_;
  DCHECK_NE(num_new_copied_methods, 0u);
  const size_t old_method_count = klass->NumMethods();
  const size_t new_method_count = old_method_count + num_new_copied_methods;

  // Attempt to realloc to save RAM if possible.
  LengthPrefixedArray<ArtMethod>* old_methods = klass->GetMethodsPtr();
  // The Realloced virtual methods aren't visible from the class roots, so there is no issue
  // where GCs could attempt to mark stale pointers due to memcpy. And since we overwrite the
  // realloced memory with out->CopyFrom, we are guaranteed to have objects in the to space since
  // CopyFrom has internal read barriers.
  //
  // TODO We should maybe move some of this into mirror::Class or at least into another method.
  const size_t old_size = LengthPrefixedArray<ArtMethod>::ComputeSize(old_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  const size_t new_size = LengthPrefixedArray<ArtMethod>::ComputeSize(new_method_count,
                                                                      kMethodSize,
                                                                      kMethodAlignment);
  // Pass 0 as the old size when there was no previous method array at all.
  const size_t old_methods_ptr_size = (old_methods != nullptr) ? old_size : 0;
  auto* methods = reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
      class_linker_->GetAllocatorForClassLoader(klass->GetClassLoader())->Realloc(
          self_, old_methods, old_methods_ptr_size, new_size, LinearAllocKind::kArtMethodArray));
  CHECK(methods != nullptr);  // Native allocation failure aborts.

  // If `Realloc()` moved the array, the GC configuration determines the extra fix-up needed.
  if (methods != old_methods) {
    if (gUseReadBarrier) {
      StrideIterator<ArtMethod> out = methods->begin(kMethodSize, kMethodAlignment);
      // Copy over the old methods. The `ArtMethod::CopyFrom()` is only necessary to not miss
      // read barriers since `LinearAlloc::Realloc()` won't do read barriers when it copies.
      for (auto& m : klass->GetMethods(kPointerSize)) {
        out->CopyFrom(&m, kPointerSize);
        ++out;
      }
    } else if (gUseUserfaultfd) {
      // Clear the declaring class of the old dangling method array so that GC doesn't
      // try to update them, which could cause crashes in userfaultfd GC due to
      // checks in post-compact address computation.
      for (auto& m : klass->GetMethods(kPointerSize)) {
        m.SetDeclaringClass(nullptr);
      }
    }
  }

  // Collect and sort copied method records by the vtable index. This places overriding
  // copied methods first, sorted by the vtable index already assigned in the superclass,
  // followed by copied methods with new signatures in the order in which we encountered
  // them when going over virtual methods of new interfaces.
  // This order is deterministic but implementation-defined.
  //
  // Avoid arena allocation for a few records (the first arena allocation is costly).
  constexpr size_t kSortedRecordsBufferSize = 16;
  CopiedMethodRecord* sorted_records_buffer[kSortedRecordsBufferSize];
  CopiedMethodRecord** sorted_records = (num_new_copied_methods <= kSortedRecordsBufferSize)
      ? sorted_records_buffer
      : allocator_.AllocArray<CopiedMethodRecord*>(num_new_copied_methods);
  size_t filled_sorted_records = 0u;
  for (CopiedMethodRecord& record : copied_method_records_) {
    // Records in state `kUseSuperMethod` reuse an existing superclass vtable entry
    // and therefore do not produce a new copied method.
    if (record.GetState() != CopiedMethodRecord::State::kUseSuperMethod) {
      DCHECK_LT(filled_sorted_records, num_new_copied_methods);
      sorted_records[filled_sorted_records] = &record;
      ++filled_sorted_records;
    }
  }
  DCHECK_EQ(filled_sorted_records, num_new_copied_methods);
  std::sort(sorted_records,
            sorted_records + num_new_copied_methods,
            [](const CopiedMethodRecord* lhs, const CopiedMethodRecord* rhs) {
              return lhs->GetMethodIndex() < rhs->GetMethodIndex();
            });

  if (klass->IsInterface()) {
    // Some records may have been pruned. Update method indexes in collected records.
    size_t interface_method_index = klass->NumDeclaredVirtualMethods();
    for (size_t i = 0; i != num_new_copied_methods; ++i) {
      CopiedMethodRecord* record = sorted_records[i];
      DCHECK_LE(interface_method_index, record->GetMethodIndex());
      record->SetMethodIndex(interface_method_index);
      ++interface_method_index;
    }
  }

  // Add copied methods.
  // Grow the length-prefixed array to cover the entries appended below.
  methods->SetSize(new_method_count);
  for (size_t i = 0; i != num_new_copied_methods; ++i) {
    const CopiedMethodRecord* record = sorted_records[i];
    ArtMethod* interface_method = record->GetMainMethod();
    DCHECK(!interface_method->IsCopied());
    ArtMethod& new_method = methods->At(old_method_count + i, kMethodSize, kMethodAlignment);
    new_method.CopyFrom(interface_method, kPointerSize);
    new_method.SetMethodIndex(dchecked_integral_cast<uint16_t>(record->GetMethodIndex()));
    // Adjust access flags (and, for conflicts, the entry point) based on the record's state.
    switch (record->GetState()) {
      case CopiedMethodRecord::State::kAbstractSingle:
      case CopiedMethodRecord::State::kAbstract: {
        DCHECK(!klass->IsInterface());  // We do not create miranda methods for interfaces.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccAbstract | kAccIntrinsic | kAccDefault), kAccAbstract)
            << "Miranda method should be abstract but not intrinsic or default!";
        new_method.SetAccessFlags(access_flags | kAccCopied);
        break;
      }
      case CopiedMethodRecord::State::kDefaultSingle:
      case CopiedMethodRecord::State::kDefault: {
        DCHECK(!klass->IsInterface());  // We do not copy default methods for interfaces.
        // Clear the kAccSkipAccessChecks flag if it is present. Since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks.
        // TODO This is rather arbitrary. We should maybe support classes where only some of its
        // methods are skip_access_checks.
        DCHECK_EQ(new_method.GetAccessFlags() & kAccNative, 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccCopied;
        constexpr uint32_t kMaskFlags = ~kAccSkipAccessChecks;
        new_method.SetAccessFlags((new_method.GetAccessFlags() | kSetFlags) & kMaskFlags);
        break;
      }
      case CopiedMethodRecord::State::kDefaultConflict: {
        // This is a type of default method (there are default method impls, just a conflict)
        // so mark this as a default. We use the `kAccAbstract` flag to distinguish it from
        // invokable copied default method without using a separate access flag but the default
        // conflicting method is technically not abstract and ArtMethod::IsAbstract() shall
        // return false. Also clear the kAccSkipAccessChecks bit since this class hasn't been
        // verified yet it shouldn't have methods that are skipping access checks. Also clear
        // potential kAccSingleImplementation to avoid CHA trying to inline the default method.
        uint32_t access_flags = new_method.GetAccessFlags();
        DCHECK_EQ(access_flags & (kAccNative | kAccIntrinsic), 0u);
        constexpr uint32_t kSetFlags = kAccDefault | kAccAbstract | kAccCopied;
        constexpr uint32_t kMaskFlags = ~(kAccSkipAccessChecks | kAccSingleImplementation);
        new_method.SetAccessFlags((access_flags | kSetFlags) & kMaskFlags);
        DCHECK(new_method.IsDefaultConflicting());
        DCHECK(!new_method.IsAbstract());
        // The actual method might or might not be marked abstract since we just copied it from
        // a (possibly default) interface method. We need to set its entry point to be the bridge
        // so that the compiler will not invoke the implementation of whatever method we copied
        // from.
        EnsureThrowsInvocationError(class_linker_, &new_method);
        break;
      }
      default:
        LOG(FATAL) << "Unexpected state: " << enum_cast<uint32_t>(record->GetState());
        UNREACHABLE();
    }
  }

  if (VLOG_IS_ON(class_linker)) {
    LogNewVirtuals(methods);
  }

  // Publish the new method array on the class.
  class_linker_->UpdateClassMethods(klass, methods);
}
7866
// Finalize the iftable after vtable indexes have been assigned: translate the
// vtable indexes stored in the method arrays of newly added interfaces into
// actual `ArtMethod*` entries from `vtable`, copy-on-write the method arrays
// shared with the superclass where an implementation changed, and fill the
// IMT (`out_imt`) for non-abstract classes.
// Returns false on allocation failure during copy-on-write, true otherwise.
template <PointerSize kPointerSize>
bool ClassLinker::LinkMethodsHelper<kPointerSize>::FinalizeIfTable(
    Handle<mirror::Class> klass,
    MutableHandle<mirror::IfTable> iftable,
    Handle<mirror::PointerArray> vtable,
    bool is_klass_abstract,
    bool is_super_abstract,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  size_t ifcount = iftable->Count();
  // We do not need a read barrier here as the length is constant, both from-space and
  // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
  size_t super_ifcount =
      klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();

  // These remain null/zero for abstract classes, which do not have an IMT.
  ClassLinker* class_linker = nullptr;
  ArtMethod* unimplemented_method = nullptr;
  ArtMethod* imt_conflict_method = nullptr;
  uintptr_t imt_methods_begin = 0u;
  size_t imt_methods_size = 0u;
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  DCHECK_EQ(klass->GetSuperClass()->ShouldHaveImt(), !is_super_abstract);
  if (!is_klass_abstract) {
    class_linker = class_linker_;
    unimplemented_method = runtime_->GetImtUnimplementedMethod();
    imt_conflict_method = runtime_->GetImtConflictMethod();
    if (is_super_abstract) {
      // There was no IMT in superclass to copy to `out_imt[]`, so we need
      // to fill it with all implementation methods from superclass.
      DCHECK_EQ(imt_methods_begin, 0u);
      imt_methods_size = std::numeric_limits<size_t>::max();  // No method at the last byte.
    } else {
      // If the superclass has IMT, we have already copied it to `out_imt[]` and
      // we do not need to call `SetIMTRef()` for interfaces from superclass when
      // the implementation method is already in the superclass, only for new methods.
      // For simplicity, use the entire method array including direct methods.
      LengthPrefixedArray<ArtMethod>* const new_methods = klass->GetMethodsPtr();
      if (new_methods != nullptr) {
        DCHECK_NE(new_methods->size(), 0u);
        imt_methods_begin = reinterpret_cast<uintptr_t>(&new_methods->At(0));
        imt_methods_size = new_methods->size() * kMethodSize;
      }
    }
  }

  // Helper: record `implementation` for interface method `j` of `iface` in the IMT,
  // inserting the conflict method if the slot is already taken by a different method.
  auto update_imt = [=](ObjPtr<mirror::Class> iface, size_t j, ArtMethod* implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Place method in imt if entry is empty, place conflict otherwise.
    ArtMethod** imt_ptr = &out_imt[iface->GetVirtualMethod(j, kPointerSize)->GetImtIndex()];
    class_linker->SetIMTRef(unimplemented_method,
                            imt_conflict_method,
                            implementation,
                            /*out*/out_new_conflict,
                            /*out*/imt_ptr);
  };

  // For interfaces inherited from superclass, the new method arrays are empty,
  // so use vtable indexes from implementation methods from the superclass method array.
  for (size_t i = 0; i != super_ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    DCHECK(method_array == klass->GetSuperClass()->GetIfTable()->GetMethodArrayOrNull(i));
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    size_t j = 0;
    // First loop has method array shared with the super class.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      // Check if we need to update IMT with this method, see above.
      // Unsigned subtraction makes this a single range check: true iff `implementation`
      // lies in [imt_methods_begin, imt_methods_begin + imt_methods_size).
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
      if (implementation != super_implementation) {
        // Copy-on-write and move to the next loop.
        Thread* self = self_;
        StackHandleScope<2u> hs(self);
        Handle<mirror::PointerArray> old_method_array = hs.NewHandle(method_array);
        HandleWrapperObjPtr<mirror::Class> h_iface = hs.NewHandleWrapper(&iface);
        // If the iftable itself is still shared with the superclass, copy it first
        // so that we do not modify the superclass's iftable.
        if (ifcount == super_ifcount && iftable.Get() == klass->GetSuperClass()->GetIfTable()) {
          ObjPtr<mirror::IfTable> new_iftable = ObjPtr<mirror::IfTable>::DownCast(
              mirror::ObjectArray<mirror::Object>::CopyOf(
                  iftable, self, ifcount * mirror::IfTable::kMax));
          if (new_iftable == nullptr) {
            return false;
          }
          iftable.Assign(new_iftable);
        }
        method_array = ObjPtr<mirror::PointerArray>::DownCast(
            mirror::Array::CopyOf(old_method_array, self, num_methods));
        if (method_array == nullptr) {
          return false;
        }
        iftable->SetMethodArray(i, method_array);
        method_array->SetElementPtrSize(j, implementation, kPointerSize);
        ++j;
        break;
      }
    }
    // Second loop (if non-empty) has method array different from the superclass.
    for (; j != num_methods; ++j) {
      ArtMethod* super_implementation =
          method_array->GetElementPtrSize<ArtMethod*, kPointerSize>(j);
      size_t vtable_index = super_implementation->GetMethodIndex();
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      // Check if we need to update IMT with this method, see above.
      if (reinterpret_cast<uintptr_t>(implementation) - imt_methods_begin < imt_methods_size) {
        update_imt(iface, j, implementation);
      }
    }
  }

  // New interface method arrays contain vtable indexes. Translate them to methods.
  DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
  for (size_t i = super_ifcount; i != ifcount; ++i) {
    ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
    if (method_array == nullptr) {
      continue;
    }
    size_t num_methods = method_array->GetLength();
    ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
    for (size_t j = 0; j != num_methods; ++j) {
      size_t vtable_index = method_array->GetElementPtrSize<size_t, kPointerSize>(j);
      ArtMethod* implementation =
          vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(vtable_index);
      method_array->SetElementPtrSize(j, implementation, kPointerSize);
      if (!is_klass_abstract) {
        update_imt(iface, j, implementation);
      }
    }
  }

  return true;
}
8008
Vladimir Markob91402f2021-12-21 15:55:06 +00008009template <PointerSize kPointerSize>
Vladimir Marko0441d202022-02-18 13:55:15 +00008010ObjPtr<mirror::PointerArray> ClassLinker::LinkMethodsHelper<kPointerSize>::AllocPointerArray(
8011 Thread* self, size_t length) {
8012 using PointerArrayType = std::conditional_t<
8013 kPointerSize == PointerSize::k64, mirror::LongArray, mirror::IntArray>;
8014 ObjPtr<mirror::Array> array = PointerArrayType::Alloc(self, length);
8015 return ObjPtr<mirror::PointerArray>::DownCast(array);
8016}
8017
8018template <PointerSize kPointerSize>
8019bool ClassLinker::LinkMethodsHelper<kPointerSize>::AllocateIfTableMethodArrays(
8020 Thread* self,
8021 Handle<mirror::Class> klass,
8022 Handle<mirror::IfTable> iftable) {
8023 DCHECK(!klass->IsInterface());
8024 DCHECK(klass_->HasSuperClass());
8025 const size_t ifcount = iftable->Count();
8026 // We do not need a read barrier here as the length is constant, both from-space and
8027 // to-space `IfTable`s shall yield the same result. See also `Class::GetIfTableCount()`.
8028 size_t super_ifcount =
8029 klass->GetSuperClass<kDefaultVerifyFlags, kWithoutReadBarrier>()->GetIfTableCount();
8030 if (ifcount == super_ifcount) {
8031 DCHECK(iftable.Get() == klass_->GetSuperClass()->GetIfTable());
8032 return true;
8033 }
8034
8035 if (kIsDebugBuild) {
8036 // The method array references for superclass interfaces have been copied.
8037 // We shall allocate new arrays if needed (copy-on-write) in `FinalizeIfTable()`.
8038 ObjPtr<mirror::IfTable> super_iftable = klass_->GetSuperClass()->GetIfTable();
8039 for (size_t i = 0; i != super_ifcount; ++i) {
8040 CHECK(iftable->GetInterface(i) == super_iftable->GetInterface(i));
8041 CHECK(iftable->GetMethodArrayOrNull(i) == super_iftable->GetMethodArrayOrNull(i));
8042 }
8043 }
8044
8045 for (size_t i = super_ifcount; i < ifcount; ++i) {
8046 size_t num_methods = iftable->GetInterface(i)->NumDeclaredVirtualMethods();
8047 if (num_methods > 0) {
8048 ObjPtr<mirror::PointerArray> method_array = AllocPointerArray(self, num_methods);
8049 if (UNLIKELY(method_array == nullptr)) {
8050 self->AssertPendingOOMException();
8051 return false;
8052 }
8053 iftable->SetMethodArray(i, method_array);
8054 }
8055 }
8056 return true;
8057}
8058
8059template <PointerSize kPointerSize>
Vladimir Markobed84ef2022-01-21 13:57:14 +00008060size_t ClassLinker::LinkMethodsHelper<kPointerSize>::AssignVTableIndexes(
Vladimir Marko19366b82022-01-18 10:41:28 +00008061 ObjPtr<mirror::Class> klass,
8062 ObjPtr<mirror::Class> super_class,
Vladimir Marko51718132022-02-07 16:31:08 +00008063 bool is_super_abstract,
Vladimir Marko19366b82022-01-18 10:41:28 +00008064 size_t num_virtual_methods,
8065 ObjPtr<mirror::IfTable> iftable) {
Vladimir Marko8670e042021-12-21 17:55:48 +00008066 DCHECK(!klass->IsInterface());
8067 DCHECK(klass->HasSuperClass());
8068 DCHECK(klass->GetSuperClass() == super_class);
Vladimir Marko8670e042021-12-21 17:55:48 +00008069
8070 // There should be no thread suspension unless we want to throw an exception.
Vladimir Marko7ddae992022-01-18 14:27:20 +00008071 // (We are using `ObjPtr<>` and raw vtable pointers that are invalidated by thread suspension.)
Vladimir Marko8670e042021-12-21 17:55:48 +00008072 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8073
8074 // Prepare a hash table with virtual methods from the superclass.
8075 // For the unlikely cases that there are multiple methods with the same signature
8076 // but different vtable indexes, keep an array with indexes of the previous
8077 // methods with the same signature (walked as singly-linked lists).
Vladimir Marko7ddae992022-01-18 14:27:20 +00008078 uint8_t* raw_super_vtable;
8079 size_t super_vtable_length;
Vladimir Marko51718132022-02-07 16:31:08 +00008080 if (is_super_abstract) {
8081 DCHECK(!super_class->ShouldHaveEmbeddedVTable());
Vladimir Marko7ddae992022-01-18 14:27:20 +00008082 ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTableDuringLinking();
8083 DCHECK(super_vtable != nullptr);
8084 raw_super_vtable = reinterpret_cast<uint8_t*>(super_vtable.Ptr()) +
8085 mirror::Array::DataOffset(static_cast<size_t>(kPointerSize)).Uint32Value();
8086 super_vtable_length = super_vtable->GetLength();
Vladimir Marko51718132022-02-07 16:31:08 +00008087 } else {
8088 DCHECK(super_class->ShouldHaveEmbeddedVTable());
8089 raw_super_vtable = reinterpret_cast<uint8_t*>(super_class.Ptr()) +
8090 mirror::Class::EmbeddedVTableOffset(kPointerSize).Uint32Value();
8091 super_vtable_length = super_class->GetEmbeddedVTableLength();
Vladimir Marko7ddae992022-01-18 14:27:20 +00008092 }
8093 VTableAccessor super_vtable_accessor(raw_super_vtable, super_vtable_length);
Vladimir Marko8670e042021-12-21 17:55:48 +00008094 static constexpr double kMinLoadFactor = 0.3;
8095 static constexpr double kMaxLoadFactor = 0.5;
Vladimir Marko19366b82022-01-18 10:41:28 +00008096 static constexpr size_t kMaxStackBuferSize = 256;
Vladimir Marko19366b82022-01-18 10:41:28 +00008097 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008098 const size_t super_vtable_buffer_size = super_vtable_length * 3;
8099 const size_t bit_vector_size = BitVector::BitsToWords(num_virtual_methods);
8100 const size_t total_size =
8101 declared_virtuals_buffer_size + super_vtable_buffer_size + bit_vector_size;
Vladimir Marko8670e042021-12-21 17:55:48 +00008102
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008103 uint32_t* declared_virtuals_buffer_ptr = (total_size <= kMaxStackBuferSize)
8104 ? reinterpret_cast<uint32_t*>(alloca(total_size * sizeof(uint32_t)))
8105 : allocator_.AllocArray<uint32_t>(total_size);
8106 uint32_t* bit_vector_buffer_ptr = declared_virtuals_buffer_ptr + declared_virtuals_buffer_size;
8107
Vladimir Marko19366b82022-01-18 10:41:28 +00008108 DeclaredVirtualSignatureSet declared_virtual_signatures(
8109 kMinLoadFactor,
8110 kMaxLoadFactor,
8111 DeclaredVirtualSignatureHash(klass),
8112 DeclaredVirtualSignatureEqual(klass),
8113 declared_virtuals_buffer_ptr,
8114 declared_virtuals_buffer_size,
8115 allocator_.Adapter());
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008116
8117 ArrayRef<uint32_t> same_signature_vtable_lists;
Vladimir Marko8670e042021-12-21 17:55:48 +00008118 const bool is_proxy_class = klass->IsProxyClass();
8119 size_t vtable_length = super_vtable_length;
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008120
8121 // Record which declared methods are overriding a super method.
8122 BitVector initialized_methods(/* expandable= */ false,
8123 Allocator::GetNoopAllocator(),
8124 bit_vector_size,
8125 bit_vector_buffer_ptr);
8126
8127 // Note: our sets hash on the method name, and therefore we pay a high
8128 // performance price when a class has many overloads.
8129 //
8130 // We populate a set of declared signatures instead of signatures from the
8131 // super vtable (which is only lazy populated in case of interface overriding,
8132 // see below). This makes sure that we pay the performance price only on that
8133 // class, and not on its subclasses (except in the case of interface overriding, see below).
Vladimir Marko19366b82022-01-18 10:41:28 +00008134 for (size_t i = 0; i != num_virtual_methods; ++i) {
Vladimir Marko8670e042021-12-21 17:55:48 +00008135 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
Vladimir Marko19366b82022-01-18 10:41:28 +00008136 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
Vladimir Marko8670e042021-12-21 17:55:48 +00008137 ArtMethod* signature_method = UNLIKELY(is_proxy_class)
8138 ? virtual_method->GetInterfaceMethodForProxyUnchecked(kPointerSize)
8139 : virtual_method;
8140 size_t hash = ComputeMethodHash(signature_method);
Vladimir Marko19366b82022-01-18 10:41:28 +00008141 declared_virtual_signatures.PutWithHash(i, hash);
Vladimir Marko8670e042021-12-21 17:55:48 +00008142 }
Vladimir Marko19366b82022-01-18 10:41:28 +00008143
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008144 // Loop through each super vtable method and see if they are overridden by a method we added to
8145 // the hash table.
8146 for (size_t j = 0; j < super_vtable_length; ++j) {
8147 // Search the hash table to see if we are overridden by any method.
8148 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(j);
8149 if (!klass->CanAccessMember(super_method->GetDeclaringClass(),
8150 super_method->GetAccessFlags())) {
8151 // Continue on to the next method since this one is package private and cannot be overridden.
8152 // Before Android 4.1, the package-private method super_method might have been incorrectly
8153 // overridden.
8154 continue;
8155 }
8156 size_t hash = (j < mirror::Object::kVTableLength)
8157 ? class_linker_->object_virtual_method_hashes_[j]
8158 : ComputeMethodHash(super_method);
8159 auto it = declared_virtual_signatures.FindWithHash(super_method, hash);
8160 if (it == declared_virtual_signatures.end()) {
8161 continue;
8162 }
8163 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it, kPointerSize);
8164 if (super_method->IsFinal()) {
8165 sants.reset();
8166 ThrowLinkageError(klass, "Method %s overrides final method in class %s",
8167 virtual_method->PrettyMethod().c_str(),
8168 super_method->GetDeclaringClassDescriptor());
8169 return 0u;
8170 }
8171 if (initialized_methods.IsBitSet(*it)) {
8172 // The method is overriding more than one method.
8173 // We record that information in a linked list to later set the method in the vtable
8174 // locations that are not the method index.
8175 if (same_signature_vtable_lists.empty()) {
8176 same_signature_vtable_lists = ArrayRef<uint32_t>(
8177 allocator_.AllocArray<uint32_t>(super_vtable_length), super_vtable_length);
8178 std::fill_n(same_signature_vtable_lists.data(), super_vtable_length, dex::kDexNoIndex);
8179 same_signature_vtable_lists_ = same_signature_vtable_lists;
8180 }
8181 same_signature_vtable_lists[j] = virtual_method->GetMethodIndexDuringLinking();
8182 } else {
8183 initialized_methods.SetBit(*it);
8184 }
8185
8186 // We arbitrarily set to the largest index. This is also expected when
8187 // iterating over the `same_signature_vtable_lists_`.
8188 virtual_method->SetMethodIndex(j);
8189 }
8190
8191 // Add the non-overridden methods at the end.
8192 for (size_t i = 0; i < num_virtual_methods; ++i) {
8193 if (!initialized_methods.IsBitSet(i)) {
8194 ArtMethod* local_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8195 local_method->SetMethodIndex(vtable_length);
8196 vtable_length++;
8197 }
8198 }
8199
8200 // A lazily constructed super vtable set, which we only populate in the less
8201 // common situation of a superclass implementing a method declared in an
8202 // interface this class inherits.
8203 // We still try to allocate the set on the stack as using the arena will have
8204 // a larger cost.
8205 uint32_t* super_vtable_buffer_ptr = bit_vector_buffer_ptr + bit_vector_size;
8206 VTableSignatureSet super_vtable_signatures(
8207 kMinLoadFactor,
8208 kMaxLoadFactor,
8209 VTableSignatureHash(super_vtable_accessor),
8210 VTableSignatureEqual(super_vtable_accessor),
8211 super_vtable_buffer_ptr,
8212 super_vtable_buffer_size,
8213 allocator_.Adapter());
8214
Vladimir Marko19366b82022-01-18 10:41:28 +00008215 // Assign vtable indexes for interface methods in new interfaces and store them
8216 // in implementation method arrays. These shall be replaced by actual method
8217 // pointers later. We do not need to do this for superclass interfaces as we can
8218 // get these vtable indexes from implementation methods in superclass iftable.
8219 // Record data for copied methods which shall be referenced by the vtable.
8220 const size_t ifcount = iftable->Count();
8221 ObjPtr<mirror::IfTable> super_iftable = super_class->GetIfTable();
8222 const size_t super_ifcount = super_iftable->Count();
8223 for (size_t i = ifcount; i != super_ifcount; ) {
8224 --i;
8225 DCHECK_LT(i, ifcount);
8226 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8227 ObjPtr<mirror::PointerArray> method_array = iftable->GetMethodArrayOrNull(i);
8228 size_t num_methods = (method_array != nullptr) ? method_array->GetLength() : 0u;
8229 for (size_t j = 0; j != num_methods; ++j) {
8230 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8231 size_t hash = ComputeMethodHash(interface_method);
8232 ArtMethod* vtable_method = nullptr;
Vladimir Marko19366b82022-01-18 10:41:28 +00008233 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8234 if (it1 != declared_virtual_signatures.end()) {
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008235 ArtMethod* found_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8236 // For interface overriding, we only look at public methods.
8237 if (found_method->IsPublic()) {
8238 vtable_method = found_method;
8239 }
Vladimir Marko19366b82022-01-18 10:41:28 +00008240 } else {
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008241 // This situation should be rare (a superclass implements a method
8242 // declared in an interface this class is inheriting). Only in this case
8243 // do we lazily populate the super_vtable_signatures.
8244 if (super_vtable_signatures.empty()) {
8245 for (size_t k = 0; k < super_vtable_length; ++k) {
8246 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(k);
8247 if (!super_method->IsPublic()) {
8248 // For interface overriding, we only look at public methods.
8249 continue;
8250 }
8251 size_t super_hash = (k < mirror::Object::kVTableLength)
8252 ? class_linker_->object_virtual_method_hashes_[k]
8253 : ComputeMethodHash(super_method);
8254 auto [it, inserted] = super_vtable_signatures.InsertWithHash(k, super_hash);
8255 DCHECK(inserted || super_vtable_accessor.GetVTableEntry(*it) == super_method);
8256 }
8257 }
Nicolas Geoffray868e5762022-06-08 15:00:06 +01008258 auto it2 = super_vtable_signatures.FindWithHash(interface_method, hash);
8259 if (it2 != super_vtable_signatures.end()) {
8260 vtable_method = super_vtable_accessor.GetVTableEntry(*it2);
Vladimir Marko19366b82022-01-18 10:41:28 +00008261 }
8262 }
Nicolas Geoffrayd88c1492022-07-06 10:01:58 +01008263
Vladimir Marko19366b82022-01-18 10:41:28 +00008264 uint32_t vtable_index = vtable_length;
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008265 if (vtable_method != nullptr) {
Vladimir Marko19366b82022-01-18 10:41:28 +00008266 vtable_index = vtable_method->GetMethodIndexDuringLinking();
8267 if (!vtable_method->IsOverridableByDefaultMethod()) {
8268 method_array->SetElementPtrSize(j, vtable_index, kPointerSize);
8269 continue;
8270 }
8271 }
8272
8273 auto [it, inserted] = copied_method_records_.InsertWithHash(
8274 CopiedMethodRecord(interface_method, vtable_index), hash);
Nicolas Geoffrayef0b9c42022-07-04 16:24:06 +00008275 if (vtable_method != nullptr) {
Vladimir Marko19366b82022-01-18 10:41:28 +00008276 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8277 } else if (inserted) {
8278 DCHECK_EQ(vtable_index, it->GetMethodIndex());
8279 DCHECK_EQ(vtable_index, vtable_length);
8280 ++vtable_length;
8281 } else {
8282 vtable_index = it->GetMethodIndex();
8283 }
8284 method_array->SetElementPtrSize(j, it->GetMethodIndex(), kPointerSize);
8285 if (inserted) {
8286 it->SetState(interface_method->IsAbstract() ? CopiedMethodRecord::State::kAbstractSingle
8287 : CopiedMethodRecord::State::kDefaultSingle);
8288 } else {
8289 it->UpdateState(iface, interface_method, vtable_index, iftable, ifcount, i);
8290 }
8291 }
8292 }
8293 // Finalize copied method records and check if we can reuse some methods from superclass vtable.
8294 size_t num_new_copied_methods = copied_method_records_.size();
8295 for (CopiedMethodRecord& record : copied_method_records_) {
8296 uint32_t vtable_index = record.GetMethodIndex();
8297 if (vtable_index < super_vtable_length) {
8298 ArtMethod* super_method = super_vtable_accessor.GetVTableEntry(record.GetMethodIndex());
8299 DCHECK(super_method->IsOverridableByDefaultMethod());
8300 record.FinalizeState(
8301 super_method, vtable_index, iftable, ifcount, super_iftable, super_ifcount);
8302 if (record.GetState() == CopiedMethodRecord::State::kUseSuperMethod) {
8303 --num_new_copied_methods;
8304 }
8305 }
8306 }
8307 num_new_copied_methods_ = num_new_copied_methods;
8308
Vladimir Marko8670e042021-12-21 17:55:48 +00008309 if (UNLIKELY(!IsUint<16>(vtable_length))) {
8310 sants.reset();
8311 ThrowClassFormatError(klass, "Too many methods defined on class: %zd", vtable_length);
8312 return 0u;
8313 }
Vladimir Marko19366b82022-01-18 10:41:28 +00008314
Vladimir Marko8670e042021-12-21 17:55:48 +00008315 return vtable_length;
8316}
8317
8318template <PointerSize kPointerSize>
Vladimir Marko19366b82022-01-18 10:41:28 +00008319bool ClassLinker::LinkMethodsHelper<kPointerSize>::FindCopiedMethodsForInterface(
8320 ObjPtr<mirror::Class> klass,
8321 size_t num_virtual_methods,
8322 ObjPtr<mirror::IfTable> iftable) {
8323 DCHECK(klass->IsInterface());
8324 DCHECK(klass->HasSuperClass());
8325 DCHECK(klass->GetSuperClass()->IsObjectClass());
8326 DCHECK_EQ(klass->GetSuperClass()->GetIfTableCount(), 0);
8327
8328 // There should be no thread suspension unless we want to throw an exception.
8329 // (We are using `ObjPtr<>`s that are invalidated by thread suspension.)
8330 std::optional<ScopedAssertNoThreadSuspension> sants(__FUNCTION__);
8331
8332 // Prepare a `HashSet<>` with the declared virtual methods. These mask any methods
8333 // from superinterfaces, so we can filter out matching superinterface methods.
8334 static constexpr double kMinLoadFactor = 0.3;
8335 static constexpr double kMaxLoadFactor = 0.5;
8336 static constexpr size_t kMaxStackBuferSize = 256;
8337 const size_t declared_virtuals_buffer_size = num_virtual_methods * 3;
8338 uint32_t* declared_virtuals_buffer_ptr = (declared_virtuals_buffer_size <= kMaxStackBuferSize)
8339 ? reinterpret_cast<uint32_t*>(alloca(declared_virtuals_buffer_size * sizeof(uint32_t)))
8340 : allocator_.AllocArray<uint32_t>(declared_virtuals_buffer_size);
8341 DeclaredVirtualSignatureSet declared_virtual_signatures(
8342 kMinLoadFactor,
8343 kMaxLoadFactor,
8344 DeclaredVirtualSignatureHash(klass),
8345 DeclaredVirtualSignatureEqual(klass),
8346 declared_virtuals_buffer_ptr,
8347 declared_virtuals_buffer_size,
8348 allocator_.Adapter());
8349 for (size_t i = 0; i != num_virtual_methods; ++i) {
8350 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8351 DCHECK(!virtual_method->IsStatic()) << virtual_method->PrettyMethod();
8352 size_t hash = ComputeMethodHash(virtual_method);
8353 declared_virtual_signatures.PutWithHash(i, hash);
8354 }
8355
8356 // We do not create miranda methods for interface classes, so we do not need to track
8357 // non-default (abstract) interface methods. The downside is that we cannot use the
8358 // optimized code paths with `CopiedMethodRecord::State::kDefaultSingle` and since
8359 // we do not fill method arrays for interfaces, the method search actually has to
8360 // compare signatures instead of searching for the implementing method.
8361 const size_t ifcount = iftable->Count();
8362 size_t new_method_index = num_virtual_methods;
8363 for (size_t i = ifcount; i != 0u; ) {
8364 --i;
8365 DCHECK_LT(i, ifcount);
8366 ObjPtr<mirror::Class> iface = iftable->GetInterface(i);
8367 if (!iface->HasDefaultMethods()) {
8368 continue; // No default methods to process.
8369 }
8370 size_t num_methods = iface->NumDeclaredVirtualMethods();
8371 for (size_t j = 0; j != num_methods; ++j) {
8372 ArtMethod* interface_method = iface->GetVirtualMethod(j, kPointerSize);
8373 if (!interface_method->IsDefault()) {
8374 continue; // Do not process this non-default method.
8375 }
8376 size_t hash = ComputeMethodHash(interface_method);
8377 auto it1 = declared_virtual_signatures.FindWithHash(interface_method, hash);
8378 if (it1 != declared_virtual_signatures.end()) {
8379 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(*it1, kPointerSize);
8380 if (!virtual_method->IsAbstract() && !virtual_method->IsPublic()) {
8381 sants.reset();
8382 ThrowIllegalAccessErrorForImplementingMethod(klass, virtual_method, interface_method);
8383 return false;
8384 }
8385 continue; // This default method is masked by a method declared in this interface.
8386 }
8387
8388 CopiedMethodRecord new_record(interface_method, new_method_index);
8389 auto it = copied_method_records_.FindWithHash(new_record, hash);
8390 if (it == copied_method_records_.end()) {
8391 // Pretend that there is another default method and try to update the state.
8392 // If the `interface_method` is not masked, the state shall change to
8393 // `kDefaultConflict`; if it is masked, the state remains `kDefault`.
8394 new_record.SetState(CopiedMethodRecord::State::kDefault);
8395 new_record.UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8396 if (new_record.GetState() == CopiedMethodRecord::State::kDefaultConflict) {
8397 // Insert the new record with the state `kDefault`.
8398 new_record.SetState(CopiedMethodRecord::State::kDefault);
8399 copied_method_records_.PutWithHash(new_record, hash);
8400 DCHECK_EQ(new_method_index, new_record.GetMethodIndex());
8401 ++new_method_index;
8402 }
8403 } else {
8404 it->UpdateStateForInterface(iface, interface_method, iftable, ifcount, i);
8405 }
8406 }
8407 }
8408
8409 // Prune records without conflict. (Method indexes are updated in `ReallocMethods()`.)
8410 // We do not copy normal default methods to subinterfaces, instead we find the
8411 // default method with `Class::FindVirtualMethodForInterfaceSuper()` when needed.
8412 size_t num_new_copied_methods = copied_method_records_.size();
8413 for (CopiedMethodRecord& record : copied_method_records_) {
8414 if (record.GetState() != CopiedMethodRecord::State::kDefaultConflict) {
8415 DCHECK(record.GetState() == CopiedMethodRecord::State::kDefault);
8416 record.SetState(CopiedMethodRecord::State::kUseSuperMethod);
8417 --num_new_copied_methods;
8418 }
8419 }
8420 num_new_copied_methods_ = num_new_copied_methods;
8421
8422 return true;
8423}
8424
8425
// Link the virtual and interface methods of `klass`, populating its vtable,
// interface table (iftable) and, for instantiable (non-abstract) classes,
// the IMT (`out_imt`). Three cases are handled:
//  - interfaces: no vtable; each method gets an interface table index and
//    invokable methods are flagged `kAccDefault`; conflict ("copied") methods
//    are created for default methods inherited from superinterfaces;
//  - classes with a superclass: the superclass vtable is reused when nothing
//    changed, otherwise extended with new/overriding methods, and the iftable
//    method arrays are filled;
//  - java.lang.Object (no superclass): delegated to LinkJavaLangObjectMethods().
// Returns false and leaves an exception pending on failure.
template <PointerSize kPointerSize>
FLATTEN
bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkMethods(
    Thread* self,
    Handle<mirror::Class> klass,
    Handle<mirror::ObjectArray<mirror::Class>> interfaces,
    bool* out_new_conflict,
    ArtMethod** out_imt) {
  const size_t num_virtual_methods = klass->NumVirtualMethods();
  if (klass->IsInterface()) {
    // No vtable.
    if (!IsUint<16>(num_virtual_methods)) {
      ThrowClassFormatError(klass.Get(), "Too many methods on interface: %zu", num_virtual_methods);
      return false;
    }
    // Assign each method an interface table index and set the default flag.
    bool has_defaults = false;
    for (size_t i = 0; i < num_virtual_methods; ++i) {
      ArtMethod* m = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
      m->SetMethodIndex(i);
      uint32_t access_flags = m->GetAccessFlags();
      DCHECK(!ArtMethod::IsDefault(access_flags));
      DCHECK_EQ(!ArtMethod::IsAbstract(access_flags), ArtMethod::IsInvokable(access_flags));
      if (ArtMethod::IsInvokable(access_flags)) {
        // If the dex file does not support default methods, throw ClassFormatError.
        // This check is necessary to protect from odd cases, such as native default
        // methods, that the dex file verifier permits for old dex file versions. b/157170505
        // FIXME: This should be `if (!m->GetDexFile()->SupportsDefaultMethods())` but we're
        // currently running CTS tests for default methods with dex file version 035 which
        // does not support default methods. So, we limit this to native methods. b/157718952
        if (ArtMethod::IsNative(access_flags)) {
          DCHECK(!m->GetDexFile()->SupportsDefaultMethods());
          ThrowClassFormatError(klass.Get(),
                                "Dex file does not support default method '%s'",
                                m->PrettyMethod().c_str());
          return false;
        }
        if (!ArtMethod::IsPublic(access_flags)) {
          // The verifier should have caught the non-public method for dex version 37.
          // Just warn and skip it since this is from before default-methods so we don't
          // really need to care that it has code.
          LOG(WARNING) << "Default interface method " << m->PrettyMethod() << " is not public! "
                       << "This will be a fatal error in subsequent versions of android. "
                       << "Continuing anyway.";
        }
        m->SetAccessFlags(access_flags | kAccDefault);
        has_defaults = true;
      }
    }
    // Mark that we have default methods so that we won't need to scan the virtual_methods_ array
    // during initialization. This is a performance optimization. We could simply traverse the
    // virtual_methods_ array again during initialization.
    if (has_defaults) {
      klass->SetHasDefaultMethods();
    }
    ObjPtr<mirror::IfTable> iftable = SetupInterfaceLookupTable(
        self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }
    // Only if some superinterface contributes default methods do we need to
    // search for copied (conflict) methods.
    size_t ifcount = iftable->Count();
    bool have_super_with_defaults = false;
    for (size_t i = 0; i != ifcount; ++i) {
      if (iftable->GetInterface(i)->HasDefaultMethods()) {
        have_super_with_defaults = true;
        break;
      }
    }
    // In debug builds, remember the old methods array so its entries can be
    // clobbered after a possible reallocation below.
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (have_super_with_defaults) {
      if (!FindCopiedMethodsForInterface(klass.Get(), num_virtual_methods, iftable)) {
        self->AssertPendingException();
        return false;
      }
      if (num_new_copied_methods_ != 0u) {
        // Re-check the number of methods.
        size_t final_num_virtual_methods = num_virtual_methods + num_new_copied_methods_;
        if (!IsUint<16>(final_num_virtual_methods)) {
          ThrowClassFormatError(
              klass.Get(), "Too many methods on interface: %zu", final_num_virtual_methods);
          return false;
        }
        ReallocMethods(klass.Get());
      }
    }
    klass->SetIfTable(iftable);
    if (kIsDebugBuild) {
      // May cause thread suspension, so do this after we're done with `ObjPtr<> iftable`.
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else if (LIKELY(klass->HasSuperClass())) {
    // We set up the interface lookup table now because we need it to determine if we need
    // to update any vtable entries with new default method implementations.
    StackHandleScope<3> hs(self);
    MutableHandle<mirror::IfTable> iftable = hs.NewHandle(UNLIKELY(klass->IsProxyClass())
        ? SetupInterfaceLookupTable(self, klass, &allocator_, ProxyInterfacesAccessor(interfaces))
        : SetupInterfaceLookupTable(
              self, klass, &allocator_, NonProxyInterfacesAccessor(class_linker_, klass)));
    if (UNLIKELY(iftable == nullptr)) {
      self->AssertPendingException();
      return false;
    }

    // Copy the IMT from superclass if present and needed. Update with new methods later.
    Handle<mirror::Class> super_class = hs.NewHandle(klass->GetSuperClass());
    bool is_klass_abstract = klass->IsAbstract();
    bool is_super_abstract = super_class->IsAbstract();
    DCHECK_EQ(klass->ShouldHaveImt(), !is_klass_abstract);
    DCHECK_EQ(super_class->ShouldHaveImt(), !is_super_abstract);
    if (!is_klass_abstract && !is_super_abstract) {
      ImTable* super_imt = super_class->GetImt(kPointerSize);
      for (size_t i = 0; i < ImTable::kSize; ++i) {
        out_imt[i] = super_imt->Get(i, kPointerSize);
      }
    }

    // If there are no new virtual methods and no new interfaces, we can simply reuse
    // the vtable from superclass. We may need to make a copy if it's embedded.
    const size_t super_vtable_length = super_class->GetVTableLength();
    if (num_virtual_methods == 0 && iftable.Get() == super_class->GetIfTable()) {
      DCHECK_EQ(is_super_abstract, !super_class->ShouldHaveEmbeddedVTable());
      if (is_super_abstract) {
        DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
        ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
        CHECK(super_vtable != nullptr) << super_class->PrettyClass();
        klass->SetVTable(super_vtable);
        // No IMT in the super class, we need to reconstruct it from the iftable.
        if (!is_klass_abstract && iftable->Count() != 0) {
          class_linker_->FillIMTFromIfTable(iftable.Get(),
                                            runtime_->GetImtUnimplementedMethod(),
                                            runtime_->GetImtConflictMethod(),
                                            klass.Get(),
                                            /*create_conflict_tables=*/false,
                                            /*ignore_copied_methods=*/false,
                                            out_new_conflict,
                                            out_imt);
        }
      } else {
        // Non-abstract superclass has an embedded vtable; copy it into a
        // heap-allocated array for this class.
        ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, super_vtable_length);
        if (UNLIKELY(vtable == nullptr)) {
          self->AssertPendingOOMException();
          return false;
        }
        for (size_t i = 0; i < super_vtable_length; i++) {
          vtable->SetElementPtrSize(
              i, super_class->GetEmbeddedVTableEntry(i, kPointerSize), kPointerSize);
        }
        klass->SetVTable(vtable);
        // The IMT was already copied from superclass if `klass` is not abstract.
      }
      klass->SetIfTable(iftable.Get());
      return true;
    }

    // Allocate method arrays, so that we can link interface methods without thread suspension,
    // otherwise GC could miss visiting newly allocated copied methods.
    // TODO: Do not allocate copied methods during linking, store only records about what
    // we need to allocate and allocate it at the end. Start with superclass iftable and
    // perform copy-on-write when needed to facilitate maximum memory sharing.
    if (!AllocateIfTableMethodArrays(self, klass, iftable)) {
      self->AssertPendingOOMException();
      return false;
    }

    // Assign a vtable slot to every declared virtual method and compute the
    // final vtable size; a result of 0 signals failure with a pending exception.
    size_t final_vtable_size = AssignVTableIndexes(
        klass.Get(), super_class.Get(), is_super_abstract, num_virtual_methods, iftable.Get());
    if (final_vtable_size == 0u) {
      self->AssertPendingException();
      return false;
    }
    DCHECK(IsUint<16>(final_vtable_size));

    // Allocate the new vtable.
    Handle<mirror::PointerArray> vtable = hs.NewHandle(AllocPointerArray(self, final_vtable_size));
    if (UNLIKELY(vtable == nullptr)) {
      self->AssertPendingOOMException();
      return false;
    }

    // In debug builds, remember the old methods array so its entries can be
    // clobbered after a possible reallocation below.
    LengthPrefixedArray<ArtMethod>* old_methods = kIsDebugBuild ? klass->GetMethodsPtr() : nullptr;
    if (num_new_copied_methods_ != 0u) {
      ReallocMethods(klass.Get());
    }

    // Store new virtual methods in the new vtable.
    ArrayRef<uint32_t> same_signature_vtable_lists = same_signature_vtable_lists_;
    for (ArtMethod& virtual_method : klass->GetVirtualMethodsSliceUnchecked(kPointerSize)) {
      uint32_t vtable_index = virtual_method.GetMethodIndexDuringLinking();
      vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
      if (UNLIKELY(vtable_index < same_signature_vtable_lists.size())) {
        // We may override more than one method according to JLS, see b/211854716.
        // Follow the linked list of same-signature superclass slots and store
        // this method in each of them.
        while (same_signature_vtable_lists[vtable_index] != dex::kDexNoIndex) {
          DCHECK_LT(same_signature_vtable_lists[vtable_index], vtable_index);
          vtable_index = same_signature_vtable_lists[vtable_index];
          vtable->SetElementPtrSize(vtable_index, &virtual_method, kPointerSize);
          if (kIsDebugBuild) {
            ArtMethod* current_method = super_class->GetVTableEntry(vtable_index, kPointerSize);
            DCHECK(klass->CanAccessMember(current_method->GetDeclaringClass(),
                                          current_method->GetAccessFlags()));
            DCHECK(!current_method->IsFinal());
          }
        }
      }
    }

    // For non-overridden vtable slots, copy a method from `super_class`.
    for (size_t j = 0; j != super_vtable_length; ++j) {
      if (vtable->GetElementPtrSize<ArtMethod*, kPointerSize>(j) == nullptr) {
        ArtMethod* super_method = super_class->GetVTableEntry(j, kPointerSize);
        vtable->SetElementPtrSize(j, super_method, kPointerSize);
      }
    }

    // Update the `iftable` (and IMT) with finalized virtual methods.
    if (!FinalizeIfTable(klass,
                         iftable,
                         vtable,
                         is_klass_abstract,
                         is_super_abstract,
                         out_new_conflict,
                         out_imt)) {
      self->AssertPendingOOMException();
      return false;
    }

    klass->SetVTable(vtable.Get());
    klass->SetIfTable(iftable.Get());
    if (kIsDebugBuild) {
      CheckVTable(self, klass, kPointerSize);
      ClobberOldMethods(old_methods, klass->GetMethodsPtr());
    }
    return true;
  } else {
    return LinkJavaLangObjectMethods(self, klass);
  }
}
8664
Vladimir Markob91402f2021-12-21 15:55:06 +00008665template <PointerSize kPointerSize>
Vladimir Marko78f62d82022-01-10 16:25:19 +00008666bool ClassLinker::LinkMethodsHelper<kPointerSize>::LinkJavaLangObjectMethods(
Vladimir Markob91402f2021-12-21 15:55:06 +00008667 Thread* self,
8668 Handle<mirror::Class> klass) {
8669 DCHECK_EQ(klass.Get(), GetClassRoot<mirror::Object>(class_linker_));
8670 DCHECK_EQ(klass->NumVirtualMethods(), mirror::Object::kVTableLength);
8671 static_assert(IsUint<16>(mirror::Object::kVTableLength));
Vladimir Marko0441d202022-02-18 13:55:15 +00008672 ObjPtr<mirror::PointerArray> vtable = AllocPointerArray(self, mirror::Object::kVTableLength);
Vladimir Markob91402f2021-12-21 15:55:06 +00008673 if (UNLIKELY(vtable == nullptr)) {
8674 self->AssertPendingOOMException();
8675 return false;
8676 }
8677 for (size_t i = 0; i < mirror::Object::kVTableLength; ++i) {
8678 ArtMethod* virtual_method = klass->GetVirtualMethodDuringLinking(i, kPointerSize);
8679 vtable->SetElementPtrSize(i, virtual_method, kPointerSize);
8680 virtual_method->SetMethodIndex(i);
8681 }
8682 klass->SetVTable(vtable);
8683 InitializeObjectVirtualMethodHashes(
8684 klass.Get(),
8685 kPointerSize,
8686 ArrayRef<uint32_t>(class_linker_->object_virtual_method_hashes_));
Vladimir Marko78f62d82022-01-10 16:25:19 +00008687 // The interface table is already allocated but there are no interface methods to link.
8688 DCHECK(klass->GetIfTable() != nullptr);
8689 DCHECK_EQ(klass->GetIfTableCount(), 0);
Vladimir Markob91402f2021-12-21 15:55:06 +00008690 return true;
8691}
8692
Vladimir Markobc893672021-11-10 15:25:46 +00008693// Populate the class vtable and itable. Compute return type indices.
8694bool ClassLinker::LinkMethods(Thread* self,
8695 Handle<mirror::Class> klass,
8696 Handle<mirror::ObjectArray<mirror::Class>> interfaces,
8697 bool* out_new_conflict,
8698 ArtMethod** out_imt) {
8699 self->AllowThreadSuspension();
Vladimir Markobc893672021-11-10 15:25:46 +00008700 // Link virtual methods then interface methods.
8701 Runtime* const runtime = Runtime::Current();
Vladimir Markob91402f2021-12-21 15:55:06 +00008702 if (LIKELY(GetImagePointerSize() == kRuntimePointerSize)) {
8703 LinkMethodsHelper<kRuntimePointerSize> helper(this, klass, self, runtime);
Vladimir Marko78f62d82022-01-10 16:25:19 +00008704 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
Vladimir Markob91402f2021-12-21 15:55:06 +00008705 } else {
8706 constexpr PointerSize kOtherPointerSize =
8707 (kRuntimePointerSize == PointerSize::k64) ? PointerSize::k32 : PointerSize::k64;
8708 LinkMethodsHelper<kOtherPointerSize> helper(this, klass, self, runtime);
Vladimir Marko78f62d82022-01-10 16:25:19 +00008709 return helper.LinkMethods(self, klass, interfaces, out_new_conflict, out_imt);
Vladimir Markob91402f2021-12-21 15:55:06 +00008710 }
Vladimir Markobc893672021-11-10 15:25:46 +00008711}
8712
// Static helper that performs field layout for `ClassLinker::LinkFields()`:
// it assigns an offset to every static or instance field of a class.
class ClassLinker::LinkFieldsHelper {
 public:
  // Assign offsets to the static (`is_static` == true) or instance fields of
  // `klass`. For static fields, `class_size` receives the resulting class
  // object size. Returns false on failure.
  static bool LinkFields(ClassLinker* class_linker,
                         Thread* self,
                         Handle<mirror::Class> klass,
                         bool is_static,
                         size_t* class_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Sort-order bucket for a field's type; defined below.
  enum class FieldTypeOrder : uint16_t;
  // Tracker of alignment gaps usable by smaller fields; defined below.
  class FieldGaps;

  // Sort key pairing a field's type-order bucket with its original index
  // (both 16-bit so the pair packs into 32 bits).
  struct FieldTypeOrderAndIndex {
    FieldTypeOrder field_type_order;
    uint16_t field_index;
  };

  static FieldTypeOrder FieldTypeOrderFromFirstDescriptorCharacter(char first_char);

  // Store `field_offset` in `field` and return the offset just past it.
  template <size_t kSize>
  static MemberOffset AssignFieldOffset(ArtField* field, MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
};
Brian Carlstrom4873d462011-08-21 15:23:39 -07008737
// We use the following order of field types for assigning offsets.
// Some fields can be shuffled forward to fill gaps, see
// `ClassLinker::LinkFieldsHelper::LinkFields()`.
// Buckets are ordered references first, then primitives by decreasing size
// (64-bit, 32-bit, 16-bit, 8-bit).
enum class ClassLinker::LinkFieldsHelper::FieldTypeOrder : uint16_t {
  kReference = 0u,  // Object references are laid out first.
  // 64-bit primitives.
  kLong,
  kDouble,
  // 32-bit primitives.
  kInt,
  kFloat,
  // 16-bit primitives.
  kChar,
  kShort,
  // 8-bit primitives.
  kBoolean,
  kByte,

  // Aliases marking the last bucket of each primitive size class.
  kLast64BitType = kDouble,
  kLast32BitType = kFloat,
  kLast16BitType = kShort,
};
8756
Vladimir Markoc7993d52021-01-27 15:20:56 +00008757ALWAYS_INLINE
Vladimir Marko42bee502021-01-28 14:58:35 +00008758ClassLinker::LinkFieldsHelper::FieldTypeOrder
8759ClassLinker::LinkFieldsHelper::FieldTypeOrderFromFirstDescriptorCharacter(char first_char) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008760 switch (first_char) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008761 case 'J':
8762 return FieldTypeOrder::kLong;
8763 case 'D':
8764 return FieldTypeOrder::kDouble;
8765 case 'I':
8766 return FieldTypeOrder::kInt;
8767 case 'F':
8768 return FieldTypeOrder::kFloat;
8769 case 'C':
8770 return FieldTypeOrder::kChar;
8771 case 'S':
8772 return FieldTypeOrder::kShort;
8773 case 'Z':
8774 return FieldTypeOrder::kBoolean;
8775 case 'B':
8776 return FieldTypeOrder::kByte;
Vladimir Marko42bee502021-01-28 14:58:35 +00008777 default:
8778 DCHECK(first_char == 'L' || first_char == '[') << first_char;
8779 return FieldTypeOrder::kReference;
Vladimir Markoc7993d52021-01-27 15:20:56 +00008780 }
8781}
8782
// Gaps where we can insert fields in object layout.
// Alignment padding creates 1-, 2- and 4-byte gaps. At most one gap of each
// size is tracked at a time (enforced by DCHECKs in `AddGaps()`); releasing
// a larger gap for a smaller field re-registers the unused remainder as
// smaller gap(s).
class ClassLinker::LinkFieldsHelper::FieldGaps {
 public:
  // Round `field_offset` up to `kSize` alignment (kSize in {2, 4, 8}) and
  // record any skipped bytes as gaps for later reuse.
  template <uint32_t kSize>
  ALWAYS_INLINE MemberOffset AlignFieldOffset(MemberOffset field_offset) {
    static_assert(kSize == 2u || kSize == 4u || kSize == 8u);
    if (!IsAligned<kSize>(field_offset.Uint32Value())) {
      uint32_t gap_start = field_offset.Uint32Value();
      field_offset = MemberOffset(RoundUp(gap_start, kSize));
      // A gap before a `kSize`-aligned boundary can only contain sub-gaps of
      // sizes strictly smaller than `kSize`, hence the `kSize - 1u` mask.
      AddGaps<kSize - 1u>(gap_start, field_offset.Uint32Value());
    }
    return field_offset;
  }

  // Returns true if a recorded gap can hold a `kSize`-byte field. Any larger
  // recorded gap also qualifies: its start is aligned to its own size
  // (see the alignment DCHECKs in `AddGaps()`), so it is aligned for `kSize`.
  template <uint32_t kSize>
  bool HasGap() const {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    return (kSize == 1u && gap1_offset_ != kNoOffset) ||
           (kSize <= 2u && gap2_offset_ != kNoOffset) ||
           gap4_offset_ != kNoOffset;
  }

  // Take the offset of a gap suitable for a `kSize`-byte field, preferring
  // the smallest suitable gap. When a larger gap is used, the remainder is
  // re-registered via `AddGaps()`. Must only be called if `HasGap<kSize>()`.
  template <uint32_t kSize>
  MemberOffset ReleaseGap() {
    static_assert(kSize == 1u || kSize == 2u || kSize == 4u);
    uint32_t result;
    if (kSize == 1u && gap1_offset_ != kNoOffset) {
      // Gaps are handed out in increasing address order.
      DCHECK(gap2_offset_ == kNoOffset || gap2_offset_ > gap1_offset_);
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap1_offset_);
      result = gap1_offset_;
      gap1_offset_ = kNoOffset;
    } else if (kSize <= 2u && gap2_offset_ != kNoOffset) {
      DCHECK(gap4_offset_ == kNoOffset || gap4_offset_ > gap2_offset_);
      result = gap2_offset_;
      gap2_offset_ = kNoOffset;
      if (kSize < 2u) {
        // A 1-byte field in a 2-byte gap leaves a 1-byte remainder.
        AddGaps<1u>(result + kSize, result + 2u);
      }
    } else {
      DCHECK_NE(gap4_offset_, kNoOffset);
      result = gap4_offset_;
      gap4_offset_ = kNoOffset;
      if (kSize < 4u) {
        // Remainder of the 4-byte gap: 3 bytes (sub-gaps 1+2) or 2 bytes.
        AddGaps<kSize | 2u>(result + kSize, result + 4u);
      }
    }
    return MemberOffset(result);
  }

 private:
  // Carve [gap_start, gap_end) into at most one 1-byte, one 2-byte and one
  // 4-byte gap, in increasing address order. `kGapsToCheck` is a bitmask of
  // the gap sizes that may occur for the given range.
  template <uint32_t kGapsToCheck>
  void AddGaps(uint32_t gap_start, uint32_t gap_end) {
    if ((kGapsToCheck & 1u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_end, 2u);
      if ((gap_start & 1u) != 0u) {
        // Odd start: record a 1-byte gap to reach 2-byte alignment.
        DCHECK_EQ(gap1_offset_, kNoOffset);
        gap1_offset_ = gap_start;
        gap_start += 1u;
        if (kGapsToCheck == 1u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 2u) != 0u) {
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 2u);
      DCHECK_ALIGNED(gap_end, 4u);
      if ((gap_start & 2u) != 0u) {
        // 2-mod-4 start: record a 2-byte gap to reach 4-byte alignment.
        DCHECK_EQ(gap2_offset_, kNoOffset);
        gap2_offset_ = gap_start;
        gap_start += 2u;
        if (kGapsToCheck <= 3u || gap_start == gap_end) {
          DCHECK_EQ(gap_start, gap_end);
          return;
        }
      }
    }

    if ((kGapsToCheck & 4u) != 0u) {
      // What remains must be exactly one 4-byte gap before an 8-byte boundary.
      DCHECK_LT(gap_start, gap_end);
      DCHECK_ALIGNED(gap_start, 4u);
      DCHECK_ALIGNED(gap_end, 8u);
      DCHECK_EQ(gap_start + 4u, gap_end);
      DCHECK_EQ(gap4_offset_, kNoOffset);
      gap4_offset_ = gap_start;
      return;
    }

    DCHECK(false) << "Remaining gap: " << gap_start << " to " << gap_end
                  << " after checking " << kGapsToCheck;
  }

  // Sentinel meaning "no gap of this size is currently recorded".
  static constexpr uint32_t kNoOffset = static_cast<uint32_t>(-1);

  uint32_t gap4_offset_ = kNoOffset;
  uint32_t gap2_offset_ = kNoOffset;
  uint32_t gap1_offset_ = kNoOffset;
};
8884
8885template <size_t kSize>
Vladimir Marko42bee502021-01-28 14:58:35 +00008886ALWAYS_INLINE
8887MemberOffset ClassLinker::LinkFieldsHelper::AssignFieldOffset(ArtField* field,
8888 MemberOffset field_offset) {
Vladimir Markoc7993d52021-01-27 15:20:56 +00008889 DCHECK_ALIGNED(field_offset.Uint32Value(), kSize);
8890 DCHECK_EQ(Primitive::ComponentSize(field->GetTypeAsPrimitiveType()), kSize);
8891 field->SetOffset(field_offset);
8892 return MemberOffset(field_offset.Uint32Value() + kSize);
8893}
8894
// Assign final memory offsets to all fields of `klass`: static fields when
// `is_static` is true, instance fields otherwise. References are laid out
// first, then primitives grouped by descending size, back-filling alignment
// gaps with smaller fields where possible. For static fields the resulting
// class size is written to `*class_size`; for instance fields the object
// size and reference-field count are recorded on `klass`. Always returns true.
bool ClassLinker::LinkFieldsHelper::LinkFields(ClassLinker* class_linker,
                                               Thread* self,
                                               Handle<mirror::Class> klass,
                                               bool is_static,
                                               size_t* class_size) {
  self->AllowThreadSuspension();
  const size_t num_fields = is_static ? klass->NumStaticFields() : klass->NumInstanceFields();
  LengthPrefixedArray<ArtField>* const fields = is_static ? klass->GetSFieldsPtr() :
      klass->GetIFieldsPtr();

  // Initialize field_offset: statics start at the first reference static
  // field offset computed during linking; instance fields start right after
  // the superclass's object data (or 0 when there is no superclass).
  MemberOffset field_offset(0);
  if (is_static) {
    field_offset = klass->GetFirstReferenceStaticFieldOffsetDuringLinking(
        class_linker->GetImagePointerSize());
  } else {
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (super_class != nullptr) {
      CHECK(super_class->IsResolved())
          << klass->PrettyClass() << " " << super_class->PrettyClass();
      field_offset = MemberOffset(super_class->GetObjectSize());
    }
  }

  CHECK_EQ(num_fields == 0, fields == nullptr) << klass->PrettyClass();

  // We want a relatively stable order so that adding new fields
  // minimizes disruption of C++ version such as Class and Method.
  //
  // The overall sort order is:
  // 1) All object reference fields, sorted alphabetically.
  // 2) All java long (64-bit) integer fields, sorted alphabetically.
  // 3) All java double (64-bit) floating point fields, sorted alphabetically.
  // 4) All java int (32-bit) integer fields, sorted alphabetically.
  // 5) All java float (32-bit) floating point fields, sorted alphabetically.
  // 6) All java char (16-bit) integer fields, sorted alphabetically.
  // 7) All java short (16-bit) integer fields, sorted alphabetically.
  // 8) All java boolean (8-bit) integer fields, sorted alphabetically.
  // 9) All java byte (8-bit) integer fields, sorted alphabetically.
  //
  // (References are first to increase the chance of reference visiting
  // being able to take a fast path using a bitmap of references at the
  // start of the object, see `Class::reference_instance_offsets_`.)
  //
  // Once the fields are sorted in this order we will attempt to fill any gaps
  // that might be present in the memory layout of the structure.
  // Note that we shall not fill gaps between the superclass fields.

  // Collect fields and their "type order index" (see numbered points above).
  const char* old_no_suspend_cause = self->StartAssertNoThreadSuspension(
      "Using plain ArtField references");
  constexpr size_t kStackBufferEntries = 64;  // Avoid allocations for small number of fields.
  FieldTypeOrderAndIndex stack_buffer[kStackBufferEntries];
  std::vector<FieldTypeOrderAndIndex> heap_buffer;
  ArrayRef<FieldTypeOrderAndIndex> sorted_fields;
  if (num_fields <= kStackBufferEntries) {
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(stack_buffer, num_fields);
  } else {
    heap_buffer.resize(num_fields);
    sorted_fields = ArrayRef<FieldTypeOrderAndIndex>(heap_buffer);
  }
  size_t num_reference_fields = 0;
  size_t primitive_fields_start = num_fields;
  DCHECK_LE(num_fields, 1u << 16);  // Field indexes below must fit in uint16_t.
  for (size_t i = 0; i != num_fields; ++i) {
    ArtField* field = &fields->At(i);
    const char* descriptor = field->GetTypeDescriptor();
    FieldTypeOrder field_type_order = FieldTypeOrderFromFirstDescriptorCharacter(descriptor[0]);
    uint16_t field_index = dchecked_integral_cast<uint16_t>(i);
    // Insert references to the start, other fields to the end.
    DCHECK_LT(num_reference_fields, primitive_fields_start);
    if (field_type_order == FieldTypeOrder::kReference) {
      sorted_fields[num_reference_fields] = { field_type_order, field_index };
      ++num_reference_fields;
    } else {
      --primitive_fields_start;
      sorted_fields[primitive_fields_start] = { field_type_order, field_index };
    }
  }
  DCHECK_EQ(num_reference_fields, primitive_fields_start);

  // Reference fields are already sorted by field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin(),
      sorted_fields.begin() + num_reference_fields,
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_EQ(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex(),
                 lhs.field_index < rhs.field_index);
        return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
      }));
  // Primitive fields were stored in reverse order of their field index (and dex field index).
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        CHECK_NE(lhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_NE(rhs_field->GetTypeAsPrimitiveType(), Primitive::kPrimNot);
        CHECK_EQ(lhs_field->GetDexFieldIndex() > rhs_field->GetDexFieldIndex(),
                 lhs.field_index > rhs.field_index);
        return lhs.field_index > rhs.field_index;
      }));
  // Sort the primitive fields by the field type order, then field index.
  std::sort(sorted_fields.begin() + primitive_fields_start,
            sorted_fields.end(),
            [](const auto& lhs, const auto& rhs) {
              if (lhs.field_type_order != rhs.field_type_order) {
                return lhs.field_type_order < rhs.field_type_order;
              } else {
                return lhs.field_index < rhs.field_index;
              }
            });
  // Primitive fields are now sorted by field size (descending), then type, then field index.
  DCHECK(std::is_sorted(
      sorted_fields.begin() + primitive_fields_start,
      sorted_fields.end(),
      [fields](const auto& lhs, const auto& rhs) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtField* lhs_field = &fields->At(lhs.field_index);
        ArtField* rhs_field = &fields->At(rhs.field_index);
        Primitive::Type lhs_type = lhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(lhs_type, Primitive::kPrimNot);
        Primitive::Type rhs_type = rhs_field->GetTypeAsPrimitiveType();
        CHECK_NE(rhs_type, Primitive::kPrimNot);
        if (lhs_type != rhs_type) {
          size_t lhs_size = Primitive::ComponentSize(lhs_type);
          size_t rhs_size = Primitive::ComponentSize(rhs_type);
          return (lhs_size != rhs_size) ? (lhs_size > rhs_size) : (lhs_type < rhs_type);
        } else {
          return lhs_field->GetDexFieldIndex() < rhs_field->GetDexFieldIndex();
        }
      }));

  // Process reference fields.
  FieldGaps field_gaps;
  size_t index = 0u;
  if (num_reference_fields != 0u) {
    constexpr size_t kReferenceSize = sizeof(mirror::HeapReference<mirror::Object>);
    field_offset = field_gaps.AlignFieldOffset<kReferenceSize>(field_offset);
    for (; index != num_reference_fields; ++index) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<kReferenceSize>(field, field_offset);
    }
  }
  // Process 64-bit fields.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
    field_offset = field_gaps.AlignFieldOffset<8u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast64BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<8u>(field, field_offset);
      ++index;
    }
  }
  // Process 32-bit fields: fill a recorded 4-byte gap first, then append.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
    field_offset = field_gaps.AlignFieldOffset<4u>(field_offset);
    if (field_gaps.HasGap<4u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<4u>(field, field_gaps.ReleaseGap<4u>());  // Ignore return value.
      ++index;
      DCHECK(!field_gaps.HasGap<4u>());  // There can be only one gap for a 32-bit field.
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast32BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<4u>(field, field_offset);
      ++index;
    }
  }
  // Process 16-bit fields: fill recorded 2-byte gaps first, then append.
  if (index != num_fields &&
      sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
    field_offset = field_gaps.AlignFieldOffset<2u>(field_offset);
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType &&
           field_gaps.HasGap<2u>()) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      AssignFieldOffset<2u>(field, field_gaps.ReleaseGap<2u>());  // Ignore return value.
      ++index;
    }
    while (index != num_fields &&
           sorted_fields[index].field_type_order <= FieldTypeOrder::kLast16BitType) {
      ArtField* field = &fields->At(sorted_fields[index].field_index);
      field_offset = AssignFieldOffset<2u>(field, field_offset);
      ++index;
    }
  }
  // Process 8-bit fields: fill remaining 1-byte gaps first, then append.
  for (; index != num_fields && field_gaps.HasGap<1u>(); ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    AssignFieldOffset<1u>(field, field_gaps.ReleaseGap<1u>());  // Ignore return value.
  }
  for (; index != num_fields; ++index) {
    ArtField* field = &fields->At(sorted_fields[index].field_index);
    field_offset = AssignFieldOffset<1u>(field, field_offset);
  }

  self->EndAssertNoThreadSuspension(old_no_suspend_cause);

  // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
  DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
  if (!is_static &&
      UNLIKELY(!class_linker->init_done_) &&
      klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
    // We know there are no non-reference fields in the Reference classes, and we know
    // that 'referent' is alphabetically last, so this is easy...
    CHECK_EQ(num_reference_fields, num_fields) << klass->PrettyClass();
    CHECK_STREQ(fields->At(num_fields - 1).GetName(), "referent")
        << klass->PrettyClass();
    --num_reference_fields;
  }

  size_t size = field_offset.Uint32Value();
  // Update klass.
  if (is_static) {
    klass->SetNumReferenceStaticFields(num_reference_fields);
    *class_size = size;
  } else {
    klass->SetNumReferenceInstanceFields(num_reference_fields);
    ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
    if (num_reference_fields == 0 || super_class == nullptr) {
      // object has one reference field, klass, but we ignore it since we always visit the class.
      // super_class is null iff the class is java.lang.Object.
      if (super_class == nullptr ||
          (super_class->GetClassFlags() & mirror::kClassFlagNoReferenceFields) != 0) {
        klass->SetClassFlags(klass->GetClassFlags() | mirror::kClassFlagNoReferenceFields);
      }
    }
    if (kIsDebugBuild) {
      // Verify the reference-field count against the whole superclass chain.
      DCHECK_EQ(super_class == nullptr, klass->DescriptorEquals("Ljava/lang/Object;"));
      size_t total_reference_instance_fields = 0;
      ObjPtr<mirror::Class> cur_super = klass.Get();
      while (cur_super != nullptr) {
        total_reference_instance_fields += cur_super->NumReferenceInstanceFieldsDuringLinking();
        cur_super = cur_super->GetSuperClass();
      }
      if (super_class == nullptr) {
        CHECK_EQ(total_reference_instance_fields, 1u) << klass->PrettyDescriptor();
      } else {
        // Check that there is at least num_reference_fields other than Object.class.
        CHECK_GE(total_reference_instance_fields, 1u + num_reference_fields)
            << klass->PrettyClass();
      }
    }
    if (!klass->IsVariableSize()) {
      std::string temp;
      DCHECK_GE(size, sizeof(mirror::Object)) << klass->GetDescriptor(&temp);
      size_t previous_size = klass->GetObjectSize();
      if (previous_size != 0) {
        // Make sure that we didn't originally have an incorrect size.
        CHECK_EQ(previous_size, size) << klass->GetDescriptor(&temp);
      }
      klass->SetObjectSize(size);
    }
  }

  if (kIsDebugBuild) {
    // Make sure that the fields array is ordered by name but all reference
    // offsets are at the beginning as far as alignment allows.
    MemberOffset start_ref_offset = is_static
        ? klass->GetFirstReferenceStaticFieldOffsetDuringLinking(class_linker->image_pointer_size_)
        : klass->GetFirstReferenceInstanceFieldOffset();
    MemberOffset end_ref_offset(start_ref_offset.Uint32Value() +
                                num_reference_fields *
                                    sizeof(mirror::HeapReference<mirror::Object>));
    MemberOffset current_ref_offset = start_ref_offset;
    for (size_t i = 0; i < num_fields; i++) {
      ArtField* field = &fields->At(i);
      VLOG(class_linker) << "LinkFields: " << (is_static ? "static" : "instance")
                         << " class=" << klass->PrettyClass() << " field=" << field->PrettyField()
                         << " offset=" << field->GetOffsetDuringLinking();
      if (i != 0) {
        ArtField* const prev_field = &fields->At(i - 1);
        // NOTE: The field names can be the same. This is not possible in the Java language
        // but it's valid Java/dex bytecode and for example proguard can generate such bytecode.
        DCHECK_LE(strcmp(prev_field->GetName(), field->GetName()), 0);
      }
      Primitive::Type type = field->GetTypeAsPrimitiveType();
      bool is_primitive = type != Primitive::kPrimNot;
      if (klass->DescriptorEquals("Ljava/lang/ref/Reference;") &&
          strcmp("referent", field->GetName()) == 0) {
        is_primitive = true;  // We lied above, so we have to expect a lie here.
      }
      MemberOffset offset = field->GetOffsetDuringLinking();
      if (is_primitive) {
        if (offset.Uint32Value() < end_ref_offset.Uint32Value()) {
          // Shuffled before references.
          size_t type_size = Primitive::ComponentSize(type);
          CHECK_LT(type_size, sizeof(mirror::HeapReference<mirror::Object>));
          CHECK_LT(offset.Uint32Value(), start_ref_offset.Uint32Value());
          CHECK_LE(offset.Uint32Value() + type_size, start_ref_offset.Uint32Value());
          CHECK(!IsAligned<sizeof(mirror::HeapReference<mirror::Object>)>(offset.Uint32Value()));
        }
      } else {
        CHECK_EQ(current_ref_offset.Uint32Value(), offset.Uint32Value());
        current_ref_offset = MemberOffset(current_ref_offset.Uint32Value() +
                                          sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
    CHECK_EQ(current_ref_offset.Uint32Value(), end_ref_offset.Uint32Value());
  }
  return true;
}
9205
Vladimir Marko42bee502021-01-28 14:58:35 +00009206bool ClassLinker::LinkInstanceFields(Thread* self, Handle<mirror::Class> klass) {
9207 CHECK(klass != nullptr);
9208 return LinkFieldsHelper::LinkFields(this, self, klass, false, nullptr);
9209}
9210
9211bool ClassLinker::LinkStaticFields(Thread* self, Handle<mirror::Class> klass, size_t* class_size) {
9212 CHECK(klass != nullptr);
9213 return LinkFieldsHelper::LinkFields(this, self, klass, true, class_size);
9214}
9215
Vladimir Marko76649e82014-11-10 18:32:59 +00009216// Set the bitmap of reference instance field offsets.
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07009217void ClassLinker::CreateReferenceInstanceOffsets(Handle<mirror::Class> klass) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009218 uint32_t reference_offsets = 0;
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009219 ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009220 // Leave the reference offsets as 0 for mirror::Object (the class field is handled specially).
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009221 if (super_class != nullptr) {
Ian Rogers0cfe1fb2011-08-26 03:29:44 -07009222 reference_offsets = super_class->GetReferenceInstanceOffsets();
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009223 // Compute reference offsets unless our superclass overflowed.
9224 if (reference_offsets != mirror::Class::kClassWalkSuper) {
9225 size_t num_reference_fields = klass->NumReferenceInstanceFieldsDuringLinking();
Vladimir Marko76649e82014-11-10 18:32:59 +00009226 if (num_reference_fields != 0u) {
9227 // All of the fields that contain object references are guaranteed be grouped in memory
9228 // starting at an appropriately aligned address after super class object data.
9229 uint32_t start_offset = RoundUp(super_class->GetObjectSize(),
9230 sizeof(mirror::HeapReference<mirror::Object>));
9231 uint32_t start_bit = (start_offset - mirror::kObjectHeaderSize) /
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009232 sizeof(mirror::HeapReference<mirror::Object>);
Vladimir Marko76649e82014-11-10 18:32:59 +00009233 if (start_bit + num_reference_fields > 32) {
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009234 reference_offsets = mirror::Class::kClassWalkSuper;
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009235 } else {
Vladimir Marko76649e82014-11-10 18:32:59 +00009236 reference_offsets |= (0xffffffffu << start_bit) &
9237 (0xffffffffu >> (32 - (start_bit + num_reference_fields)));
Ian Rogerscdc1aaf2014-10-09 13:21:38 -07009238 }
9239 }
Brian Carlstrom4873d462011-08-21 15:23:39 -07009240 }
9241 }
Mingyao Yangfaff0f02014-09-10 12:03:22 -07009242 klass->SetReferenceInstanceOffsets(reference_offsets);
Carl Shapiro0e5d75d2011-07-06 18:28:37 -07009243}
9244
Vladimir Marko18090d12018-06-01 16:53:12 +01009245ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9246 ObjPtr<mirror::DexCache> dex_cache) {
9247 StackHandleScope<1> hs(Thread::Current());
9248 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(dex_cache));
9249 return DoResolveString(string_idx, h_dex_cache);
9250}
9251
9252ObjPtr<mirror::String> ClassLinker::DoResolveString(dex::StringIndex string_idx,
9253 Handle<mirror::DexCache> dex_cache) {
Vladimir Markoa64b52d2017-12-08 16:27:49 +00009254 const DexFile& dex_file = *dex_cache->GetDexFile();
Ian Rogersdfb325e2013-10-30 01:00:44 -07009255 uint32_t utf16_length;
9256 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Mathieu Chartier28357fa2016-10-18 16:27:40 -07009257 ObjPtr<mirror::String> string = intern_table_->InternStrong(utf16_length, utf8_data);
Vladimir Marko8d6768d2017-03-14 10:13:21 +00009258 if (string != nullptr) {
9259 dex_cache->SetResolvedString(string_idx, string);
9260 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00009261 return string;
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009262}
9263
Vladimir Marko18090d12018-06-01 16:53:12 +01009264ObjPtr<mirror::String> ClassLinker::DoLookupString(dex::StringIndex string_idx,
9265 ObjPtr<mirror::DexCache> dex_cache) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009266 DCHECK(dex_cache != nullptr);
Vladimir Markoa64b52d2017-12-08 16:27:49 +00009267 const DexFile& dex_file = *dex_cache->GetDexFile();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009268 uint32_t utf16_length;
9269 const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08009270 ObjPtr<mirror::String> string =
9271 intern_table_->LookupStrong(Thread::Current(), utf16_length, utf8_data);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009272 if (string != nullptr) {
9273 dex_cache->SetResolvedString(string_idx, string);
9274 }
Vladimir Marko28e012a2017-12-07 11:22:59 +00009275 return string;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00009276}
9277
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009278ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko09c5ca42018-05-31 15:15:31 +01009279 ObjPtr<mirror::Class> referrer) {
9280 return DoLookupResolvedType(type_idx, referrer->GetDexCache(), referrer->GetClassLoader());
9281}
9282
9283ObjPtr<mirror::Class> ClassLinker::DoLookupResolvedType(dex::TypeIndex type_idx,
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009284 ObjPtr<mirror::DexCache> dex_cache,
9285 ObjPtr<mirror::ClassLoader> class_loader) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009286 DCHECK(dex_cache->GetClassLoader() == class_loader);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009287 const DexFile& dex_file = *dex_cache->GetDexFile();
9288 const char* descriptor = dex_file.StringByTypeIdx(type_idx);
Vladimir Markoc63d9672021-03-31 15:50:39 +01009289 ObjPtr<mirror::Class> type = LookupResolvedType(descriptor, class_loader);
9290 if (type != nullptr) {
9291 DCHECK(type->IsResolved());
9292 dex_cache->SetResolvedType(type_idx, type);
9293 }
9294 return type;
9295}
9296
9297ObjPtr<mirror::Class> ClassLinker::LookupResolvedType(const char* descriptor,
9298 ObjPtr<mirror::ClassLoader> class_loader) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009299 DCHECK_NE(*descriptor, '\0') << "descriptor is empty string";
9300 ObjPtr<mirror::Class> type = nullptr;
9301 if (descriptor[1] == '\0') {
9302 // only the descriptors of primitive types should be 1 character long, also avoid class lookup
9303 // for primitive classes that aren't backed by dex files.
Vladimir Marko9186b182018-11-06 14:55:54 +00009304 type = LookupPrimitiveClass(descriptor[0]);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009305 } else {
9306 Thread* const self = Thread::Current();
9307 DCHECK(self != nullptr);
9308 const size_t hash = ComputeModifiedUtf8Hash(descriptor);
9309 // Find the class in the loaded classes table.
Vladimir Markobcf17522018-06-01 13:14:32 +01009310 type = LookupClass(self, descriptor, hash, class_loader);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009311 }
Vladimir Markoc63d9672021-03-31 15:50:39 +01009312 return (type != nullptr && type->IsResolved()) ? type : nullptr;
Mathieu Chartierb8901302016-09-30 10:27:43 -07009313}
9314
Andreas Gampeb0625e02019-05-01 12:43:31 -07009315template <typename RefType>
9316ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx, RefType referrer) {
Vladimir Marko09c5ca42018-05-31 15:15:31 +01009317 StackHandleScope<2> hs(Thread::Current());
9318 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9319 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
9320 return DoResolveType(type_idx, dex_cache, class_loader);
9321}
9322
// Explicitly instantiate the template above for every referrer kind used by
// callers, so the definition can stay in this .cc file.
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtField* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ArtMethod* referrer);
template ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                          ObjPtr<mirror::Class> referrer);
9330
// Resolve the type for `type_idx` by loading the class through `class_loader`.
// On success the result is cached in the dex cache. On failure an exception is
// pending; a ClassNotFoundException is converted to NoClassDefFoundError with
// the original exception attached as its cause. Returns null on failure.
ObjPtr<mirror::Class> ClassLinker::DoResolveType(dex::TypeIndex type_idx,
                                                 Handle<mirror::DexCache> dex_cache,
                                                 Handle<mirror::ClassLoader> class_loader) {
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
  Thread* self = Thread::Current();
  const char* descriptor = dex_cache->GetDexFile()->StringByTypeIdx(type_idx);
  ObjPtr<mirror::Class> resolved = FindClass(self, descriptor, class_loader);
  if (resolved != nullptr) {
    // TODO: we used to throw here if resolved's class loader was not the
    // boot class loader. This was to permit different classes with the
    // same name to be loaded simultaneously by different loaders
    dex_cache->SetResolvedType(type_idx, resolved);
  } else {
    CHECK(self->IsExceptionPending())
        << "Expected pending exception for failed resolution of: " << descriptor;
    // Convert a ClassNotFoundException to a NoClassDefFoundError.
    StackHandleScope<1> hs(self);
    Handle<mirror::Throwable> cause(hs.NewHandle(self->GetException()));
    if (cause->InstanceOf(GetClassRoot(ClassRoot::kJavaLangClassNotFoundException, this))) {
      DCHECK(resolved == nullptr);  // No Handle needed to preserve resolved.
      // Clear the original exception before throwing the replacement, then
      // chain the original as the cause of the new error.
      self->ClearException();
      ThrowNoClassDefFoundError("Failed resolution of: %s", descriptor);
      self->GetException()->SetCause(cause.Get());
    }
  }
  DCHECK((resolved == nullptr) || resolved->IsResolved())
      << resolved->PrettyDescriptor() << " " << resolved->GetStatus();
  return resolved;
}
9360
// Finds an already-resolvable method in `klass` for `method_idx` of `dex_cache`,
// applying hidden-api checks, and caches a successful result in the dex cache.
// Returns null (without setting a pending exception) if the method is not found
// or is denied by hidden-api policy.
ArtMethod* ClassLinker::FindResolvedMethod(ObjPtr<mirror::Class> klass,
                                           ObjPtr<mirror::DexCache> dex_cache,
                                           ObjPtr<mirror::ClassLoader> class_loader,
                                           uint32_t method_idx) {
  DCHECK(dex_cache->GetClassLoader() == class_loader);
  // Search for the method using dex_cache and method_idx. The Class::Find*Method()
  // functions can optimize the search if the dex_cache is the same as the DexCache
  // of the class, with fall-back to name and signature search otherwise.
  ArtMethod* resolved = nullptr;
  if (klass->IsInterface()) {
    resolved = klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
  } else {
    resolved = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
  }
  DCHECK(resolved == nullptr || resolved->GetDeclaringClassUnchecked() != nullptr);
  if (resolved != nullptr &&
      // We pass AccessMethod::kNone instead of kLinking to not warn yet on the
      // access, as we'll be looking if the method can be accessed through an
      // interface.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kNone)) {
    // The resolved method that we have found cannot be accessed due to
    // hiddenapi (typically it is declared up the hierarchy and is not an SDK
    // method). Try to find an interface method from the implemented interfaces which is
    // part of the SDK.
    ArtMethod* itf_method = klass->FindAccessibleInterfaceMethod(resolved, image_pointer_size_);
    if (itf_method == nullptr) {
      // No accessible interface method. Call ShouldDenyAccessToMember again but
      // this time with AccessMethod::kLinking to ensure that an appropriate
      // warning is logged, then report the method as not found.
      hiddenapi::ShouldDenyAccessToMember(resolved,
                                          hiddenapi::AccessContext(class_loader, dex_cache),
                                          hiddenapi::AccessMethod::kLinking);
      resolved = nullptr;
    } else {
      // We found an interface method that is accessible, continue with the resolved method.
    }
  }
  if (resolved != nullptr) {
    // In case of jvmti, the dex file gets verified before being registered, so first
    // check if it's registered before checking class tables.
    const DexFile& dex_file = *dex_cache->GetDexFile();
    DCHECK_IMPLIES(
        IsDexFileRegistered(Thread::Current(), dex_file),
        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
        << "DexFile referrer: " << dex_file.GetLocation()
        << " ClassLoader: " << DescribeLoaders(class_loader, "");
    // Be a good citizen and update the dex cache to speed subsequent calls.
    dex_cache->SetResolvedMethod(method_idx, resolved);
    // Disable the following invariant check as the verifier breaks it. b/73760543
    // const DexFile::MethodId& method_id = dex_file.GetMethodId(method_idx);
    // DCHECK(LookupResolvedType(method_id.class_idx_, dex_cache, class_loader) != nullptr)
    //     << "Method: " << resolved->PrettyMethod() << ", "
    //     << "Class: " << klass->PrettyClass() << " (" << klass->GetStatus() << "), "
    //     << "DexFile referrer: " << dex_file.GetLocation();
  }
  return resolved;
}
9420
David Brazdil4525e0b2018-04-05 16:57:32 +01009421// Returns true if `method` is either null or hidden.
9422// Does not print any warnings if it is hidden.
9423static bool CheckNoSuchMethod(ArtMethod* method,
9424 ObjPtr<mirror::DexCache> dex_cache,
9425 ObjPtr<mirror::ClassLoader> class_loader)
9426 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009427 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
David Brazdil4525e0b2018-04-05 16:57:32 +01009428 return method == nullptr ||
David Brazdilf50ac102018-10-17 18:00:06 +01009429 hiddenapi::ShouldDenyAccessToMember(method,
9430 hiddenapi::AccessContext(class_loader, dex_cache),
9431 hiddenapi::AccessMethod::kNone); // no warnings
David Brazdil4525e0b2018-04-05 16:57:32 +01009432}
9433
9434ArtMethod* ClassLinker::FindIncompatibleMethod(ObjPtr<mirror::Class> klass,
9435 ObjPtr<mirror::DexCache> dex_cache,
9436 ObjPtr<mirror::ClassLoader> class_loader,
9437 uint32_t method_idx) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009438 DCHECK(dex_cache->GetClassLoader() == class_loader);
David Brazdil4525e0b2018-04-05 16:57:32 +01009439 if (klass->IsInterface()) {
9440 ArtMethod* method = klass->FindClassMethod(dex_cache, method_idx, image_pointer_size_);
9441 return CheckNoSuchMethod(method, dex_cache, class_loader) ? nullptr : method;
9442 } else {
9443 // If there was an interface method with the same signature, we would have
9444 // found it in the "copied" methods. Only DCHECK that the interface method
9445 // really does not exist.
9446 if (kIsDebugBuild) {
9447 ArtMethod* method =
9448 klass->FindInterfaceMethod(dex_cache, method_idx, image_pointer_size_);
Nicolas Geoffray24642182022-02-14 14:40:51 +00009449 CHECK(CheckNoSuchMethod(method, dex_cache, class_loader) ||
9450 (klass->FindAccessibleInterfaceMethod(method, image_pointer_size_) == nullptr));
David Brazdil4525e0b2018-04-05 16:57:32 +01009451 }
9452 return nullptr;
9453 }
9454}
9455
Vladimir Marko89011192017-12-11 13:45:05 +00009456ArtMethod* ClassLinker::ResolveMethodWithoutInvokeType(uint32_t method_idx,
Jeff Hao13e748b2015-08-25 20:44:19 +00009457 Handle<mirror::DexCache> dex_cache,
9458 Handle<mirror::ClassLoader> class_loader) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009459 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
David Srbecky5de5efe2021-02-15 21:23:00 +00009460 ArtMethod* resolved = dex_cache->GetResolvedMethod(method_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009461 Thread::PoisonObjectPointersIfDebug();
Vladimir Marko07bfbac2017-07-06 14:55:02 +01009462 if (resolved != nullptr) {
9463 DCHECK(!resolved->IsRuntimeMethod());
Jeff Hao13e748b2015-08-25 20:44:19 +00009464 DCHECK(resolved->GetDeclaringClassUnchecked() != nullptr) << resolved->GetDexMethodIndex();
9465 return resolved;
9466 }
9467 // Fail, get the declaring class.
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009468 const dex::MethodId& method_id = dex_cache->GetDexFile()->GetMethodId(method_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009469 ObjPtr<mirror::Class> klass = ResolveType(method_id.class_idx_, dex_cache, class_loader);
Jeff Hao13e748b2015-08-25 20:44:19 +00009470 if (klass == nullptr) {
9471 Thread::Current()->AssertPendingException();
9472 return nullptr;
9473 }
Nicolas Geoffrayae12f962023-01-06 12:06:13 +00009474 return FindResolvedMethod(klass, dex_cache.Get(), class_loader.Get(), method_idx);
Jeff Hao13e748b2015-08-25 20:44:19 +00009475}
9476
Vladimir Markof44d36c2017-03-14 14:18:46 +00009477ArtField* ClassLinker::LookupResolvedField(uint32_t field_idx,
9478 ObjPtr<mirror::DexCache> dex_cache,
9479 ObjPtr<mirror::ClassLoader> class_loader,
9480 bool is_static) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009481 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009482 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009483 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009484 ObjPtr<mirror::Class> klass = dex_cache->GetResolvedType(field_id.class_idx_);
9485 if (klass == nullptr) {
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009486 klass = LookupResolvedType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009487 }
9488 if (klass == nullptr) {
9489 // The class has not been resolved yet, so the field is also unresolved.
9490 return nullptr;
9491 }
9492 DCHECK(klass->IsResolved());
Vladimir Markof44d36c2017-03-14 14:18:46 +00009493
David Brazdil1ab0fa82018-05-04 11:28:03 +01009494 return FindResolvedField(klass, dex_cache, class_loader, field_idx, is_static);
Vladimir Markof44d36c2017-03-14 14:18:46 +00009495}
9496
Vladimir Markoe11dd502017-12-08 14:09:45 +00009497ArtField* ClassLinker::ResolveFieldJLS(uint32_t field_idx,
Mathieu Chartierc7853442015-03-27 14:35:38 -07009498 Handle<mirror::DexCache> dex_cache,
9499 Handle<mirror::ClassLoader> class_loader) {
Andreas Gampefa4333d2017-02-14 11:10:34 -08009500 DCHECK(dex_cache != nullptr);
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009501 DCHECK(dex_cache->GetClassLoader() == class_loader.Get());
David Srbecky5de5efe2021-02-15 21:23:00 +00009502 ArtField* resolved = dex_cache->GetResolvedField(field_idx);
Mathieu Chartiera59d9b22016-09-26 18:13:17 -07009503 Thread::PoisonObjectPointersIfDebug();
Andreas Gampe58a5af82014-07-31 16:23:49 -07009504 if (resolved != nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009505 return resolved;
9506 }
Vladimir Markoe11dd502017-12-08 14:09:45 +00009507 const DexFile& dex_file = *dex_cache->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009508 const dex::FieldId& field_id = dex_file.GetFieldId(field_idx);
Vladimir Marko666ee3d2017-12-11 18:37:36 +00009509 ObjPtr<mirror::Class> klass = ResolveType(field_id.class_idx_, dex_cache, class_loader);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009510 if (klass == nullptr) {
Ian Rogersb067ac22011-12-13 18:05:09 -08009511 DCHECK(Thread::Current()->IsExceptionPending());
Andreas Gampe2ed8def2014-08-28 14:41:02 -07009512 return nullptr;
Ian Rogersb067ac22011-12-13 18:05:09 -08009513 }
9514
David Brazdil1ab0fa82018-05-04 11:28:03 +01009515 resolved = FindResolvedFieldJLS(klass, dex_cache.Get(), class_loader.Get(), field_idx);
9516 if (resolved == nullptr) {
9517 const char* name = dex_file.GetFieldName(field_id);
9518 const char* type = dex_file.GetFieldTypeDescriptor(field_id);
Vladimir Marko19a4d372016-12-08 14:41:46 +00009519 ThrowNoSuchFieldError("", klass, type, name);
Brian Carlstrom9ea1cb12011-08-24 23:18:18 -07009520 }
9521 return resolved;
Carl Shapiro5fafe2b2011-07-09 15:34:41 -07009522}
9523
David Brazdil1ab0fa82018-05-04 11:28:03 +01009524ArtField* ClassLinker::FindResolvedField(ObjPtr<mirror::Class> klass,
9525 ObjPtr<mirror::DexCache> dex_cache,
9526 ObjPtr<mirror::ClassLoader> class_loader,
9527 uint32_t field_idx,
9528 bool is_static) {
Nicolas Geoffrayab91eef2021-09-14 09:48:51 +01009529 DCHECK(dex_cache->GetClassLoader() == class_loader);
Vladimir Markob10668c2021-06-10 09:52:53 +01009530 ArtField* resolved = is_static ? klass->FindStaticField(dex_cache, field_idx)
9531 : klass->FindInstanceField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009532 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009533 hiddenapi::ShouldDenyAccessToMember(resolved,
9534 hiddenapi::AccessContext(class_loader, dex_cache),
9535 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009536 resolved = nullptr;
9537 }
9538
9539 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009540 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009541 }
9542
9543 return resolved;
9544}
9545
9546ArtField* ClassLinker::FindResolvedFieldJLS(ObjPtr<mirror::Class> klass,
9547 ObjPtr<mirror::DexCache> dex_cache,
9548 ObjPtr<mirror::ClassLoader> class_loader,
9549 uint32_t field_idx) {
Nicolas Geoffrayc07f4882021-09-13 09:20:33 +01009550 DCHECK(dex_cache->GetClassLoader().Ptr() == class_loader.Ptr());
Vladimir Markob10668c2021-06-10 09:52:53 +01009551 ArtField* resolved = klass->FindField(dex_cache, field_idx);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009552
9553 if (resolved != nullptr &&
David Brazdilf50ac102018-10-17 18:00:06 +01009554 hiddenapi::ShouldDenyAccessToMember(resolved,
9555 hiddenapi::AccessContext(class_loader, dex_cache),
9556 hiddenapi::AccessMethod::kLinking)) {
David Brazdil1ab0fa82018-05-04 11:28:03 +01009557 resolved = nullptr;
9558 }
9559
9560 if (resolved != nullptr) {
David Srbecky5de5efe2021-02-15 21:23:00 +00009561 dex_cache->SetResolvedField(field_idx, resolved);
David Brazdil1ab0fa82018-05-04 11:28:03 +01009562 }
9563
9564 return resolved;
9565}
9566
// Resolves a java.lang.invoke.MethodType for the given proto index: resolves
// the return type and every parameter type, creates the MethodType object and
// caches it in the dex cache. Returns null with a pending exception on any
// resolution or allocation failure.
ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(
    Thread* self,
    dex::ProtoIndex proto_idx,
    Handle<mirror::DexCache> dex_cache,
    Handle<mirror::ClassLoader> class_loader) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK(dex_cache != nullptr);
  DCHECK(dex_cache->GetClassLoader() == class_loader.Get());

  // Fast path: the MethodType may already be cached.
  ObjPtr<mirror::MethodType> resolved = dex_cache->GetResolvedMethodType(proto_idx);
  if (resolved != nullptr) {
    return resolved;
  }

  // Handles: return_type, method_params, param_class, type.
  StackHandleScope<4> hs(self);

  // First resolve the return type.
  const DexFile& dex_file = *dex_cache->GetDexFile();
  const dex::ProtoId& proto_id = dex_file.GetProtoId(proto_idx);
  Handle<mirror::Class> return_type(hs.NewHandle(
      ResolveType(proto_id.return_type_idx_, dex_cache, class_loader)));
  if (return_type == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Then resolve the argument types.
  //
  // TODO: Is there a better way to figure out the number of method arguments
  // other than by looking at the shorty ?
  // The shorty has one character per argument plus one for the return type.
  const size_t num_method_args = strlen(dex_file.StringDataByIdx(proto_id.shorty_idx_)) - 1;

  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_method_args)));
  if (method_params == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Resolve each parameter type; a single MutableHandle is reused across
  // iterations to keep the resolved class safely visible to the GC.
  DexFileParameterIterator it(dex_file, proto_id);
  int32_t i = 0;
  MutableHandle<mirror::Class> param_class = hs.NewHandle<mirror::Class>(nullptr);
  for (; it.HasNext(); it.Next()) {
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    param_class.Assign(ResolveType(type_idx, dex_cache, class_loader));
    if (param_class == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }

    method_params->Set(i++, param_class.Get());
  }

  DCHECK(!it.HasNext());

  Handle<mirror::MethodType> type = hs.NewHandle(
      mirror::MethodType::Create(self, return_type, method_params));
  if (type != nullptr) {
    // Ensure all stores for the newly created MethodType are visible, before we attempt to place
    // it in the DexCache (b/224733324).
    std::atomic_thread_fence(std::memory_order_release);
    dex_cache->SetResolvedMethodType(proto_idx, type.Get());
  }

  return type.Get();
}
9634
Vladimir Markoaf940202017-12-08 15:01:18 +00009635ObjPtr<mirror::MethodType> ClassLinker::ResolveMethodType(Thread* self,
Orion Hodson06d10a72018-05-14 08:53:38 +01009636 dex::ProtoIndex proto_idx,
Vladimir Markoaf940202017-12-08 15:01:18 +00009637 ArtMethod* referrer) {
Orion Hodson2e599942017-09-22 16:17:41 +01009638 StackHandleScope<2> hs(self);
Orion Hodson2e599942017-09-22 16:17:41 +01009639 Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
9640 Handle<mirror::ClassLoader> class_loader(hs.NewHandle(referrer->GetClassLoader()));
Vladimir Markoaf940202017-12-08 15:01:18 +00009641 return ResolveMethodType(self, proto_idx, dex_cache, class_loader);
Orion Hodson2e599942017-09-22 16:17:41 +01009642}
9643
Vladimir Marko5aead702019-03-27 11:00:36 +00009644ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForField(
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009645 Thread* self,
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009646 const dex::MethodHandleItem& method_handle,
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009647 ArtMethod* referrer) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009648 DexFile::MethodHandleType handle_type =
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009649 static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
9650 mirror::MethodHandle::Kind kind;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009651 bool is_put;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009652 bool is_static;
9653 int32_t num_params;
Orion Hodsonc069a302017-01-18 09:23:12 +00009654 switch (handle_type) {
9655 case DexFile::MethodHandleType::kStaticPut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009656 kind = mirror::MethodHandle::Kind::kStaticPut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009657 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009658 is_static = true;
9659 num_params = 1;
Orion Hodson631827d2017-04-10 14:53:47 +01009660 break;
9661 }
9662 case DexFile::MethodHandleType::kStaticGet: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009663 kind = mirror::MethodHandle::Kind::kStaticGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009664 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009665 is_static = true;
9666 num_params = 0;
Orion Hodson631827d2017-04-10 14:53:47 +01009667 break;
9668 }
9669 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson82b351f2017-07-05 14:34:25 +01009670 kind = mirror::MethodHandle::Kind::kInstancePut;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009671 is_put = true;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009672 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009673 num_params = 2;
9674 break;
9675 }
9676 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009677 kind = mirror::MethodHandle::Kind::kInstanceGet;
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009678 is_put = false;
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009679 is_static = false;
Orion Hodsonc069a302017-01-18 09:23:12 +00009680 num_params = 1;
9681 break;
9682 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009683 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson82b351f2017-07-05 14:34:25 +01009684 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009685 case DexFile::MethodHandleType::kInvokeConstructor:
Orion Hodson82b351f2017-07-05 14:34:25 +01009686 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009687 case DexFile::MethodHandleType::kInvokeInterface:
9688 UNREACHABLE();
Orion Hodsonc069a302017-01-18 09:23:12 +00009689 }
9690
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009691 ArtField* target_field =
9692 ResolveField(method_handle.field_or_method_idx_, referrer, is_static);
9693 if (LIKELY(target_field != nullptr)) {
9694 ObjPtr<mirror::Class> target_class = target_field->GetDeclaringClass();
9695 ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
9696 if (UNLIKELY(!referring_class->CanAccessMember(target_class, target_field->GetAccessFlags()))) {
9697 ThrowIllegalAccessErrorField(referring_class, target_field);
9698 return nullptr;
9699 }
Orion Hodsonfd7b2c22018-03-15 15:38:38 +00009700 if (UNLIKELY(is_put && target_field->IsFinal())) {
9701 ThrowIllegalAccessErrorField(referring_class, target_field);
9702 return nullptr;
9703 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009704 } else {
9705 DCHECK(Thread::Current()->IsExceptionPending());
9706 return nullptr;
9707 }
9708
9709 StackHandleScope<4> hs(self);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009710 ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
Orion Hodsonc069a302017-01-18 09:23:12 +00009711 Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
9712 mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
Vladimir Markoa8bba7d2018-05-30 15:18:48 +01009713 if (UNLIKELY(method_params == nullptr)) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009714 DCHECK(self->IsExceptionPending());
9715 return nullptr;
9716 }
9717
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009718 Handle<mirror::Class> constructor_class;
Orion Hodsonc069a302017-01-18 09:23:12 +00009719 Handle<mirror::Class> return_type;
9720 switch (handle_type) {
9721 case DexFile::MethodHandleType::kStaticPut: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009722 method_params->Set(0, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009723 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009724 break;
9725 }
9726 case DexFile::MethodHandleType::kStaticGet: {
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009727 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009728 break;
9729 }
9730 case DexFile::MethodHandleType::kInstancePut: {
Orion Hodson631827d2017-04-10 14:53:47 +01009731 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009732 method_params->Set(1, target_field->ResolveType());
Vladimir Marko9186b182018-11-06 14:55:54 +00009733 return_type = hs.NewHandle(GetClassRoot(ClassRoot::kPrimitiveVoid, this));
Orion Hodsonc069a302017-01-18 09:23:12 +00009734 break;
9735 }
9736 case DexFile::MethodHandleType::kInstanceGet: {
Orion Hodson631827d2017-04-10 14:53:47 +01009737 method_params->Set(0, target_field->GetDeclaringClass());
Vladimir Marko4098a7a2017-11-06 16:00:51 +00009738 return_type = hs.NewHandle(target_field->ResolveType());
Orion Hodsonc069a302017-01-18 09:23:12 +00009739 break;
9740 }
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009741 case DexFile::MethodHandleType::kInvokeStatic:
Orion Hodson631827d2017-04-10 14:53:47 +01009742 case DexFile::MethodHandleType::kInvokeInstance:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009743 case DexFile::MethodHandleType::kInvokeConstructor:
9744 case DexFile::MethodHandleType::kInvokeDirect:
Orion Hodson631827d2017-04-10 14:53:47 +01009745 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009746 UNREACHABLE();
9747 }
9748
9749 for (int32_t i = 0; i < num_params; ++i) {
9750 if (UNLIKELY(method_params->Get(i) == nullptr)) {
9751 DCHECK(self->IsExceptionPending());
9752 return nullptr;
Orion Hodsonc069a302017-01-18 09:23:12 +00009753 }
9754 }
9755
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009756 if (UNLIKELY(return_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009757 DCHECK(self->IsExceptionPending());
9758 return nullptr;
9759 }
9760
9761 Handle<mirror::MethodType>
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009762 method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
9763 if (UNLIKELY(method_type.IsNull())) {
Orion Hodsonc069a302017-01-18 09:23:12 +00009764 DCHECK(self->IsExceptionPending());
9765 return nullptr;
9766 }
Orion Hodson631827d2017-04-10 14:53:47 +01009767
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009768 uintptr_t target = reinterpret_cast<uintptr_t>(target_field);
9769 return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
9770}
9771
// Resolves a constant MethodHandle that targets a method (invoke-static/
// instance/constructor/direct/interface). Resolves the target method with the
// appropriate invoke type, performs access checks, builds the MethodType from
// the method's proto (plus a receiver for instance kinds), and creates the
// MethodHandle. Constructors are special-cased via MethodHandles.Lookup.
// Returns null with a pending exception on failure.
ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandleForMethod(
    Thread* self,
    const dex::MethodHandleItem& method_handle,
    ArtMethod* referrer) {
  DexFile::MethodHandleType handle_type =
      static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_);
  mirror::MethodHandle::Kind kind;
  uint32_t receiver_count = 0;  // 1 for instance-style kinds, else 0.
  ArtMethod* target_method = nullptr;
  switch (handle_type) {
    case DexFile::MethodHandleType::kStaticPut:
    case DexFile::MethodHandleType::kStaticGet:
    case DexFile::MethodHandleType::kInstancePut:
    case DexFile::MethodHandleType::kInstanceGet:
      // Field-targeting handles are resolved by ResolveMethodHandleForField.
      UNREACHABLE();
    case DexFile::MethodHandleType::kInvokeStatic: {
      kind = mirror::MethodHandle::Kind::kInvokeStatic;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kStatic);
      break;
    }
    case DexFile::MethodHandleType::kInvokeInstance: {
      kind = mirror::MethodHandle::Kind::kInvokeVirtual;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kVirtual);
      break;
    }
    case DexFile::MethodHandleType::kInvokeConstructor: {
      // Constructors are currently implemented as a transform. They
      // are special cased later in this method.
      kind = mirror::MethodHandle::Kind::kInvokeTransform;
      receiver_count = 0;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kDirect);
      break;
    }
    case DexFile::MethodHandleType::kInvokeDirect: {
      kind = mirror::MethodHandle::Kind::kInvokeDirect;
      receiver_count = 1;
      StackHandleScope<2> hs(self);
      // A constant method handle with type kInvokeDirect can refer to
      // a method that is private or to a method in a super class. To
      // disambiguate the two options, we resolve the method ignoring
      // the invocation type to determine if the method is private. We
      // then resolve again specifying the intended invocation type to
      // force the appropriate checks.
      target_method = ResolveMethodWithoutInvokeType(method_handle.field_or_method_idx_,
                                                     hs.NewHandle(referrer->GetDexCache()),
                                                     hs.NewHandle(referrer->GetClassLoader()));
      if (UNLIKELY(target_method == nullptr)) {
        break;
      }

      if (target_method->IsPrivate()) {
        kind = mirror::MethodHandle::Kind::kInvokeDirect;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kDirect);
      } else {
        kind = mirror::MethodHandle::Kind::kInvokeSuper;
        target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                              method_handle.field_or_method_idx_,
                                                              referrer,
                                                              InvokeType::kSuper);
        if (UNLIKELY(target_method == nullptr)) {
          break;
        }
        // Find the method specified in the parent in referring class
        // so invoke-super invokes the method in the parent of the
        // referrer.
        target_method =
            referrer->GetDeclaringClass()->FindVirtualMethodForVirtual(target_method,
                                                                       kRuntimePointerSize);
      }
      break;
    }
    case DexFile::MethodHandleType::kInvokeInterface: {
      kind = mirror::MethodHandle::Kind::kInvokeInterface;
      receiver_count = 1;
      target_method = ResolveMethod<ResolveMode::kNoChecks>(self,
                                                            method_handle.field_or_method_idx_,
                                                            referrer,
                                                            InvokeType::kInterface);
      break;
    }
  }

  if (UNLIKELY(target_method == nullptr)) {
    DCHECK(Thread::Current()->IsExceptionPending());
    return nullptr;
  }

  ObjPtr<mirror::Class> target_class = target_method->GetDeclaringClass();
  ObjPtr<mirror::Class> referring_class = referrer->GetDeclaringClass();
  uint32_t access_flags = target_method->GetAccessFlags();
  if (UNLIKELY(!referring_class->CanAccessMember(target_class, access_flags))) {
    ThrowIllegalAccessErrorMethod(referring_class, target_method);
    return nullptr;
  }

  // Calculate the number of parameters from the method shorty. We add the
  // receiver count (0 or 1) and deduct one for the return value.
  uint32_t shorty_length;
  target_method->GetShorty(&shorty_length);
  int32_t num_params = static_cast<int32_t>(shorty_length + receiver_count - 1);

  // Handles: method_params, return_type, method_type, constructor_class, lookup.
  StackHandleScope<5> hs(self);
  ObjPtr<mirror::Class> array_of_class = GetClassRoot<mirror::ObjectArray<mirror::Class>>(this);
  Handle<mirror::ObjectArray<mirror::Class>> method_params(hs.NewHandle(
      mirror::ObjectArray<mirror::Class>::Alloc(self, array_of_class, num_params)));
  if (method_params.Get() == nullptr) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  const DexFile* dex_file = referrer->GetDexFile();
  const dex::MethodId& method_id = dex_file->GetMethodId(method_handle.field_or_method_idx_);
  int32_t index = 0;
  if (receiver_count != 0) {
    // Insert receiver. Use the class identified in the method handle rather than the declaring
    // class of the resolved method which may be super class or default interface method
    // (b/115964401).
    ObjPtr<mirror::Class> receiver_class = LookupResolvedType(method_id.class_idx_, referrer);
    // receiver_class should have been resolved when resolving the target method.
    DCHECK(receiver_class != nullptr);
    method_params->Set(index++, receiver_class);
  }

  // Resolve each declared parameter type from the proto.
  const dex::ProtoId& proto_id = dex_file->GetProtoId(method_id.proto_idx_);
  DexFileParameterIterator it(*dex_file, proto_id);
  while (it.HasNext()) {
    DCHECK_LT(index, num_params);
    const dex::TypeIndex type_idx = it.GetTypeIdx();
    ObjPtr<mirror::Class> klass = ResolveType(type_idx, referrer);
    if (nullptr == klass) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
    method_params->Set(index++, klass);
    it.Next();
  }

  Handle<mirror::Class> return_type =
      hs.NewHandle(ResolveType(proto_id.return_type_idx_, referrer));
  if (UNLIKELY(return_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  Handle<mirror::MethodType>
      method_type(hs.NewHandle(mirror::MethodType::Create(self, return_type, method_params)));
  if (UNLIKELY(method_type.IsNull())) {
    DCHECK(self->IsExceptionPending());
    return nullptr;
  }

  // Constructors go through the default MethodHandles.Lookup rather than a
  // direct MethodHandleImpl.
  if (UNLIKELY(handle_type == DexFile::MethodHandleType::kInvokeConstructor)) {
    Handle<mirror::Class> constructor_class = hs.NewHandle(target_method->GetDeclaringClass());
    Handle<mirror::MethodHandlesLookup> lookup =
        hs.NewHandle(mirror::MethodHandlesLookup::GetDefault(self));
    return lookup->FindConstructor(self, constructor_class, method_type);
  }

  uintptr_t target = reinterpret_cast<uintptr_t>(target_method);
  return mirror::MethodHandleImpl::Create(self, target, kind, method_type);
}
9947
Vladimir Markoaf940202017-12-08 15:01:18 +00009948ObjPtr<mirror::MethodHandle> ClassLinker::ResolveMethodHandle(Thread* self,
9949 uint32_t method_handle_idx,
9950 ArtMethod* referrer)
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009951 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009952 const DexFile* const dex_file = referrer->GetDexFile();
Andreas Gampe3f1dcd32018-12-28 09:39:56 -08009953 const dex::MethodHandleItem& method_handle = dex_file->GetMethodHandle(method_handle_idx);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009954 switch (static_cast<DexFile::MethodHandleType>(method_handle.method_handle_type_)) {
9955 case DexFile::MethodHandleType::kStaticPut:
9956 case DexFile::MethodHandleType::kStaticGet:
9957 case DexFile::MethodHandleType::kInstancePut:
9958 case DexFile::MethodHandleType::kInstanceGet:
9959 return ResolveMethodHandleForField(self, method_handle, referrer);
9960 case DexFile::MethodHandleType::kInvokeStatic:
9961 case DexFile::MethodHandleType::kInvokeInstance:
9962 case DexFile::MethodHandleType::kInvokeConstructor:
9963 case DexFile::MethodHandleType::kInvokeDirect:
9964 case DexFile::MethodHandleType::kInvokeInterface:
Orion Hodsonda1cdd02018-01-31 18:08:28 +00009965 return ResolveMethodHandleForMethod(self, method_handle, referrer);
Orion Hodsonf8db2c32017-07-07 20:07:12 +01009966 }
Orion Hodsonc069a302017-01-18 09:23:12 +00009967}
9968
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009969bool ClassLinker::IsQuickResolutionStub(const void* entry_point) const {
9970 return (entry_point == GetQuickResolutionStub()) ||
9971 (quick_resolution_trampoline_ == entry_point);
9972}
9973
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009974bool ClassLinker::IsQuickToInterpreterBridge(const void* entry_point) const {
9975 return (entry_point == GetQuickToInterpreterBridge()) ||
9976 (quick_to_interpreter_bridge_trampoline_ == entry_point);
9977}
9978
9979bool ClassLinker::IsQuickGenericJniStub(const void* entry_point) const {
9980 return (entry_point == GetQuickGenericJniStub()) ||
9981 (quick_generic_jni_trampoline_ == entry_point);
9982}
9983
David Sehra49e0532017-08-25 08:05:29 -07009984bool ClassLinker::IsJniDlsymLookupStub(const void* entry_point) const {
Vladimir Marko7dac8642019-11-06 17:09:30 +00009985 return entry_point == GetJniDlsymLookupStub() ||
9986 (jni_dlsym_lookup_trampoline_ == entry_point);
David Sehra49e0532017-08-25 08:05:29 -07009987}
9988
Vladimir Markofa458ac2020-02-12 14:08:07 +00009989bool ClassLinker::IsJniDlsymLookupCriticalStub(const void* entry_point) const {
9990 return entry_point == GetJniDlsymLookupCriticalStub() ||
9991 (jni_dlsym_lookup_critical_trampoline_ == entry_point);
9992}
9993
Ian Rogers6f3dbba2014-10-14 17:41:57 -07009994const void* ClassLinker::GetRuntimeQuickGenericJniStub() const {
9995 return GetQuickGenericJniStub();
9996}
9997
Alex Lightdb01a092017-04-03 15:39:55 -07009998void ClassLinker::SetEntryPointsForObsoleteMethod(ArtMethod* method) const {
9999 DCHECK(method->IsObsolete());
10000 // We cannot mess with the entrypoints of native methods because they are used to determine how
10001 // large the method's quick stack frame is. Without this information we cannot walk the stacks.
10002 if (!method->IsNative()) {
10003 method->SetEntryPointFromQuickCompiledCode(GetInvokeObsoleteMethodStub());
10004 }
10005}
10006
// Writes a human-readable summary of class-linker state to |os| for SIGQUIT
// (ANR) dumps: zygote/non-zygote class counts, every registered class loader
// with the dex file locations feeding its class table, each loader's parent
// link, and global class-initialization statistics.
void ClassLinker::DumpForSigQuit(std::ostream& os) {
  ScopedObjectAccess soa(Thread::Current());
  ReaderMutexLock mu(soa.Self(), *Locks::classlinker_classes_lock_);
  os << "Zygote loaded classes=" << NumZygoteClasses() << " post zygote classes="
     << NumNonZygoteClasses() << "\n";
  // dex_lock_ guards dex_caches_, which is iterated below; note it is taken
  // after classlinker_classes_lock_ and held for the rest of the dump.
  ReaderMutexLock mu2(soa.Self(), *Locks::dex_lock_);
  os << "Dumping registered class loaders\n";
  size_t class_loader_index = 0;
  for (const ClassLoaderData& class_loader : class_loaders_) {
    // DecodeJObject returns null for cleared JNI weak globals, i.e. loaders
    // that have been unloaded but not yet cleaned up.
    ObjPtr<mirror::ClassLoader> loader =
        ObjPtr<mirror::ClassLoader>::DownCast(soa.Self()->DecodeJObject(class_loader.weak_root));
    if (loader != nullptr) {
      os << "#" << class_loader_index++ << " " << loader->GetClass()->PrettyDescriptor() << ": [";
      bool saw_one_dex_file = false;
      // A dex cache belongs to this loader iff it shares the loader's class table.
      for (const auto& entry : dex_caches_) {
        const DexCacheData& dex_cache = entry.second;
        if (dex_cache.class_table == class_loader.class_table) {
          if (saw_one_dex_file) {
            os << ":";
          }
          saw_one_dex_file = true;
          os << entry.first->GetLocation();
        }
      }
      os << "]";
      // Identify the parent loader by its index in class_loaders_, if registered.
      bool found_parent = false;
      if (loader->GetParent() != nullptr) {
        size_t parent_index = 0;
        for (const ClassLoaderData& class_loader2 : class_loaders_) {
          ObjPtr<mirror::ClassLoader> loader2 = ObjPtr<mirror::ClassLoader>::DownCast(
              soa.Self()->DecodeJObject(class_loader2.weak_root));
          if (loader2 == loader->GetParent()) {
            os << ", parent #" << parent_index;
            found_parent = true;
            break;
          }
          parent_index++;
        }
        if (!found_parent) {
          os << ", unregistered parent of type "
             << loader->GetParent()->GetClass()->PrettyDescriptor();
        }
      } else {
        os << ", no parent";
      }
      os << "\n";
    }
  }
  os << "Done dumping class loaders\n";
  Runtime* runtime = Runtime::Current();
  os << "Classes initialized: " << runtime->GetStat(KIND_GLOBAL_CLASS_INIT_COUNT) << " in "
     << PrettyDuration(runtime->GetStat(KIND_GLOBAL_CLASS_INIT_TIME)) << "\n";
}
10060
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010061class CountClassesVisitor : public ClassLoaderVisitor {
10062 public:
10063 CountClassesVisitor() : num_zygote_classes(0), num_non_zygote_classes(0) {}
10064
Mathieu Chartier28357fa2016-10-18 16:27:40 -070010065 void Visit(ObjPtr<mirror::ClassLoader> class_loader)
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010010066 REQUIRES_SHARED(Locks::classlinker_classes_lock_, Locks::mutator_lock_) override {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010067 ClassTable* const class_table = class_loader->GetClassTable();
Mathieu Chartier6b069532015-08-05 15:08:12 -070010068 if (class_table != nullptr) {
Vladimir Markoc5798bf2016-12-09 10:20:54 +000010069 num_zygote_classes += class_table->NumZygoteClasses(class_loader);
10070 num_non_zygote_classes += class_table->NumNonZygoteClasses(class_loader);
Mathieu Chartier6b069532015-08-05 15:08:12 -070010071 }
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -070010072 }
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010073
10074 size_t num_zygote_classes;
10075 size_t num_non_zygote_classes;
10076};
10077
10078size_t ClassLinker::NumZygoteClasses() const {
10079 CountClassesVisitor visitor;
10080 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -070010081 return visitor.num_zygote_classes + boot_class_table_->NumZygoteClasses(nullptr);
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -070010082}
10083
10084size_t ClassLinker::NumNonZygoteClasses() const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010085 CountClassesVisitor visitor;
10086 VisitClassLoaders(&visitor);
Andreas Gampe2af99022017-04-25 08:32:59 -070010087 return visitor.num_non_zygote_classes + boot_class_table_->NumNonZygoteClasses(nullptr);
Elliott Hughescac6cc72011-11-03 20:31:21 -070010088}
10089
Ian Rogers7dfb28c2013-08-22 08:18:36 -070010090size_t ClassLinker::NumLoadedClasses() {
Ian Rogers1bf8d4d2013-05-30 00:18:49 -070010091 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
Mathieu Chartierc2e20622014-11-03 11:41:47 -080010092 // Only return non zygote classes since these are the ones which apps which care about.
Mathieu Chartiercc5ebdf2015-07-27 11:19:43 -070010093 return NumNonZygoteClasses();
Elliott Hughese27955c2011-08-26 15:21:24 -070010094}
10095
Brian Carlstrom47d237a2011-10-18 15:08:33 -070010096pid_t ClassLinker::GetClassesLockOwner() {
Ian Rogersb726dcb2012-09-05 08:57:23 -070010097 return Locks::classlinker_classes_lock_->GetExclusiveOwnerTid();
Brian Carlstrom47d237a2011-10-18 15:08:33 -070010098}
10099
10100pid_t ClassLinker::GetDexLockOwner() {
Andreas Gampecc1b5352016-12-01 16:58:38 -080010101 return Locks::dex_lock_->GetExclusiveOwnerTid();
Brian Carlstrom24a3c2e2011-10-17 18:07:52 -070010102}
10103
Mathieu Chartier28357fa2016-10-18 16:27:40 -070010104void ClassLinker::SetClassRoot(ClassRoot class_root, ObjPtr<mirror::Class> klass) {
Ian Rogers6d4d9fc2011-11-30 16:24:48 -080010105 DCHECK(!init_done_);
10106
Andreas Gampe2ed8def2014-08-28 14:41:02 -070010107 DCHECK(klass != nullptr);
10108 DCHECK(klass->GetClassLoader() == nullptr);
Ian Rogers6d4d9fc2011-11-30 16:24:48 -080010109
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070010110 mirror::ObjectArray<mirror::Class>* class_roots = class_roots_.Read();
Andreas Gampe2ed8def2014-08-28 14:41:02 -070010111 DCHECK(class_roots != nullptr);
Vladimir Markob4eb1b12018-05-24 11:09:38 +010010112 DCHECK_LT(static_cast<uint32_t>(class_root), static_cast<uint32_t>(ClassRoot::kMax));
10113 int32_t index = static_cast<int32_t>(class_root);
10114 DCHECK(class_roots->Get(index) == nullptr);
10115 class_roots->Set<false>(index, klass);
Ian Rogers6f3dbba2014-10-14 17:41:57 -070010116}
10117
// Builds a managed class loader object of one of the well-known types
// (PathClassLoader, DelegateLastClassLoader, InMemoryDexClassLoader) wired up
// to |dex_files|, without running any Java constructors. The object graph
// (loader -> DexPathList -> Element[] -> DexFile, plus parent and shared
// library arrays) is assembled directly through field writes. Used by gtests
// and the compiler only; the resulting objects deliberately violate some
// Java-level invariants that a started runtime would establish.
ObjPtr<mirror::ClassLoader> ClassLinker::CreateWellKnownClassLoader(
    Thread* self,
    const std::vector<const DexFile*>& dex_files,
    Handle<mirror::Class> loader_class,
    Handle<mirror::ClassLoader> parent_loader,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries,
    Handle<mirror::ObjectArray<mirror::ClassLoader>> shared_libraries_after) {
  // Only the three supported loader types may be requested.
  CHECK(loader_class.Get() == WellKnownClasses::dalvik_system_PathClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_DelegateLastClassLoader ||
        loader_class.Get() == WellKnownClasses::dalvik_system_InMemoryDexClassLoader);

  StackHandleScope<5> hs(self);

  ArtField* dex_elements_field = WellKnownClasses::dalvik_system_DexPathList_dexElements;

  // Element[] array that will back DexPathList.dexElements.
  Handle<mirror::Class> dex_elements_class(hs.NewHandle(dex_elements_field->ResolveType()));
  DCHECK(dex_elements_class != nullptr);
  DCHECK(dex_elements_class->IsArrayClass());
  Handle<mirror::ObjectArray<mirror::Object>> h_dex_elements(hs.NewHandle(
      mirror::ObjectArray<mirror::Object>::Alloc(self,
                                                 dex_elements_class.Get(),
                                                 dex_files.size())));
  Handle<mirror::Class> h_dex_element_class =
      hs.NewHandle(dex_elements_class->GetComponentType());

  // Fields written below; the DCHECKs confirm they live on the expected classes.
  ArtField* element_file_field = WellKnownClasses::dalvik_system_DexPathList__Element_dexFile;
  DCHECK_EQ(h_dex_element_class.Get(), element_file_field->GetDeclaringClass());

  ArtField* cookie_field = WellKnownClasses::dalvik_system_DexFile_cookie;
  DCHECK_EQ(cookie_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  ArtField* file_name_field = WellKnownClasses::dalvik_system_DexFile_fileName;
  DCHECK_EQ(file_name_field->GetDeclaringClass(), element_file_field->LookupResolvedType());

  // Fill the elements array: one Element -> DexFile -> long[] cookie per dex file.
  int32_t index = 0;
  for (const DexFile* dex_file : dex_files) {
    StackHandleScope<4> hs2(self);

    // CreateWellKnownClassLoader is only used by gtests and compiler.
    // Index 0 of h_long_array is supposed to be the oat file but we can leave it null.
    Handle<mirror::LongArray> h_long_array = hs2.NewHandle(mirror::LongArray::Alloc(
        self,
        kDexFileIndexStart + 1));
    DCHECK(h_long_array != nullptr);
    // Store the native DexFile pointer in the cookie array.
    h_long_array->Set(kDexFileIndexStart, reinterpret_cast64<int64_t>(dex_file));

    // Note that this creates a finalizable dalvik.system.DexFile object and a corresponding
    // FinalizerReference which will never get cleaned up without a started runtime.
    Handle<mirror::Object> h_dex_file = hs2.NewHandle(
        cookie_field->GetDeclaringClass()->AllocObject(self));
    DCHECK(h_dex_file != nullptr);
    cookie_field->SetObject<false>(h_dex_file.Get(), h_long_array.Get());

    Handle<mirror::String> h_file_name = hs2.NewHandle(
        mirror::String::AllocFromModifiedUtf8(self, dex_file->GetLocation().c_str()));
    DCHECK(h_file_name != nullptr);
    file_name_field->SetObject<false>(h_dex_file.Get(), h_file_name.Get());

    Handle<mirror::Object> h_element = hs2.NewHandle(h_dex_element_class->AllocObject(self));
    DCHECK(h_element != nullptr);
    element_file_field->SetObject<false>(h_element.Get(), h_dex_file.Get());

    h_dex_elements->Set(index, h_element.Get());
    index++;
  }
  DCHECK_EQ(index, h_dex_elements->GetLength());

  // Create DexPathList.
  Handle<mirror::Object> h_dex_path_list = hs.NewHandle(
      dex_elements_field->GetDeclaringClass()->AllocObject(self));
  DCHECK(h_dex_path_list != nullptr);
  // Set elements.
  dex_elements_field->SetObject<false>(h_dex_path_list.Get(), h_dex_elements.Get());
  // Create an empty List for the "nativeLibraryDirectories," required for native tests.
  // Note: this code is uncommon(oatdump)/testing-only, so don't add further WellKnownClasses
  // elements.
  {
    ArtField* native_lib_dirs = dex_elements_field->GetDeclaringClass()->
        FindDeclaredInstanceField("nativeLibraryDirectories", "Ljava/util/List;");
    DCHECK(native_lib_dirs != nullptr);
    ObjPtr<mirror::Class> list_class = FindSystemClass(self, "Ljava/util/ArrayList;");
    DCHECK(list_class != nullptr);
    {
      // Initialize ArrayList under a handle so GC can move list_class safely.
      StackHandleScope<1> h_list_scope(self);
      Handle<mirror::Class> h_list_class(h_list_scope.NewHandle<mirror::Class>(list_class));
      bool list_init = EnsureInitialized(self, h_list_class, true, true);
      DCHECK(list_init);
      list_class = h_list_class.Get();
    }
    ObjPtr<mirror::Object> list_object = list_class->AllocObject(self);
    // Note: we leave the object uninitialized. This must never leak into any non-testing code, but
    // is fine for testing. While it violates a Java-code invariant (the elementData field is
    // normally never null), as long as one does not try to add elements, this will still
    // work.
    native_lib_dirs->SetObject<false>(h_dex_path_list.Get(), list_object);
  }

  // Create the class loader (again without running its Java constructor).
  Handle<mirror::ClassLoader> h_class_loader = hs.NewHandle<mirror::ClassLoader>(
      ObjPtr<mirror::ClassLoader>::DownCast(loader_class->AllocObject(self)));
  DCHECK(h_class_loader != nullptr);
  // Set DexPathList.
  ArtField* path_list_field = WellKnownClasses::dalvik_system_BaseDexClassLoader_pathList;
  DCHECK(path_list_field != nullptr);
  path_list_field->SetObject<false>(h_class_loader.Get(), h_dex_path_list.Get());

  // Make a pretend boot-classpath.
  // TODO: Should we scan the image?
  ArtField* const parent_field = WellKnownClasses::java_lang_ClassLoader_parent;
  DCHECK(parent_field != nullptr);
  if (parent_loader.Get() == nullptr) {
    // No explicit parent: synthesize a BootClassLoader as parent.
    ObjPtr<mirror::Object> boot_loader(
        WellKnownClasses::java_lang_BootClassLoader->AllocObject(self));
    parent_field->SetObject<false>(h_class_loader.Get(), boot_loader);
  } else {
    parent_field->SetObject<false>(h_class_loader.Get(), parent_loader.Get());
  }

  // Install the before/after shared library loader arrays (may be null).
  ArtField* shared_libraries_field =
      WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoaders;
  DCHECK(shared_libraries_field != nullptr);
  shared_libraries_field->SetObject<false>(h_class_loader.Get(), shared_libraries.Get());

  ArtField* shared_libraries_after_field =
      WellKnownClasses::dalvik_system_BaseDexClassLoader_sharedLibraryLoadersAfter;
  DCHECK(shared_libraries_after_field != nullptr);
  shared_libraries_after_field->SetObject<false>(h_class_loader.Get(),
                                                 shared_libraries_after.Get());
  return h_class_loader.Get();
}
10249
Calin Juravle7865ac72017-06-28 11:03:12 -070010250jobject ClassLinker::CreatePathClassLoader(Thread* self,
10251 const std::vector<const DexFile*>& dex_files) {
Vladimir Markob6f965d2022-11-23 14:24:45 +000010252 StackHandleScope<3u> hs(self);
10253 Handle<mirror::Class> d_s_pcl =
10254 hs.NewHandle(WellKnownClasses::dalvik_system_PathClassLoader.Get());
10255 auto null_parent = hs.NewHandle<mirror::ClassLoader>(nullptr);
10256 auto null_libs = hs.NewHandle<mirror::ObjectArray<mirror::ClassLoader>>(nullptr);
10257 ObjPtr<mirror::ClassLoader> class_loader =
10258 CreateWellKnownClassLoader(self, dex_files, d_s_pcl, null_parent, null_libs, null_libs);
10259 return Runtime::Current()->GetJavaVM()->AddGlobalRef(self, class_loader);
Calin Juravle7865ac72017-06-28 11:03:12 -070010260}
10261
Andreas Gampe8ac75952015-06-02 21:01:45 -070010262void ClassLinker::DropFindArrayClassCache() {
10263 std::fill_n(find_array_class_cache_, kFindArrayCacheSize, GcRoot<mirror::Class>(nullptr));
10264 find_array_class_cache_next_victim_ = 0;
10265}
10266
Mathieu Chartier951ec2c2015-09-22 08:50:05 -070010267void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010268 Thread* const self = Thread::Current();
Mathieu Chartier951ec2c2015-09-22 08:50:05 -070010269 for (const ClassLoaderData& data : class_loaders_) {
Mathieu Chartier4843bd52015-10-01 17:08:44 -070010270 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
Mathieu Chartierc4f39252016-10-05 18:32:08 -070010271 ObjPtr<mirror::ClassLoader> class_loader = ObjPtr<mirror::ClassLoader>::DownCast(
10272 self->DecodeJObject(data.weak_root));
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010273 if (class_loader != nullptr) {
Vladimir Markod93e3742018-07-18 10:58:13 +010010274 visitor->Visit(class_loader);
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070010275 }
10276 }
10277}
10278
Lokesh Gidrab7607c22022-01-28 12:30:31 -080010279void ClassLinker::VisitDexCaches(DexCacheVisitor* visitor) const {
10280 Thread* const self = Thread::Current();
10281 for (const auto& it : dex_caches_) {
10282 // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
10283 ObjPtr<mirror::DexCache> dex_cache = ObjPtr<mirror::DexCache>::DownCast(
10284 self->DecodeJObject(it.second.weak_root));
10285 if (dex_cache != nullptr) {
10286 visitor->Visit(dex_cache);
10287 }
10288 }
10289}
10290
Alexey Grebenkin252a4e42018-04-02 18:18:01 +030010291void ClassLinker::VisitAllocators(AllocatorVisitor* visitor) const {
10292 for (const ClassLoaderData& data : class_loaders_) {
10293 LinearAlloc* alloc = data.allocator;
10294 if (alloc != nullptr && !visitor->Visit(alloc)) {
10295 break;
10296 }
10297 }
10298}
10299
Mathieu Chartierbc5a7952016-10-17 15:46:31 -070010300void ClassLinker::InsertDexFileInToClassLoader(ObjPtr<mirror::Object> dex_file,
10301 ObjPtr<mirror::ClassLoader> class_loader) {
Mathieu Chartier00310e02015-10-17 12:46:42 -070010302 DCHECK(dex_file != nullptr);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010303 Thread* const self = Thread::Current();
10304 WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
Vladimir Markobcf17522018-06-01 13:14:32 +010010305 ClassTable* const table = ClassTableForClassLoader(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010306 DCHECK(table != nullptr);
Mathieu Chartierc9dbb1d2016-06-03 17:47:32 -070010307 if (table->InsertStrongRoot(dex_file) && class_loader != nullptr) {
Mathieu Chartier00310e02015-10-17 12:46:42 -070010308 // It was not already inserted, perform the write barrier to let the GC know the class loader's
10309 // class table was modified.
Mathieu Chartier88ea61e2018-06-20 17:45:41 -070010310 WriteBarrier::ForEveryFieldWrite(class_loader);
Mathieu Chartier00310e02015-10-17 12:46:42 -070010311 }
10312}
10313
// Reclaims state for class loaders whose JNI weak globals have been cleared
// by the GC: splices their ClassLoaderData out of class_loaders_, drops the
// dex caches that belonged to them, deletes the loader data (including CHA
// cleanup), and finally tells the runtime about any executable oat-file code
// ranges that are no longer registered.
void ClassLinker::CleanupClassLoaders() {
  Thread* const self = Thread::Current();
  std::list<ClassLoaderData> to_delete;
  // Do the delete outside the lock to avoid lock violation in jit code cache.
  {
    WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
      // Advance before a possible splice so the iterator stays valid.
      auto this_it = it;
      ++it;
      const ClassLoaderData& data = *this_it;
      // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
      ObjPtr<mirror::ClassLoader> class_loader =
          ObjPtr<mirror::ClassLoader>::DownCast(self->DecodeJObject(data.weak_root));
      if (class_loader == nullptr) {
        VLOG(class_linker) << "Freeing class loader";
        // splice moves the node without copying or invalidating other iterators.
        to_delete.splice(to_delete.end(), class_loaders_, this_it);
      }
    }
  }
  std::set<const OatFile*> unregistered_oat_files;
  if (!to_delete.empty()) {
    JavaVMExt* vm = self->GetJniEnv()->GetVm();
    WriterMutexLock mu(self, *Locks::dex_lock_);
    // Erase dex cache entries whose weak roots were cleared alongside a loader.
    for (auto it = dex_caches_.begin(), end = dex_caches_.end(); it != end; ) {
      const DexFile* dex_file = it->first;
      const DexCacheData& data = it->second;
      if (self->DecodeJObject(data.weak_root) == nullptr) {
        // Sanity: the dead cache's class table must belong to a dying loader.
        DCHECK(to_delete.end() != std::find_if(
            to_delete.begin(),
            to_delete.end(),
            [&](const ClassLoaderData& cld) { return cld.class_table == data.class_table; }));
        // Remember executable oat files so their code ranges can be
        // unregistered below, after all bookkeeping is done.
        if (dex_file->GetOatDexFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile() != nullptr &&
            dex_file->GetOatDexFile()->GetOatFile()->IsExecutable()) {
          unregistered_oat_files.insert(dex_file->GetOatDexFile()->GetOatFile());
        }
        vm->DeleteWeakGlobalRef(self, data.weak_root);
        it = dex_caches_.erase(it);
      } else {
        ++it;
      }
    }
  }
  {
    ScopedDebugDisallowReadBarriers sddrb(self);
    for (ClassLoaderData& data : to_delete) {
      // CHA unloading analysis and SingleImplementaion cleanups are required.
      DeleteClassLoader(self, data, /*cleanup_cha=*/ true);
    }
  }
  if (!unregistered_oat_files.empty()) {
    for (const OatFile* oat_file : unregistered_oat_files) {
      // Notify the fault handler about removal of the executable code range if needed.
      DCHECK(oat_file->IsExecutable());
      size_t exec_offset = oat_file->GetOatHeader().GetExecutableOffset();
      DCHECK_LE(exec_offset, oat_file->Size());
      size_t exec_size = oat_file->Size() - exec_offset;
      if (exec_size != 0u) {
        Runtime::Current()->RemoveGeneratedCodeRange(oat_file->Begin() + exec_offset, exec_size);
      }
    }
  }
}
10377
Mathieu Chartier65975772016-08-05 10:46:36 -070010378class ClassLinker::FindVirtualMethodHolderVisitor : public ClassVisitor {
10379 public:
10380 FindVirtualMethodHolderVisitor(const ArtMethod* method, PointerSize pointer_size)
10381 : method_(method),
10382 pointer_size_(pointer_size) {}
10383
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010010384 bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) override {
Mathieu Chartier65975772016-08-05 10:46:36 -070010385 if (klass->GetVirtualMethodsSliceUnchecked(pointer_size_).Contains(method_)) {
10386 holder_ = klass;
10387 }
10388 // Return false to stop searching if holder_ is not null.
10389 return holder_ == nullptr;
10390 }
10391
Mathieu Chartier28357fa2016-10-18 16:27:40 -070010392 ObjPtr<mirror::Class> holder_ = nullptr;
Mathieu Chartier65975772016-08-05 10:46:36 -070010393 const ArtMethod* const method_;
10394 const PointerSize pointer_size_;
10395};
10396
Vladimir Markoa8bba7d2018-05-30 15:18:48 +010010397ObjPtr<mirror::Class> ClassLinker::GetHoldingClassOfCopiedMethod(ArtMethod* method) {
Mathieu Chartier65975772016-08-05 10:46:36 -070010398 ScopedTrace trace(__FUNCTION__); // Since this function is slow, have a trace to notify people.
10399 CHECK(method->IsCopied());
10400 FindVirtualMethodHolderVisitor visitor(method, image_pointer_size_);
10401 VisitClasses(&visitor);
Vladimir Markod80bbba2022-12-20 16:25:58 +000010402 DCHECK(visitor.holder_ != nullptr);
Vladimir Markoa8bba7d2018-05-30 15:18:48 +010010403 return visitor.holder_;
Mathieu Chartier65975772016-08-05 10:46:36 -070010404}
10405
// Finds the class loader that owns the memory holding the copied |method|,
// without the slow full-class scan: checks boot image / boot linear alloc,
// then app image method sections, then per-loader LinearAllocs. Debug builds
// cross-check each answer against GetHoldingClassOfCopiedMethod().
ObjPtr<mirror::ClassLoader> ClassLinker::GetHoldingClassLoaderOfCopiedMethod(Thread* self,
                                                                             ArtMethod* method) {
  // Note: `GetHoldingClassOfCopiedMethod(method)` is a lot more expensive than finding
  // the class loader, so we're using it only to verify the result in debug mode.
  CHECK(method->IsCopied());
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Check if the copied method is in the boot class path.
  if (heap->IsBootImageAddress(method) || GetAllocatorForClassLoader(nullptr)->Contains(method)) {
    DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == nullptr);
    return nullptr;
  }
  // Check if the copied method is in an app image.
  // Note: Continuous spaces contain boot image spaces and app image spaces.
  // However, they are sorted by address, so boot images are not trivial to skip.
  ArrayRef<gc::space::ContinuousSpace* const> spaces(heap->GetContinuousSpaces());
  DCHECK_GE(spaces.size(), heap->GetBootImageSpaces().size());
  for (gc::space::ContinuousSpace* space : spaces) {
    if (space->IsImageSpace()) {
      gc::space::ImageSpace* image_space = space->AsImageSpace();
      // Unsigned offset arithmetic: if |method| is below the space, the
      // subtraction wraps and the range check below still fails.
      size_t offset = reinterpret_cast<const uint8_t*>(method) - image_space->Begin();
      const ImageSection& methods_section = image_space->GetImageHeader().GetMethodsSection();
      if (offset - methods_section.Offset() < methods_section.Size()) {
        // Grab the class loader from the first non-BCP class in the app image class table.
        // Note: If we allow classes from arbitrary parent or library class loaders in app
        // images, this shall need to be updated to actually search for the exact class.
        const ImageSection& class_table_section =
            image_space->GetImageHeader().GetClassTableSection();
        CHECK_NE(class_table_section.Size(), 0u);
        const uint8_t* ptr = image_space->Begin() + class_table_section.Offset();
        size_t read_count = 0;
        ClassTable::ClassSet class_set(ptr, /*make_copy_of_data=*/ false, &read_count);
        CHECK(!class_set.empty());
        auto it = class_set.begin();
        // No read barrier needed for references to non-movable image classes.
        while ((*it).Read<kWithoutReadBarrier>()->IsBootStrapClassLoaded()) {
          ++it;
          CHECK(it != class_set.end());
        }
        ObjPtr<mirror::ClassLoader> class_loader =
            (*it).Read<kWithoutReadBarrier>()->GetClassLoader();
        DCHECK(GetHoldingClassOfCopiedMethod(method)->GetClassLoader() == class_loader);
        return class_loader;
      }
    }
  }
  // Otherwise, the method must be in one of the `LinearAlloc` memory areas.
  jweak result = nullptr;
  {
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    for (const ClassLoaderData& data : class_loaders_) {
      if (data.allocator->Contains(method)) {
        result = data.weak_root;
        break;
      }
    }
  }
  CHECK(result != nullptr) << "Did not find allocator holding the copied method: " << method
                           << " " << method->PrettyMethod();
  // The `method` is alive, so the class loader must also be alive.
  return ObjPtr<mirror::ClassLoader>::DownCast(
      Runtime::Current()->GetJavaVM()->DecodeWeakGlobalAsStrong(result));
}
10468
// Base-class stub. Public-SDK access checks are only meaningful during AOT
// compilation; AotClassLinker overrides this method, and the runtime
// ClassLinker must never reach it.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtMethod* art_method ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10475
// Base-class stub for the field overload; see the ArtMethod* overload above.
// Only AotClassLinker provides a real implementation.
bool ClassLinker::DenyAccessBasedOnPublicSdk(ArtField* art_field ATTRIBUTE_UNUSED) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10482
// Base-class stub for the type-descriptor overload. Note: no mutator-lock
// annotation is needed here since the parameter is a plain C string rather
// than a managed-heap-derived object.
bool ClassLinker::DenyAccessBasedOnPublicSdk(const char* type_descriptor ATTRIBUTE_UNUSED) const {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10488
// Base-class stub: toggling public-SDK checks is only supported by
// AotClassLinker, which overrides this; the runtime ClassLinker must never
// reach it.
void ClassLinker::SetEnablePublicSdkChecks(bool enabled ATTRIBUTE_UNUSED) {
  // Should not be called on ClassLinker, only on AotClassLinker that overrides this.
  LOG(FATAL) << "UNREACHABLE";
  UNREACHABLE();
}
10494
Guo Li1ee02902022-07-13 15:57:31 +080010495void ClassLinker::RemoveDexFromCaches(const DexFile& dex_file) {
10496 ReaderMutexLock mu(Thread::Current(), *Locks::dex_lock_);
10497
10498 auto it = dex_caches_.find(&dex_file);
10499 if (it != dex_caches_.end()) {
10500 dex_caches_.erase(it);
10501 }
10502}
10503
// Instantiate ClassLinker::AllocClass.
// Explicit instantiations for both values of the `kMovable` template
// parameter, so that callers outside this translation unit can link against
// these symbols.
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ true>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
template ObjPtr<mirror::Class> ClassLinker::AllocClass</* kMovable= */ false>(
    Thread* self,
    ObjPtr<mirror::Class> java_lang_Class,
    uint32_t class_size);
10513
Carl Shapiro0e5d75d2011-07-06 18:28:37 -070010514} // namespace art