blob: 6d14f6de7934c4e54a3bda67753ccf428ee26e1c [file] [log] [blame]
Nicolas Geoffray00391822019-12-10 10:17:23 +00001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
/*
 * Nterp entry point and support functions.
 */
Nicolas Geoffray0315efa2020-06-26 11:42:39 +010020#include "nterp.h"
Nicolas Geoffray00391822019-12-10 10:17:23 +000021
22#include "base/quasi_atomic.h"
David Srbeckyfa663892021-10-18 23:08:53 +010023#include "class_linker-inl.h"
Nicolas Geoffray00391822019-12-10 10:17:23 +000024#include "dex/dex_instruction_utils.h"
25#include "debugger.h"
26#include "entrypoints/entrypoint_utils-inl.h"
David Srbecky17958de2022-02-20 15:50:38 +000027#include "interpreter/interpreter_cache-inl.h"
Nicolas Geoffray00391822019-12-10 10:17:23 +000028#include "interpreter/interpreter_common.h"
Nicolas Geoffray00391822019-12-10 10:17:23 +000029#include "interpreter/shadow_frame-inl.h"
30#include "mirror/string-alloc-inl.h"
31#include "nterp_helpers.h"
32
33namespace art {
34namespace interpreter {
35
36bool IsNterpSupported() {
Lokesh Gidrabae279c2022-09-06 09:35:35 -070037 return !kPoisonHeapReferences && kReserveMarkingRegister;
Nicolas Geoffray00391822019-12-10 10:17:23 +000038}
39
40bool CanRuntimeUseNterp() REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray60d4abc2020-07-27 13:58:51 +000041 Runtime* runtime = Runtime::Current();
42 instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
Nicolas Geoffray60d4abc2020-07-27 13:58:51 +000043 // If the runtime is interpreter only, we currently don't use nterp as some
44 // parts of the runtime (like instrumentation) make assumption on an
45 // interpreter-only runtime to always be in a switch-like interpreter.
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +010046 return IsNterpSupported() &&
Mythri Alle57afbc22022-11-07 14:05:27 +000047 !runtime->IsJavaDebuggable() &&
48 !instr->AreExitStubsInstalled() &&
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +010049 !instr->InterpretOnly() &&
50 !runtime->IsAotCompiler() &&
Mythri Alle57afbc22022-11-07 14:05:27 +000051 !instr->NeedsSlowInterpreterForListeners() &&
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +010052 // An async exception has been thrown. We need to go to the switch interpreter. nterp doesn't
53 // know how to deal with these so we could end up never dealing with it if we are in an
54 // infinite loop.
55 !runtime->AreAsyncExceptionsThrown() &&
56 (runtime->GetJit() == nullptr || !runtime->GetJit()->JitAtFirstUse());
Nicolas Geoffray00391822019-12-10 10:17:23 +000057}
58
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +010059// The entrypoint for nterp, which ArtMethods can directly point to.
60extern "C" void ExecuteNterpImpl() REQUIRES_SHARED(Locks::mutator_lock_);
61
Nicolas Geoffray8af7f912022-07-06 15:29:16 +010062// Another entrypoint, which does a clinit check at entry.
63extern "C" void ExecuteNterpWithClinitImpl() REQUIRES_SHARED(Locks::mutator_lock_);
64
Nicolas Geoffray00391822019-12-10 10:17:23 +000065const void* GetNterpEntryPoint() {
66 return reinterpret_cast<const void*>(interpreter::ExecuteNterpImpl);
67}
68
Nicolas Geoffray8af7f912022-07-06 15:29:16 +010069const void* GetNterpWithClinitEntryPoint() {
70 return reinterpret_cast<const void*>(interpreter::ExecuteNterpWithClinitImpl);
71}
72
Nicolas Geoffray00391822019-12-10 10:17:23 +000073/*
74 * Verify some constants used by the nterp interpreter.
75 */
76void CheckNterpAsmConstants() {
77 /*
78 * If we're using computed goto instruction transitions, make sure
79 * none of the handlers overflows the byte limit. This won't tell
80 * which one did, but if any one is too big the total size will
81 * overflow.
82 */
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +010083 const int width = kNterpHandlerSize;
Nicolas Geoffray00391822019-12-10 10:17:23 +000084 ptrdiff_t interp_size = reinterpret_cast<uintptr_t>(artNterpAsmInstructionEnd) -
85 reinterpret_cast<uintptr_t>(artNterpAsmInstructionStart);
86 if ((interp_size == 0) || (interp_size != (art::kNumPackedOpcodes * width))) {
87 LOG(FATAL) << "ERROR: unexpected asm interp size " << interp_size
88 << "(did an instruction handler exceed " << width << " bytes?)";
89 }
90}
91
Nicolas Geoffray0315efa2020-06-26 11:42:39 +010092inline void UpdateHotness(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
93 // The hotness we will add to a method when we perform a
94 // field/method/class/string lookup.
Nicolas Geoffray3caa05d2020-06-30 11:09:44 +010095 constexpr uint16_t kNterpHotnessLookup = 0xf;
Nicolas Geoffray61673dc2021-11-06 13:58:31 +000096 method->UpdateCounter(kNterpHotnessLookup);
Nicolas Geoffray0315efa2020-06-26 11:42:39 +010097}
98
Nicolas Geoffray00391822019-12-10 10:17:23 +000099template<typename T>
Nicolas Geoffraye22aa322022-08-05 12:32:31 +0000100inline void UpdateCache(Thread* self, const uint16_t* dex_pc_ptr, T value) {
David Srbecky09924362022-02-20 16:10:11 +0000101 self->GetInterpreterCache()->Set(self, dex_pc_ptr, value);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000102}
103
104template<typename T>
Nicolas Geoffraye22aa322022-08-05 12:32:31 +0000105inline void UpdateCache(Thread* self, const uint16_t* dex_pc_ptr, T* value) {
Nicolas Geoffray00391822019-12-10 10:17:23 +0000106 UpdateCache(self, dex_pc_ptr, reinterpret_cast<size_t>(value));
107}
108
Nicolas Geoffraydd406c32020-11-22 22:53:18 +0000109#ifdef __arm__
110
111extern "C" void NterpStoreArm32Fprs(const char* shorty,
112 uint32_t* registers,
113 uint32_t* stack_args,
114 const uint32_t* fprs) {
115 // Note `shorty` has already the returned type removed.
116 ScopedAssertNoThreadSuspension sants("In nterp");
117 uint32_t arg_index = 0;
118 uint32_t fpr_double_index = 0;
119 uint32_t fpr_index = 0;
120 for (uint32_t shorty_index = 0; shorty[shorty_index] != '\0'; ++shorty_index) {
121 char arg_type = shorty[shorty_index];
122 switch (arg_type) {
123 case 'D': {
124 // Double should not overlap with float.
125 fpr_double_index = std::max(fpr_double_index, RoundUp(fpr_index, 2));
126 if (fpr_double_index < 16) {
127 registers[arg_index] = fprs[fpr_double_index++];
128 registers[arg_index + 1] = fprs[fpr_double_index++];
129 } else {
130 registers[arg_index] = stack_args[arg_index];
131 registers[arg_index + 1] = stack_args[arg_index + 1];
132 }
133 arg_index += 2;
134 break;
135 }
136 case 'F': {
137 if (fpr_index % 2 == 0) {
138 fpr_index = std::max(fpr_double_index, fpr_index);
139 }
140 if (fpr_index < 16) {
141 registers[arg_index] = fprs[fpr_index++];
142 } else {
143 registers[arg_index] = stack_args[arg_index];
144 }
145 arg_index++;
146 break;
147 }
148 case 'J': {
149 arg_index += 2;
150 break;
151 }
152 default: {
153 arg_index++;
154 break;
155 }
156 }
157 }
158}
159
160extern "C" void NterpSetupArm32Fprs(const char* shorty,
161 uint32_t dex_register,
162 uint32_t stack_index,
163 uint32_t* fprs,
164 uint32_t* registers,
165 uint32_t* stack_args) {
166 // Note `shorty` has already the returned type removed.
167 ScopedAssertNoThreadSuspension sants("In nterp");
168 uint32_t fpr_double_index = 0;
169 uint32_t fpr_index = 0;
170 for (uint32_t shorty_index = 0; shorty[shorty_index] != '\0'; ++shorty_index) {
171 char arg_type = shorty[shorty_index];
172 switch (arg_type) {
173 case 'D': {
174 // Double should not overlap with float.
175 fpr_double_index = std::max(fpr_double_index, RoundUp(fpr_index, 2));
176 if (fpr_double_index < 16) {
177 fprs[fpr_double_index++] = registers[dex_register++];
178 fprs[fpr_double_index++] = registers[dex_register++];
179 stack_index += 2;
180 } else {
181 stack_args[stack_index++] = registers[dex_register++];
182 stack_args[stack_index++] = registers[dex_register++];
183 }
184 break;
185 }
186 case 'F': {
187 if (fpr_index % 2 == 0) {
188 fpr_index = std::max(fpr_double_index, fpr_index);
189 }
190 if (fpr_index < 16) {
191 fprs[fpr_index++] = registers[dex_register++];
192 stack_index++;
193 } else {
194 stack_args[stack_index++] = registers[dex_register++];
195 }
196 break;
197 }
198 case 'J': {
199 stack_index += 2;
200 dex_register += 2;
201 break;
202 }
203 default: {
204 stack_index++;
205 dex_register++;
206 break;
207 }
208 }
209 }
210}
211
212#endif
213
Nicolas Geoffray00391822019-12-10 10:17:23 +0000214extern "C" const dex::CodeItem* NterpGetCodeItem(ArtMethod* method)
215 REQUIRES_SHARED(Locks::mutator_lock_) {
216 ScopedAssertNoThreadSuspension sants("In nterp");
217 return method->GetCodeItem();
218}
219
220extern "C" const char* NterpGetShorty(ArtMethod* method)
221 REQUIRES_SHARED(Locks::mutator_lock_) {
222 ScopedAssertNoThreadSuspension sants("In nterp");
223 return method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty();
224}
225
226extern "C" const char* NterpGetShortyFromMethodId(ArtMethod* caller, uint32_t method_index)
227 REQUIRES_SHARED(Locks::mutator_lock_) {
228 ScopedAssertNoThreadSuspension sants("In nterp");
229 return caller->GetDexFile()->GetMethodShorty(method_index);
230}
231
232extern "C" const char* NterpGetShortyFromInvokePolymorphic(ArtMethod* caller, uint16_t* dex_pc_ptr)
233 REQUIRES_SHARED(Locks::mutator_lock_) {
234 ScopedAssertNoThreadSuspension sants("In nterp");
235 const Instruction* inst = Instruction::At(dex_pc_ptr);
236 dex::ProtoIndex proto_idx(inst->Opcode() == Instruction::INVOKE_POLYMORPHIC
237 ? inst->VRegH_45cc()
238 : inst->VRegH_4rcc());
239 return caller->GetDexFile()->GetShorty(proto_idx);
240}
241
242extern "C" const char* NterpGetShortyFromInvokeCustom(ArtMethod* caller, uint16_t* dex_pc_ptr)
243 REQUIRES_SHARED(Locks::mutator_lock_) {
244 ScopedAssertNoThreadSuspension sants("In nterp");
245 const Instruction* inst = Instruction::At(dex_pc_ptr);
246 uint16_t call_site_index = (inst->Opcode() == Instruction::INVOKE_CUSTOM
247 ? inst->VRegB_35c()
248 : inst->VRegB_3rc());
249 const DexFile* dex_file = caller->GetDexFile();
250 dex::ProtoIndex proto_idx = dex_file->GetProtoIndexForCallSite(call_site_index);
251 return dex_file->GetShorty(proto_idx);
252}
253
Vladimir Marko8f8a8a42022-10-31 17:23:29 +0100254static constexpr uint8_t kInvalidInvokeType = 255u;
255static_assert(static_cast<uint8_t>(kMaxInvokeType) < kInvalidInvokeType);
256
257static constexpr uint8_t GetOpcodeInvokeType(uint8_t opcode) {
258 switch (opcode) {
259 case Instruction::INVOKE_DIRECT:
260 case Instruction::INVOKE_DIRECT_RANGE:
261 return static_cast<uint8_t>(kDirect);
262 case Instruction::INVOKE_INTERFACE:
263 case Instruction::INVOKE_INTERFACE_RANGE:
264 return static_cast<uint8_t>(kInterface);
265 case Instruction::INVOKE_STATIC:
266 case Instruction::INVOKE_STATIC_RANGE:
267 return static_cast<uint8_t>(kStatic);
268 case Instruction::INVOKE_SUPER:
269 case Instruction::INVOKE_SUPER_RANGE:
270 return static_cast<uint8_t>(kSuper);
271 case Instruction::INVOKE_VIRTUAL:
272 case Instruction::INVOKE_VIRTUAL_RANGE:
273 return static_cast<uint8_t>(kVirtual);
274
275 default:
276 return kInvalidInvokeType;
277 }
278}
279
280static constexpr std::array<uint8_t, 256u> GenerateOpcodeInvokeTypes() {
281 std::array<uint8_t, 256u> opcode_invoke_types{};
282 for (size_t opcode = 0u; opcode != opcode_invoke_types.size(); ++opcode) {
283 opcode_invoke_types[opcode] = GetOpcodeInvokeType(opcode);
284 }
285 return opcode_invoke_types;
286}
287
288static constexpr std::array<uint8_t, 256u> kOpcodeInvokeTypes = GenerateOpcodeInvokeTypes();
289
David Srbeckyfa663892021-10-18 23:08:53 +0100290FLATTEN
Nicolas Geoffraye22aa322022-08-05 12:32:31 +0000291extern "C" size_t NterpGetMethod(Thread* self, ArtMethod* caller, const uint16_t* dex_pc_ptr)
Nicolas Geoffray00391822019-12-10 10:17:23 +0000292 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray0315efa2020-06-26 11:42:39 +0100293 UpdateHotness(caller);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000294 const Instruction* inst = Instruction::At(dex_pc_ptr);
Vladimir Marko8f8a8a42022-10-31 17:23:29 +0100295 Instruction::Code opcode = inst->Opcode();
296 DCHECK(IsUint<8>(static_cast<std::underlying_type_t<Instruction::Code>>(opcode)));
297 uint8_t raw_invoke_type = kOpcodeInvokeTypes[opcode];
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100298 DCHECK_LE(raw_invoke_type, kMaxInvokeType);
Vladimir Marko8f8a8a42022-10-31 17:23:29 +0100299 InvokeType invoke_type = static_cast<InvokeType>(raw_invoke_type);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000300
Vladimir Marko8f8a8a42022-10-31 17:23:29 +0100301 // In release mode, this is just a simple load.
302 // In debug mode, this checks that we're using the correct instruction format.
303 uint16_t method_index =
304 (opcode >= Instruction::INVOKE_VIRTUAL_RANGE) ? inst->VRegB_3rc() : inst->VRegB_35c();
Nicolas Geoffray00391822019-12-10 10:17:23 +0000305
306 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
307 ArtMethod* resolved_method = caller->SkipAccessChecks()
308 ? class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
309 self, method_index, caller, invoke_type)
310 : class_linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
311 self, method_index, caller, invoke_type);
312 if (resolved_method == nullptr) {
313 DCHECK(self->IsExceptionPending());
314 return 0;
315 }
316
Nicolas Geoffray00391822019-12-10 10:17:23 +0000317 if (invoke_type == kSuper) {
Nicolas Geoffraybdf7dc02022-01-19 10:34:57 +0000318 resolved_method = caller->SkipAccessChecks()
319 ? FindSuperMethodToCall</*access_check=*/false>(method_index, resolved_method, caller, self)
320 : FindSuperMethodToCall</*access_check=*/true>(method_index, resolved_method, caller, self);
321 if (resolved_method == nullptr) {
322 DCHECK(self->IsExceptionPending());
Nicolas Geoffray00391822019-12-10 10:17:23 +0000323 return 0;
324 }
Nicolas Geoffray00391822019-12-10 10:17:23 +0000325 }
326
327 if (invoke_type == kInterface) {
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000328 size_t result = 0u;
Nicolas Geoffray29253112020-02-17 09:40:14 +0000329 if (resolved_method->GetDeclaringClass()->IsObjectClass()) {
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000330 // Set the low bit to notify the interpreter it should do a vtable call.
Nicolas Geoffray29253112020-02-17 09:40:14 +0000331 DCHECK_LT(resolved_method->GetMethodIndex(), 0x10000);
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000332 result = (resolved_method->GetMethodIndex() << 16) | 1U;
Nicolas Geoffray29253112020-02-17 09:40:14 +0000333 } else {
334 DCHECK(resolved_method->GetDeclaringClass()->IsInterface());
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000335 DCHECK(!resolved_method->IsCopied());
336 if (!resolved_method->IsAbstract()) {
337 // Set the second bit to notify the interpreter this is a default
338 // method.
339 result = reinterpret_cast<size_t>(resolved_method) | 2U;
340 } else {
341 result = reinterpret_cast<size_t>(resolved_method);
342 }
Nicolas Geoffray29253112020-02-17 09:40:14 +0000343 }
Nicolas Geoffrayd5a86952021-01-19 10:35:54 +0000344 UpdateCache(self, dex_pc_ptr, result);
345 return result;
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100346 } else if (resolved_method->IsStringConstructor()) {
Nicolas Geoffraydd406c32020-11-22 22:53:18 +0000347 CHECK_NE(invoke_type, kSuper);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000348 resolved_method = WellKnownClasses::StringInitToStringFactory(resolved_method);
349 // Or the result with 1 to notify to nterp this is a string init method. We
350 // also don't cache the result as we don't want nterp to have its fast path always
351 // check for it, and we expect a lot more regular calls than string init
352 // calls.
353 return reinterpret_cast<size_t>(resolved_method) | 1;
354 } else if (invoke_type == kVirtual) {
355 UpdateCache(self, dex_pc_ptr, resolved_method->GetMethodIndex());
356 return resolved_method->GetMethodIndex();
357 } else {
358 UpdateCache(self, dex_pc_ptr, resolved_method);
359 return reinterpret_cast<size_t>(resolved_method);
360 }
361}
362
Hans Boehm3d2f1482022-01-17 01:32:55 +0000363extern "C" size_t NterpGetStaticField(Thread* self,
364 ArtMethod* caller,
Nicolas Geoffray39083d52022-10-24 08:41:06 +0000365 const uint16_t* dex_pc_ptr,
Hans Boehm3d2f1482022-01-17 01:32:55 +0000366 size_t resolve_field_type) // Resolve if not zero
Nicolas Geoffray00391822019-12-10 10:17:23 +0000367 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray0315efa2020-06-26 11:42:39 +0100368 UpdateHotness(caller);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000369 const Instruction* inst = Instruction::At(dex_pc_ptr);
370 uint16_t field_index = inst->VRegB_21c();
371 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100372 Instruction::Code opcode = inst->Opcode();
Nicolas Geoffray00391822019-12-10 10:17:23 +0000373 ArtField* resolved_field = ResolveFieldWithAccessChecks(
374 self,
375 class_linker,
376 field_index,
377 caller,
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100378 /*is_static=*/ true,
379 /*is_put=*/ IsInstructionSPut(opcode),
Nicolas Geoffray808c5562021-01-18 14:19:21 +0000380 resolve_field_type);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000381
382 if (resolved_field == nullptr) {
383 DCHECK(self->IsExceptionPending());
384 return 0;
385 }
386 if (UNLIKELY(!resolved_field->GetDeclaringClass()->IsVisiblyInitialized())) {
387 StackHandleScope<1> hs(self);
388 Handle<mirror::Class> h_class(hs.NewHandle(resolved_field->GetDeclaringClass()));
389 if (UNLIKELY(!class_linker->EnsureInitialized(
390 self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
391 DCHECK(self->IsExceptionPending());
392 return 0;
393 }
394 DCHECK(h_class->IsInitializing());
395 }
396 if (resolved_field->IsVolatile()) {
397 // Or the result with 1 to notify to nterp this is a volatile field. We
398 // also don't cache the result as we don't want nterp to have its fast path always
399 // check for it.
400 return reinterpret_cast<size_t>(resolved_field) | 1;
401 } else {
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100402 // For sput-object, try to resolve the field type even if we were not requested to.
403 // Only if the field type is successfully resolved can we update the cache. If we
Nicolas Geoffray254f9a92022-07-25 09:41:38 +0100404 // fail to resolve the type, we clear the exception to keep interpreter
405 // semantics of not throwing when null is stored.
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100406 if (opcode == Instruction::SPUT_OBJECT &&
407 resolve_field_type == 0 &&
408 resolved_field->ResolveType() == nullptr) {
Nicolas Geoffray254f9a92022-07-25 09:41:38 +0100409 DCHECK(self->IsExceptionPending());
410 self->ClearException();
411 } else {
412 UpdateCache(self, dex_pc_ptr, resolved_field);
413 }
Nicolas Geoffray00391822019-12-10 10:17:23 +0000414 return reinterpret_cast<size_t>(resolved_field);
415 }
416}
417
Hans Boehm3d2f1482022-01-17 01:32:55 +0000418extern "C" uint32_t NterpGetInstanceFieldOffset(Thread* self,
419 ArtMethod* caller,
Nicolas Geoffray39083d52022-10-24 08:41:06 +0000420 const uint16_t* dex_pc_ptr,
Hans Boehm3d2f1482022-01-17 01:32:55 +0000421 size_t resolve_field_type) // Resolve if not zero
Nicolas Geoffray00391822019-12-10 10:17:23 +0000422 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray0315efa2020-06-26 11:42:39 +0100423 UpdateHotness(caller);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000424 const Instruction* inst = Instruction::At(dex_pc_ptr);
425 uint16_t field_index = inst->VRegC_22c();
426 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100427 Instruction::Code opcode = inst->Opcode();
Nicolas Geoffray00391822019-12-10 10:17:23 +0000428 ArtField* resolved_field = ResolveFieldWithAccessChecks(
429 self,
430 class_linker,
431 field_index,
432 caller,
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100433 /*is_static=*/ false,
434 /*is_put=*/ IsInstructionIPut(opcode),
Nicolas Geoffray808c5562021-01-18 14:19:21 +0000435 resolve_field_type);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000436 if (resolved_field == nullptr) {
437 DCHECK(self->IsExceptionPending());
438 return 0;
439 }
440 if (resolved_field->IsVolatile()) {
441 // Don't cache for a volatile field, and return a negative offset as marker
442 // of volatile.
443 return -resolved_field->GetOffset().Uint32Value();
444 }
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100445 // For iput-object, try to resolve the field type even if we were not requested to.
446 // Only if the field type is successfully resolved can we update the cache. If we
Nicolas Geoffray254f9a92022-07-25 09:41:38 +0100447 // fail to resolve the type, we clear the exception to keep interpreter
448 // semantics of not throwing when null is stored.
Vladimir Marko7267e1e2022-11-07 12:37:32 +0100449 if (opcode == Instruction::IPUT_OBJECT &&
450 resolve_field_type == 0 &&
451 resolved_field->ResolveType() == nullptr) {
Nicolas Geoffray254f9a92022-07-25 09:41:38 +0100452 DCHECK(self->IsExceptionPending());
453 self->ClearException();
454 } else {
455 UpdateCache(self, dex_pc_ptr, resolved_field->GetOffset().Uint32Value());
456 }
Nicolas Geoffray00391822019-12-10 10:17:23 +0000457 return resolved_field->GetOffset().Uint32Value();
458}
459
Vladimir Marko30781f12022-11-02 11:28:11 +0100460extern "C" mirror::Object* NterpGetClass(Thread* self, ArtMethod* caller, uint16_t* dex_pc_ptr)
Nicolas Geoffray00391822019-12-10 10:17:23 +0000461 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray0315efa2020-06-26 11:42:39 +0100462 UpdateHotness(caller);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000463 const Instruction* inst = Instruction::At(dex_pc_ptr);
Vladimir Marko30781f12022-11-02 11:28:11 +0100464 Instruction::Code opcode = inst->Opcode();
465 DCHECK(opcode == Instruction::CHECK_CAST ||
466 opcode == Instruction::INSTANCE_OF ||
467 opcode == Instruction::CONST_CLASS ||
468 opcode == Instruction::NEW_ARRAY);
469
470 // In release mode, this is just a simple load.
471 // In debug mode, this checks that we're using the correct instruction format.
472 dex::TypeIndex index = dex::TypeIndex(
473 (opcode == Instruction::CHECK_CAST || opcode == Instruction::CONST_CLASS)
474 ? inst->VRegB_21c()
475 : inst->VRegC_22c());
476
Nicolas Geoffray00391822019-12-10 10:17:23 +0000477 ObjPtr<mirror::Class> c =
478 ResolveVerifyAndClinit(index,
479 caller,
480 self,
481 /* can_run_clinit= */ false,
482 /* verify_access= */ !caller->SkipAccessChecks());
Vladimir Marko30781f12022-11-02 11:28:11 +0100483 if (UNLIKELY(c == nullptr)) {
Nicolas Geoffray00391822019-12-10 10:17:23 +0000484 DCHECK(self->IsExceptionPending());
485 return nullptr;
486 }
487
Vladimir Marko30781f12022-11-02 11:28:11 +0100488 UpdateCache(self, dex_pc_ptr, c.Ptr());
Nicolas Geoffray00391822019-12-10 10:17:23 +0000489 return c.Ptr();
490}
491
Vladimir Marko30781f12022-11-02 11:28:11 +0100492extern "C" mirror::Object* NterpAllocateObject(Thread* self,
493 ArtMethod* caller,
494 uint16_t* dex_pc_ptr)
495 REQUIRES_SHARED(Locks::mutator_lock_) {
496 UpdateHotness(caller);
497 const Instruction* inst = Instruction::At(dex_pc_ptr);
498 DCHECK_EQ(inst->Opcode(), Instruction::NEW_INSTANCE);
499 dex::TypeIndex index = dex::TypeIndex(inst->VRegB_21c());
500 ObjPtr<mirror::Class> c =
501 ResolveVerifyAndClinit(index,
502 caller,
503 self,
504 /* can_run_clinit= */ false,
505 /* verify_access= */ !caller->SkipAccessChecks());
506 if (UNLIKELY(c == nullptr)) {
507 DCHECK(self->IsExceptionPending());
508 return nullptr;
509 }
510
511 gc::AllocatorType allocator_type = Runtime::Current()->GetHeap()->GetCurrentAllocator();
512 if (UNLIKELY(c->IsStringClass())) {
513 // We don't cache the class for strings as we need to special case their
514 // allocation.
515 return mirror::String::AllocEmptyString(self, allocator_type).Ptr();
516 } else {
517 if (!c->IsFinalizable() && c->IsInstantiable()) {
518 // Cache non-finalizable classes for next calls.
519 UpdateCache(self, dex_pc_ptr, c.Ptr());
520 }
521 return AllocObjectFromCode(c, self, allocator_type).Ptr();
522 }
523}
524
Nicolas Geoffray00391822019-12-10 10:17:23 +0000525extern "C" mirror::Object* NterpLoadObject(Thread* self, ArtMethod* caller, uint16_t* dex_pc_ptr)
526 REQUIRES_SHARED(Locks::mutator_lock_) {
527 const Instruction* inst = Instruction::At(dex_pc_ptr);
528 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
529 switch (inst->Opcode()) {
530 case Instruction::CONST_STRING:
531 case Instruction::CONST_STRING_JUMBO: {
Nicolas Geoffray0315efa2020-06-26 11:42:39 +0100532 UpdateHotness(caller);
Nicolas Geoffray00391822019-12-10 10:17:23 +0000533 dex::StringIndex string_index(
534 (inst->Opcode() == Instruction::CONST_STRING)
535 ? inst->VRegB_21c()
536 : inst->VRegB_31c());
537 ObjPtr<mirror::String> str = class_linker->ResolveString(string_index, caller);
538 if (str == nullptr) {
539 DCHECK(self->IsExceptionPending());
540 return nullptr;
541 }
542 UpdateCache(self, dex_pc_ptr, str.Ptr());
543 return str.Ptr();
544 }
545 case Instruction::CONST_METHOD_HANDLE: {
546 // Don't cache: we don't expect this to be performance sensitive, and we
547 // don't want the cache to conflict with a performance sensitive entry.
548 return class_linker->ResolveMethodHandle(self, inst->VRegB_21c(), caller).Ptr();
549 }
550 case Instruction::CONST_METHOD_TYPE: {
551 // Don't cache: we don't expect this to be performance sensitive, and we
552 // don't want the cache to conflict with a performance sensitive entry.
553 return class_linker->ResolveMethodType(
554 self, dex::ProtoIndex(inst->VRegB_21c()), caller).Ptr();
555 }
556 default:
557 LOG(FATAL) << "Unreachable";
558 }
559 return nullptr;
560}
561
562extern "C" void NterpUnimplemented() {
563 LOG(FATAL) << "Unimplemented";
564}
565
566static mirror::Object* DoFilledNewArray(Thread* self,
567 ArtMethod* caller,
568 uint16_t* dex_pc_ptr,
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100569 uint32_t* regs,
Nicolas Geoffray00391822019-12-10 10:17:23 +0000570 bool is_range)
571 REQUIRES_SHARED(Locks::mutator_lock_) {
572 const Instruction* inst = Instruction::At(dex_pc_ptr);
573 if (kIsDebugBuild) {
574 if (is_range) {
575 DCHECK_EQ(inst->Opcode(), Instruction::FILLED_NEW_ARRAY_RANGE);
576 } else {
577 DCHECK_EQ(inst->Opcode(), Instruction::FILLED_NEW_ARRAY);
578 }
579 }
580 const int32_t length = is_range ? inst->VRegA_3rc() : inst->VRegA_35c();
581 DCHECK_GE(length, 0);
582 if (!is_range) {
583 // Checks FILLED_NEW_ARRAY's length does not exceed 5 arguments.
584 DCHECK_LE(length, 5);
585 }
586 uint16_t type_idx = is_range ? inst->VRegB_3rc() : inst->VRegB_35c();
Nicolas Geoffray60d4abc2020-07-27 13:58:51 +0000587 ObjPtr<mirror::Class> array_class =
588 ResolveVerifyAndClinit(dex::TypeIndex(type_idx),
589 caller,
590 self,
591 /* can_run_clinit= */ true,
592 /* verify_access= */ !caller->SkipAccessChecks());
Nicolas Geoffray00391822019-12-10 10:17:23 +0000593 if (UNLIKELY(array_class == nullptr)) {
594 DCHECK(self->IsExceptionPending());
595 return nullptr;
596 }
597 DCHECK(array_class->IsArrayClass());
598 ObjPtr<mirror::Class> component_class = array_class->GetComponentType();
599 const bool is_primitive_int_component = component_class->IsPrimitiveInt();
600 if (UNLIKELY(component_class->IsPrimitive() && !is_primitive_int_component)) {
601 if (component_class->IsPrimitiveLong() || component_class->IsPrimitiveDouble()) {
602 ThrowRuntimeException("Bad filled array request for type %s",
603 component_class->PrettyDescriptor().c_str());
604 } else {
605 self->ThrowNewExceptionF(
606 "Ljava/lang/InternalError;",
607 "Found type %s; filled-new-array not implemented for anything but 'int'",
608 component_class->PrettyDescriptor().c_str());
609 }
610 return nullptr;
611 }
612 ObjPtr<mirror::Object> new_array = mirror::Array::Alloc(
613 self,
614 array_class,
615 length,
616 array_class->GetComponentSizeShift(),
617 Runtime::Current()->GetHeap()->GetCurrentAllocator());
618 if (UNLIKELY(new_array == nullptr)) {
619 self->AssertPendingOOMException();
620 return nullptr;
621 }
622 uint32_t arg[Instruction::kMaxVarArgRegs]; // only used in filled-new-array.
623 uint32_t vregC = 0; // only used in filled-new-array-range.
624 if (is_range) {
625 vregC = inst->VRegC_3rc();
626 } else {
627 inst->GetVarArgs(arg);
628 }
629 for (int32_t i = 0; i < length; ++i) {
630 size_t src_reg = is_range ? vregC + i : arg[i];
631 if (is_primitive_int_component) {
632 new_array->AsIntArray()->SetWithoutChecks</* kTransactionActive= */ false>(i, regs[src_reg]);
633 } else {
634 new_array->AsObjectArray<mirror::Object>()->SetWithoutChecks</* kTransactionActive= */ false>(
635 i, reinterpret_cast<mirror::Object*>(regs[src_reg]));
636 }
637 }
638 return new_array.Ptr();
639}
640
641extern "C" mirror::Object* NterpFilledNewArray(Thread* self,
642 ArtMethod* caller,
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100643 uint32_t* registers,
Nicolas Geoffray00391822019-12-10 10:17:23 +0000644 uint16_t* dex_pc_ptr)
645 REQUIRES_SHARED(Locks::mutator_lock_) {
646 return DoFilledNewArray(self, caller, dex_pc_ptr, registers, /* is_range= */ false);
647}
648
649extern "C" mirror::Object* NterpFilledNewArrayRange(Thread* self,
650 ArtMethod* caller,
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100651 uint32_t* registers,
Nicolas Geoffray00391822019-12-10 10:17:23 +0000652 uint16_t* dex_pc_ptr)
653 REQUIRES_SHARED(Locks::mutator_lock_) {
654 return DoFilledNewArray(self, caller, dex_pc_ptr, registers, /* is_range= */ true);
655}
656
657extern "C" jit::OsrData* NterpHotMethod(ArtMethod* method, uint16_t* dex_pc_ptr, uint32_t* vregs)
658 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray5f778892021-11-26 14:51:50 +0000659 // It is important this method is not suspended because it can be called on
660 // method entry and async deoptimization does not expect runtime methods other than the
661 // suspend entrypoint before executing the first instruction of a Java
662 // method.
Nicolas Geoffray00391822019-12-10 10:17:23 +0000663 ScopedAssertNoThreadSuspension sants("In nterp");
Nicolas Geoffray58f916c2021-11-15 14:02:07 +0000664 Runtime* runtime = Runtime::Current();
Nicolas Geoffrayf9ae8e32022-02-15 22:54:11 +0000665 if (method->IsMemorySharedMethod()) {
666 DCHECK_EQ(Thread::Current()->GetSharedMethodHotness(), 0u);
667 Thread::Current()->ResetSharedMethodHotness();
668 } else {
669 method->ResetCounter(runtime->GetJITOptions()->GetWarmupThreshold());
670 }
Nicolas Geoffray58f916c2021-11-15 14:02:07 +0000671 jit::Jit* jit = runtime->GetJit();
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100672 if (jit != nullptr && jit->UseJitCompilation()) {
Nicolas Geoffray00391822019-12-10 10:17:23 +0000673 // Nterp passes null on entry where we don't want to OSR.
674 if (dex_pc_ptr != nullptr) {
675 // This could be a loop back edge, check if we can OSR.
676 CodeItemInstructionAccessor accessor(method->DexInstructions());
677 uint32_t dex_pc = dex_pc_ptr - accessor.Insns();
678 jit::OsrData* osr_data = jit->PrepareForOsr(
679 method->GetInterfaceMethodIfProxy(kRuntimePointerSize), dex_pc, vregs);
680 if (osr_data != nullptr) {
681 return osr_data;
682 }
683 }
Nicolas Geoffrayf9ae8e32022-02-15 22:54:11 +0000684 jit->MaybeEnqueueCompilation(method, Thread::Current());
Nicolas Geoffray00391822019-12-10 10:17:23 +0000685 }
686 return nullptr;
687}
688
Nicolas Geoffray00391822019-12-10 10:17:23 +0000689extern "C" ssize_t NterpDoPackedSwitch(const uint16_t* switchData, int32_t testVal)
690 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100691 ScopedAssertNoThreadSuspension sants("In nterp");
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100692 const int kInstrLen = 3;
693
694 /*
695 * Packed switch data format:
696 * ushort ident = 0x0100 magic value
697 * ushort size number of entries in the table
698 * int first_key first (and lowest) switch case value
699 * int targets[size] branch targets, relative to switch opcode
700 *
701 * Total size is (4+size*2) 16-bit code units.
702 */
703 uint16_t signature = *switchData++;
704 DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kPackedSwitchSignature));
705
706 uint16_t size = *switchData++;
707
708 int32_t firstKey = *switchData++;
709 firstKey |= (*switchData++) << 16;
710
711 int index = testVal - firstKey;
712 if (index < 0 || index >= size) {
713 return kInstrLen;
714 }
715
716 /*
717 * The entries are guaranteed to be aligned on a 32-bit boundary;
718 * we can treat them as a native int array.
719 */
720 const int32_t* entries = reinterpret_cast<const int32_t*>(switchData);
721 return entries[index];
Nicolas Geoffray00391822019-12-10 10:17:23 +0000722}
723
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100724/*
725 * Find the matching case. Returns the offset to the handler instructions.
726 *
727 * Returns 3 if we don't find a match (it's the size of the sparse-switch
728 * instruction).
729 */
Nicolas Geoffray00391822019-12-10 10:17:23 +0000730extern "C" ssize_t NterpDoSparseSwitch(const uint16_t* switchData, int32_t testVal)
731 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray3bae0472020-04-30 15:47:00 +0100732 ScopedAssertNoThreadSuspension sants("In nterp");
Nicolas Geoffraye4f983c2021-07-12 15:53:27 +0100733 const int kInstrLen = 3;
734 uint16_t size;
735 const int32_t* keys;
736 const int32_t* entries;
737
738 /*
739 * Sparse switch data format:
740 * ushort ident = 0x0200 magic value
741 * ushort size number of entries in the table; > 0
742 * int keys[size] keys, sorted low-to-high; 32-bit aligned
743 * int targets[size] branch targets, relative to switch opcode
744 *
745 * Total size is (2+size*4) 16-bit code units.
746 */
747
748 uint16_t signature = *switchData++;
749 DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kSparseSwitchSignature));
750
751 size = *switchData++;
752
753 /* The keys are guaranteed to be aligned on a 32-bit boundary;
754 * we can treat them as a native int array.
755 */
756 keys = reinterpret_cast<const int32_t*>(switchData);
757
758 /* The entries are guaranteed to be aligned on a 32-bit boundary;
759 * we can treat them as a native int array.
760 */
761 entries = keys + size;
762
763 /*
764 * Binary-search through the array of keys, which are guaranteed to
765 * be sorted low-to-high.
766 */
767 int lo = 0;
768 int hi = size - 1;
769 while (lo <= hi) {
770 int mid = (lo + hi) >> 1;
771
772 int32_t foundVal = keys[mid];
773 if (testVal < foundVal) {
774 hi = mid - 1;
775 } else if (testVal > foundVal) {
776 lo = mid + 1;
777 } else {
778 return entries[mid];
779 }
780 }
781 return kInstrLen;
Nicolas Geoffray00391822019-12-10 10:17:23 +0000782}
783
// Thin wrapper so the assembly interpreter can release malloc'ed memory.
// Follows the C `free` contract: passing null is a no-op.
extern "C" void NterpFree(void* val) {
  free(val);
}
787
Nicolas Geoffray00391822019-12-10 10:17:23 +0000788} // namespace interpreter
789} // namespace art