summaryrefslogtreecommitdiff
path: root/runtime/interpreter/interpreter_common.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/interpreter/interpreter_common.cc')
-rw-r--r-- runtime/interpreter/interpreter_common.cc | 41
1 file changed, 33 insertions, 8 deletions
diff --git a/runtime/interpreter/interpreter_common.cc b/runtime/interpreter/interpreter_common.cc
index ef0ddb30db..084cb4218f 100644
--- a/runtime/interpreter/interpreter_common.cc
+++ b/runtime/interpreter/interpreter_common.cc
@@ -458,8 +458,8 @@ ALWAYS_INLINE void CopyRegisters(ShadowFrame& caller_frame,
void ArtInterpreterToCompiledCodeBridge(Thread* self,
ArtMethod* caller,
- const DexFile::CodeItem* code_item,
ShadowFrame* shadow_frame,
+ uint16_t arg_offset,
JValue* result)
REQUIRES_SHARED(Locks::mutator_lock_) {
ArtMethod* method = shadow_frame->GetMethod();
@@ -482,9 +482,17 @@ void ArtInterpreterToCompiledCodeBridge(Thread* self,
method = shadow_frame->GetMethod();
}
}
- uint16_t arg_offset = (code_item == nullptr)
- ? 0
- : code_item->registers_size_ - code_item->ins_size_;
+ // Basic checks for the arg_offset. If there's no code item, the arg_offset must be 0. Otherwise,
+ // check that the arg_offset isn't greater than the number of registers. A stronger check is
+ // difficult since the frame may contain space for all the registers in the method, or only enough
+ // space for the arguments.
+ if (kIsDebugBuild) {
+ if (method->GetCodeItem() == nullptr) {
+ DCHECK_EQ(0u, arg_offset) << method->PrettyMethod();
+ } else {
+ DCHECK_LE(arg_offset, shadow_frame->NumberOfVRegs());
+ }
+ }
jit::Jit* jit = Runtime::Current()->GetJit();
if (jit != nullptr && caller != nullptr) {
jit->NotifyInterpreterToCompiledCodeTransition(self, caller);
@@ -918,12 +926,23 @@ static inline bool DoCallCommon(ArtMethod* called_method,
// Compute method information.
const DexFile::CodeItem* code_item = called_method->GetCodeItem();
-
// Number of registers for the callee's call frame.
uint16_t num_regs;
+ // Test whether to use the interpreter or compiler entrypoint, and save that result to pass to
+ // PerformCall. A deoptimization could occur at any time, and we shouldn't change which
+ // entrypoint to use once we start building the shadow frame.
+ bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
+ called_method, called_method->GetEntryPointFromQuickCompiledCode());
if (LIKELY(code_item != nullptr)) {
- num_regs = code_item->registers_size_;
- DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, code_item->ins_size_);
+ // When transitioning to compiled code, space only needs to be reserved for the input registers.
+ // The rest of the frame gets discarded. This also prevents accessing the called method's code
+ // item, saving memory by keeping code items of compiled code untouched.
+ if (Runtime::Current()->IsStarted() && !use_interpreter_entrypoint) {
+ num_regs = number_of_inputs;
+ } else {
+ num_regs = code_item->registers_size_;
+ DCHECK_EQ(string_init ? number_of_inputs - 1 : number_of_inputs, code_item->ins_size_);
+ }
} else {
DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
num_regs = number_of_inputs;
@@ -1077,7 +1096,13 @@ static inline bool DoCallCommon(ArtMethod* called_method,
self->EndAssertNoThreadSuspension(old_cause);
}
- PerformCall(self, code_item, shadow_frame.GetMethod(), first_dest_reg, new_shadow_frame, result);
+ PerformCall(self,
+ code_item,
+ shadow_frame.GetMethod(),
+ first_dest_reg,
+ new_shadow_frame,
+ result,
+ use_interpreter_entrypoint);
if (string_init && !self->IsExceptionPending()) {
SetStringInitValueToAllAliases(&shadow_frame, string_init_vreg_this, *result);