author Nicolas Geoffray <ngeoffray@google.com> 2023-11-06 16:11:27 +0000
committer Nicolas Geoffray <ngeoffray@google.com> 2023-11-06 16:58:43 +0000
commit  4384e944119846a2bc1d699a8c366baf9ac24c8e (patch)
tree    df4ddb19cc2ff76baff980f6af4b71ca74b437c2
parent  a12874590c1eb01c04d8d8ef28ad61f680406d1b (diff)
Revert "Fix pathological deoptimization case."
This reverts commit cd576b2e7927067319f6e7e790d5e3f3a1db7a62.

Reason for revert: Broken luci libcore tests.

Change-Id: I4ceb85bccce9c698a96bb02f4805a2cf98f5b4bb
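For context, the reverted change updated a method's JIT inline cache at deoptimization time: when optimized code deoptimizes because an invoke site saw a receiver type the inline cache never recorded (kJitInlineCache / kJitSameTarget), the type that caused the deopt is written back into the method's profiling info, so baseline-profiled code eventually observes that type instead of re-triggering the same deoptimization. The sketch below is a simplified, self-contained model of that bookkeeping, based only on the hunks that follow; InlineCache, ProfilingInfo, and the function names mirror identifiers visible in the diff, but the data structures, signatures, and the kMaxEntries limit are illustrative stand-ins, not ART's actual implementation.

#include <array>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

enum class DeoptimizationKind { kDebugging, kJitInlineCache, kJitSameTarget, kOther };

// Illustrative inline cache: up to kMaxEntries receiver classes observed at one
// invoke site, identified here by class descriptor strings for simplicity.
struct InlineCache {
  static constexpr size_t kMaxEntries = 5;
  std::array<std::string, kMaxEntries> classes;
  size_t count = 0;

  void Add(const std::string& cls) {
    for (size_t i = 0; i < count; ++i) {
      if (classes[i] == cls) {
        return;  // Type already recorded.
      }
    }
    if (count < kMaxEntries) {
      classes[count++] = cls;  // Otherwise the cache is saturated (megamorphic).
    }
  }
};

// Illustrative per-method profiling data: one inline cache per invoke dex pc.
struct ProfilingInfo {
  std::map<uint32_t, InlineCache> caches;
};

// Models the reverted JitCodeCache::MaybeUpdateInlineCache: record the receiver
// class that caused an inline-cache deoptimization in the profiling info.
void MaybeUpdateInlineCache(ProfilingInfo& info, uint32_t dex_pc, const std::string& receiver_class) {
  info.caches[dex_pc].Add(receiver_class);
}

// Models the deopt-time hook that the revert removes from DeoptimizeSingleFrame:
// only inline-cache and same-target deopts feed the observed type back.
void OnSingleFrameDeopt(DeoptimizationKind kind, ProfilingInfo& info, uint32_t dex_pc,
                        const std::string& receiver_class) {
  if (kind == DeoptimizationKind::kJitInlineCache || kind == DeoptimizationKind::kJitSameTarget) {
    MaybeUpdateInlineCache(info, dex_pc, receiver_class);
  }
}

int main() {
  ProfilingInfo info;
  // A deopt at dex pc 0x2a caused by a receiver type the cache had not seen.
  OnSingleFrameDeopt(DeoptimizationKind::kJitInlineCache, info, 0x2a, "LFoo;");
  std::cout << "types recorded at 0x2a: " << info.caches[0x2a].count << "\n";  // Prints 1.
  return 0;
}

In ART itself this update went through JitCodeCache::MaybeUpdateInlineCache under jit_lock_, which is exactly the helper the hunks below remove.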
-rw-r--r--  runtime/jit/jit_code_cache.cc       15
-rw-r--r--  runtime/jit/jit_code_cache.h         5
-rw-r--r--  runtime/quick_exception_handler.cc  44
3 files changed, 3 insertions, 61 deletions
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 30703e3eb5..845f6eb8fb 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -1316,21 +1316,6 @@ ProfilingInfo* JitCodeCache::GetProfilingInfo(ArtMethod* method, Thread* self) {
   return it->second;
 }
 
-void JitCodeCache::MaybeUpdateInlineCache(ArtMethod* method,
-                                          uint32_t dex_pc,
-                                          ObjPtr<mirror::Class> cls,
-                                          Thread* self) {
-  ScopedDebugDisallowReadBarriers sddrb(self);
-  MutexLock mu(self, *Locks::jit_lock_);
-  auto it = profiling_infos_.find(method);
-  if (it == profiling_infos_.end()) {
-    return;
-  }
-  ProfilingInfo* info = it->second;
-  ScopedAssertNoThreadSuspension sants("ProfilingInfo");
-  info->AddInvokeInfo(dex_pc, cls.Ptr());
-}
-
 void JitCodeCache::ResetHotnessCounter(ArtMethod* method, Thread* self) {
   ScopedDebugDisallowReadBarriers sddrb(self);
   MutexLock mu(self, *Locks::jit_lock_);
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 86efb91d0e..034ee9f6c5 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -407,11 +407,6 @@ class JitCodeCache {
   ProfilingInfo* GetProfilingInfo(ArtMethod* method, Thread* self);
 
   void ResetHotnessCounter(ArtMethod* method, Thread* self);
 
-  void MaybeUpdateInlineCache(ArtMethod* method,
-                              uint32_t dex_pc,
-                              ObjPtr<mirror::Class> cls,
-                              Thread* self)
-      REQUIRES_SHARED(Locks::mutator_lock_);
 
   void VisitRoots(RootVisitor* visitor);
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index a5e3aee3f7..fd57b4ae83 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -29,7 +29,6 @@
#include "base/systrace.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
-#include "dex/dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
@@ -402,14 +401,6 @@ class DeoptimizeStackVisitor final : public StackVisitor {
     return single_frame_deopt_quick_method_header_;
   }
 
-  ShadowFrame* GetBottomShadowFrame() const {
-    ShadowFrame* result = prev_shadow_frame_;
-    while (result->GetLink() != nullptr) {
-      result = result->GetLink();
-    }
-    return result;
-  }
-
   void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
     // This is the upcall, or the next full frame in single-frame deopt, or the
     // code isn't deoptimizeable. We remember the frame and last pc so that we
@@ -716,43 +707,14 @@ void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
   // When deoptimizing for debug support the optimized code is still valid and
   // can be reused when debugging support (like breakpoints) are no longer
   // needed fot this method.
-  Runtime* runtime = Runtime::Current();
-  if (runtime->UseJitCompilation() && (kind != DeoptimizationKind::kDebugging)) {
-    runtime->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
+  if (Runtime::Current()->UseJitCompilation() && (kind != DeoptimizationKind::kDebugging)) {
+    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
         deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
   } else {
-    runtime->GetInstrumentation()->InitializeMethodsCode(
+    Runtime::Current()->GetInstrumentation()->InitializeMethodsCode(
         deopt_method, /*aot_code=*/ nullptr);
   }
 
-  // If the deoptimization is due to an inline cache, update it with the type
-  // that made us deoptimize. This avoids pathological cases of never seeing
-  // that type while executing baseline generated code.
-  if (kind == DeoptimizationKind::kJitInlineCache || kind == DeoptimizationKind::kJitSameTarget) {
-    DCHECK(runtime->UseJitCompilation());
-    ShadowFrame* shadow_frame = visitor.GetBottomShadowFrame();
-    uint32_t dex_pc = shadow_frame->GetDexPC();
-    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
-    const uint16_t* const insns = accessor.Insns();
-    const Instruction* inst = Instruction::At(insns + dex_pc);
-    switch (inst->Opcode()) {
-      case Instruction::INVOKE_INTERFACE:
-      case Instruction::INVOKE_VIRTUAL:
-      case Instruction::INVOKE_INTERFACE_RANGE:
-      case Instruction::INVOKE_VIRTUAL_RANGE: {
-        runtime->GetJit()->GetCodeCache()->MaybeUpdateInlineCache(
-            shadow_frame->GetMethod(),
-            dex_pc,
-            shadow_frame->GetVRegReference(inst->VRegC())->GetClass(),
-            self_);
-        break;
-      }
-      default: {
-        LOG(FATAL) << "Unexpected instruction for inline cache: " << inst->Name();
-      }
-    }
-  }
-
   PrepareForLongJumpToInvokeStubOrInterpreterBridge();
 }