summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author Hiroshi Yamauchi <yamauchi@google.com> 2016-11-03 13:06:52 -0700
committer Hiroshi Yamauchi <yamauchi@google.com> 2016-11-09 18:14:08 -0800
commit 3049324f4ef71b5d7a3de49bd77c75f07dbf8f3a (patch)
tree 19e2d9d5e8476bf526dd5924ef05b1d727b75f8b
parent e7b46e22c7f4f6f503501b3b2ad99113289d142b (diff)
Make empty checkpoint work while weak ref access is disabled.
Fix a potential race on PushOntoMarkStack for CC by running an empty checkpoint (while weak ref access is disabled). Bug: 32508093 Bug: 12687968 Test: test-art-host with CC/CMS, libartd boot with N9, Ritz EAAC. Change-Id: I3749bb525e7734804307ee16262355f3fc730312
-rw-r--r-- runtime/gc/allocation_record.cc | 4
-rw-r--r-- runtime/gc/allocation_record.h | 1
-rw-r--r-- runtime/gc/collector/concurrent_copying.cc | 32
-rw-r--r-- runtime/gc/heap.cc | 1
-rw-r--r-- runtime/gc/heap.h | 1
-rw-r--r-- runtime/gc/reference_processor.cc | 7
-rw-r--r-- runtime/gc/system_weak.h | 14
-rw-r--r-- runtime/gc/system_weak_test.cc | 10
-rw-r--r-- runtime/generated/asm_support_gen.h | 4
-rw-r--r-- runtime/intern_table.cc | 1
-rw-r--r-- runtime/intern_table.h | 2
-rw-r--r-- runtime/interpreter/mterp/arm/footer.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm/op_return_wide.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm64/footer.S | 6
-rw-r--r-- runtime/interpreter/mterp/arm64/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm64/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm64/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/arm64/op_return_wide.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips/footer.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips/op_return_wide.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips64/footer.S | 4
-rw-r--r-- runtime/interpreter/mterp/mips64/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips64/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips64/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/mips64/op_return_wide.S | 2
-rw-r--r-- runtime/interpreter/mterp/mterp.cc | 2
-rw-r--r-- runtime/interpreter/mterp/out/mterp_arm.S | 12
-rw-r--r-- runtime/interpreter/mterp/out/mterp_arm64.S | 16
-rw-r--r-- runtime/interpreter/mterp/out/mterp_mips.S | 12
-rw-r--r-- runtime/interpreter/mterp/out/mterp_mips64.S | 14
-rw-r--r-- runtime/interpreter/mterp/out/mterp_x86.S | 12
-rw-r--r-- runtime/interpreter/mterp/out/mterp_x86_64.S | 12
-rw-r--r-- runtime/interpreter/mterp/x86/footer.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86/op_return_wide.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86_64/footer.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86_64/op_return.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86_64/op_return_void.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86_64/op_return_void_no_barrier.S | 2
-rw-r--r-- runtime/interpreter/mterp/x86_64/op_return_wide.S | 2
-rw-r--r-- runtime/java_vm_ext.cc | 10
-rw-r--r-- runtime/java_vm_ext.h | 1
-rw-r--r-- runtime/monitor.cc | 4
-rw-r--r-- runtime/runtime.cc | 8
-rw-r--r-- runtime/runtime.h | 5
-rw-r--r-- runtime/thread-inl.h | 24
-rw-r--r-- runtime/thread.cc | 31
-rw-r--r-- runtime/thread.h | 9
-rw-r--r-- runtime/thread_list.cc | 41
-rw-r--r-- runtime/thread_list.h | 15
-rw-r--r-- tools/cpp-define-generator/constant_thread.def | 2
59 files changed, 248 insertions, 125 deletions
diff --git a/runtime/gc/allocation_record.cc b/runtime/gc/allocation_record.cc
index d921900933..e18a955251 100644
--- a/runtime/gc/allocation_record.cc
+++ b/runtime/gc/allocation_record.cc
@@ -181,7 +181,6 @@ void AllocRecordObjectMap::DisallowNewAllocationRecords() {
}
void AllocRecordObjectMap::BroadcastForNewAllocationRecords() {
- CHECK(kUseReadBarrier);
new_record_condition_.Broadcast(Thread::Current());
}
@@ -291,6 +290,9 @@ void AllocRecordObjectMap::RecordAllocation(Thread* self,
// Wait for GC's sweeping to complete and allow new records
while (UNLIKELY((!kUseReadBarrier && !allow_new_record_) ||
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
new_record_condition_.WaitHoldingLocks(self);
}
diff --git a/runtime/gc/allocation_record.h b/runtime/gc/allocation_record.h
index c8b2b89702..90cff6a8c4 100644
--- a/runtime/gc/allocation_record.h
+++ b/runtime/gc/allocation_record.h
@@ -261,7 +261,6 @@ class AllocRecordObjectMap {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::alloc_tracker_lock_);
void BroadcastForNewAllocationRecords()
- REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::alloc_tracker_lock_);
// TODO: Is there a better way to hide the entries_'s type?
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 6dfab8b566..1e1b05c682 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -514,26 +514,6 @@ void ConcurrentCopying::RecordLiveStackFreezeSize(Thread* self) {
live_stack_freeze_size_ = heap_->GetLiveStack()->Size();
}
-class EmptyCheckpoint : public Closure {
- public:
- explicit EmptyCheckpoint(ConcurrentCopying* concurrent_copying)
- : concurrent_copying_(concurrent_copying) {
- }
-
- virtual void Run(Thread* thread) OVERRIDE NO_THREAD_SAFETY_ANALYSIS {
- // Note: self is not necessarily equal to thread since thread may be suspended.
- Thread* self = Thread::Current();
- CHECK(thread == self || thread->IsSuspended() || thread->GetState() == kWaitingPerformingGc)
- << thread->GetState() << " thread " << thread << " self " << self;
- // If thread is a running mutator, then act on behalf of the garbage collector.
- // See the code in ThreadList::RunCheckpoint.
- concurrent_copying_->GetBarrier().Pass(self);
- }
-
- private:
- ConcurrentCopying* const concurrent_copying_;
-};
-
// Used to visit objects in the immune spaces.
inline void ConcurrentCopying::ScanImmuneObject(mirror::Object* obj) {
DCHECK(obj != nullptr);
@@ -835,10 +815,10 @@ void ConcurrentCopying::ProcessFalseGrayStack() {
void ConcurrentCopying::IssueEmptyCheckpoint() {
Thread* self = Thread::Current();
- EmptyCheckpoint check_point(this);
ThreadList* thread_list = Runtime::Current()->GetThreadList();
- gc_barrier_->Init(self, 0);
- size_t barrier_count = thread_list->RunCheckpoint(&check_point);
+ Barrier* barrier = thread_list->EmptyCheckpointBarrier();
+ barrier->Init(self, 0);
+ size_t barrier_count = thread_list->RunEmptyCheckpoint();
// If there are no threads to wait for, which implies that all the checkpoint functions are finished,
// then no need to release the mutator lock.
if (barrier_count == 0) {
@@ -848,7 +828,7 @@ void ConcurrentCopying::IssueEmptyCheckpoint() {
Locks::mutator_lock_->SharedUnlock(self);
{
ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
- gc_barrier_->Increment(self, barrier_count);
+ barrier->Increment(self, barrier_count);
}
Locks::mutator_lock_->SharedLock(self);
}
@@ -1253,6 +1233,10 @@ bool ConcurrentCopying::ProcessMarkStackOnce() {
}
gc_mark_stack_->Reset();
} else if (mark_stack_mode == kMarkStackModeShared) {
+ // Do an empty checkpoint to avoid a race with a mutator preempted in the middle of a read
+ // barrier but before pushing onto the mark stack. b/32508093. Note the weak ref access is
+ // disabled at this point.
+ IssueEmptyCheckpoint();
// Process the shared GC mark stack with a lock.
{
MutexLock mu(self, mark_stack_lock_);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 5de004b7a3..447e06e70e 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -4069,7 +4069,6 @@ void Heap::DisallowNewAllocationRecords() const {
}
void Heap::BroadcastForNewAllocationRecords() const {
- CHECK(kUseReadBarrier);
// Always broadcast without checking IsAllocTrackingEnabled() because IsAllocTrackingEnabled() may
// be set to false while some threads are waiting for system weak access in
// AllocRecordObjectMap::RecordAllocation() and we may fail to wake them up. b/27467554.
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index e8eb69e35c..0c671d269d 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -797,7 +797,6 @@ class Heap {
REQUIRES(!Locks::alloc_tracker_lock_);
void BroadcastForNewAllocationRecords() const
- REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::alloc_tracker_lock_);
void DisableGCForShutdown() REQUIRES(!*gc_complete_lock_);
diff --git a/runtime/gc/reference_processor.cc b/runtime/gc/reference_processor.cc
index 798ecd3d87..2cde7d5731 100644
--- a/runtime/gc/reference_processor.cc
+++ b/runtime/gc/reference_processor.cc
@@ -55,7 +55,6 @@ void ReferenceProcessor::DisableSlowPath(Thread* self) {
}
void ReferenceProcessor::BroadcastForSlowPath(Thread* self) {
- CHECK(kUseReadBarrier);
MutexLock mu(self, *Locks::reference_processor_lock_);
condition_.Broadcast(self);
}
@@ -99,6 +98,9 @@ ObjPtr<mirror::Object> ReferenceProcessor::GetReferent(Thread* self,
}
}
}
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
condition_.WaitHoldingLocks(self);
}
return reference->GetReferent();
@@ -270,6 +272,9 @@ bool ReferenceProcessor::MakeCircularListIfUnenqueued(
// Wait until we are done processing reference.
while ((!kUseReadBarrier && SlowPathEnabled()) ||
(kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
condition_.WaitHoldingLocks(self);
}
// At this point, since the sentinel of the reference is live, it is guaranteed to not be
diff --git a/runtime/gc/system_weak.h b/runtime/gc/system_weak.h
index 3910a280cc..884e90b6dc 100644
--- a/runtime/gc/system_weak.h
+++ b/runtime/gc/system_weak.h
@@ -30,7 +30,8 @@ class AbstractSystemWeakHolder {
virtual void Allow() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
virtual void Disallow() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
- virtual void Broadcast() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
+ // See Runtime::BroadcastForNewSystemWeaks for the broadcast_for_checkpoint definition.
+ virtual void Broadcast(bool broadcast_for_checkpoint) = 0;
virtual void Sweep(IsMarkedVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
@@ -61,19 +62,22 @@ class SystemWeakHolder : public AbstractSystemWeakHolder {
allow_new_system_weak_ = false;
}
- void Broadcast() OVERRIDE
- REQUIRES_SHARED(Locks::mutator_lock_)
+ void Broadcast(bool broadcast_for_checkpoint ATTRIBUTE_UNUSED) OVERRIDE
REQUIRES(!allow_disallow_lock_) {
- CHECK(kUseReadBarrier);
MutexLock mu(Thread::Current(), allow_disallow_lock_);
new_weak_condition_.Broadcast(Thread::Current());
}
protected:
- void Wait(Thread* self) REQUIRES_SHARED(allow_disallow_lock_) {
+ void Wait(Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(allow_disallow_lock_) {
// Wait for GC's sweeping to complete and allow new records
while (UNLIKELY((!kUseReadBarrier && !allow_new_system_weak_) ||
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
new_weak_condition_.WaitHoldingLocks(self);
}
}
diff --git a/runtime/gc/system_weak_test.cc b/runtime/gc/system_weak_test.cc
index af8a444903..9b601c0753 100644
--- a/runtime/gc/system_weak_test.cc
+++ b/runtime/gc/system_weak_test.cc
@@ -58,12 +58,14 @@ struct CountingSystemWeakHolder : public SystemWeakHolder {
disallow_count_++;
}
- void Broadcast() OVERRIDE
- REQUIRES_SHARED(Locks::mutator_lock_)
+ void Broadcast(bool broadcast_for_checkpoint) OVERRIDE
REQUIRES(!allow_disallow_lock_) {
- SystemWeakHolder::Broadcast();
+ SystemWeakHolder::Broadcast(broadcast_for_checkpoint);
- allow_count_++;
+ if (!broadcast_for_checkpoint) {
+ // Don't count the broadcasts for running checkpoints.
+ allow_count_++;
+ }
}
void Sweep(IsMarkedVisitor* visitor) OVERRIDE
diff --git a/runtime/generated/asm_support_gen.h b/runtime/generated/asm_support_gen.h
index 03f5bf6dd4..bea20f1673 100644
--- a/runtime/generated/asm_support_gen.h
+++ b/runtime/generated/asm_support_gen.h
@@ -134,6 +134,10 @@ DEFINE_CHECK_EQ(static_cast<int32_t>(ROSALLOC_SLOT_NEXT_OFFSET), (static_cast<in
DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_SUSPEND_REQUEST), (static_cast<int32_t>((art::kSuspendRequest))))
#define THREAD_CHECKPOINT_REQUEST 2
DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_CHECKPOINT_REQUEST), (static_cast<int32_t>((art::kCheckpointRequest))))
+#define THREAD_EMPTY_CHECKPOINT_REQUEST 4
+DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_EMPTY_CHECKPOINT_REQUEST), (static_cast<int32_t>((art::kEmptyCheckpointRequest))))
+#define THREAD_SUSPEND_OR_CHECKPOINT_REQUEST 7
+DEFINE_CHECK_EQ(static_cast<int32_t>(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), (static_cast<int32_t>((art::kSuspendRequest | art::kCheckpointRequest | art::kEmptyCheckpointRequest))))
#define JIT_CHECK_OSR (-1)
DEFINE_CHECK_EQ(static_cast<int16_t>(JIT_CHECK_OSR), (static_cast<int16_t>((art::jit::kJitCheckForOSR))))
#define JIT_HOTNESS_DISABLE (-2)
diff --git a/runtime/intern_table.cc b/runtime/intern_table.cc
index a61a1878af..577c488e5e 100644
--- a/runtime/intern_table.cc
+++ b/runtime/intern_table.cc
@@ -185,7 +185,6 @@ void InternTable::AddImagesStringsToTable(const std::vector<gc::space::ImageSpac
}
void InternTable::BroadcastForNewInterns() {
- CHECK(kUseReadBarrier);
Thread* self = Thread::Current();
MutexLock mu(self, *Locks::intern_table_lock_);
weak_intern_condition_.Broadcast(self);
diff --git a/runtime/intern_table.h b/runtime/intern_table.h
index 30ff55d33d..67ab48a3af 100644
--- a/runtime/intern_table.h
+++ b/runtime/intern_table.h
@@ -111,7 +111,7 @@ class InternTable {
void DumpForSigQuit(std::ostream& os) const REQUIRES(!Locks::intern_table_lock_);
- void BroadcastForNewInterns() REQUIRES_SHARED(Locks::mutator_lock_);
+ void BroadcastForNewInterns();
// Adds all of the resolved image strings from the image spaces into the intern table. The
// advantage of doing this is preventing expensive DexFile::FindStringId calls. Sets
diff --git a/runtime/interpreter/mterp/arm/footer.S b/runtime/interpreter/mterp/arm/footer.S
index 62e573a7db..cd32ea2e28 100644
--- a/runtime/interpreter/mterp/arm/footer.S
+++ b/runtime/interpreter/mterp/arm/footer.S
@@ -156,7 +156,7 @@ MterpCommonTakenBranch:
REFRESH_IBASE
add r2, rINST, rINST @ r2<- byte offset
FETCH_ADVANCE_INST_RB r2 @ update rPC, load rINST
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bne .L_suspend_request_pending
GET_INST_OPCODE ip @ extract opcode from rINST
GOTO_OPCODE ip @ jump to next instruction
diff --git a/runtime/interpreter/mterp/arm/op_return.S b/runtime/interpreter/mterp/arm/op_return.S
index 1888373256..f9c0f0fa19 100644
--- a/runtime/interpreter/mterp/arm/op_return.S
+++ b/runtime/interpreter/mterp/arm/op_return.S
@@ -8,7 +8,7 @@
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r2, rINST, lsr #8 @ r2<- AA
GET_VREG r0, r2 @ r0<- vAA
diff --git a/runtime/interpreter/mterp/arm/op_return_void.S b/runtime/interpreter/mterp/arm/op_return_void.S
index cbea2bf6a0..a91ccb31f5 100644
--- a/runtime/interpreter/mterp/arm/op_return_void.S
+++ b/runtime/interpreter/mterp/arm/op_return_void.S
@@ -2,7 +2,7 @@
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r0, #0
mov r1, #0
diff --git a/runtime/interpreter/mterp/arm/op_return_void_no_barrier.S b/runtime/interpreter/mterp/arm/op_return_void_no_barrier.S
index 2dde7aeefc..b953f4c7fd 100644
--- a/runtime/interpreter/mterp/arm/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/arm/op_return_void_no_barrier.S
@@ -1,6 +1,6 @@
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r0, #0
mov r1, #0
diff --git a/runtime/interpreter/mterp/arm/op_return_wide.S b/runtime/interpreter/mterp/arm/op_return_wide.S
index ceae878fa4..df582c0831 100644
--- a/runtime/interpreter/mterp/arm/op_return_wide.S
+++ b/runtime/interpreter/mterp/arm/op_return_wide.S
@@ -6,7 +6,7 @@
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r2, rINST, lsr #8 @ r2<- AA
VREG_INDEX_TO_ADDR r2, r2 @ r2<- &fp[AA]
diff --git a/runtime/interpreter/mterp/arm64/footer.S b/runtime/interpreter/mterp/arm64/footer.S
index 7628ed3c47..ada0326649 100644
--- a/runtime/interpreter/mterp/arm64/footer.S
+++ b/runtime/interpreter/mterp/arm64/footer.S
@@ -141,7 +141,7 @@ MterpCommonTakenBranchNoFlags:
add w2, wINST, wINST // w2<- byte offset
FETCH_ADVANCE_INST_RB w2 // update rPC, load wINST
REFRESH_IBASE
- ands lr, lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L_suspend_request_pending
GET_INST_OPCODE ip // extract opcode from wINST
GOTO_OPCODE ip // jump to next instruction
@@ -215,7 +215,7 @@ MterpCommonTakenBranchNoFlags:
*/
MterpCheckSuspendAndContinue:
ldr xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET] // refresh xIBASE
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne check1
GET_INST_OPCODE ip // extract opcode from wINST
GOTO_OPCODE ip // jump to next instruction
@@ -270,7 +270,7 @@ MterpReturn:
ldr lr, [xSELF, #THREAD_FLAGS_OFFSET]
str x0, [x2]
mov x0, xSELF
- ands lr, lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.eq check2
bl MterpSuspendCheck // (self)
check2:
diff --git a/runtime/interpreter/mterp/arm64/op_return.S b/runtime/interpreter/mterp/arm64/op_return.S
index 28630eed48..9f125c7fef 100644
--- a/runtime/interpreter/mterp/arm64/op_return.S
+++ b/runtime/interpreter/mterp/arm64/op_return.S
@@ -8,7 +8,7 @@
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L${opcode}_check
.L${opcode}_return:
lsr w2, wINST, #8 // r2<- AA
diff --git a/runtime/interpreter/mterp/arm64/op_return_void.S b/runtime/interpreter/mterp/arm64/op_return_void.S
index 3a5aa56162..b2530062e8 100644
--- a/runtime/interpreter/mterp/arm64/op_return_void.S
+++ b/runtime/interpreter/mterp/arm64/op_return_void.S
@@ -2,7 +2,7 @@
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L${opcode}_check
.L${opcode}_return:
mov x0, #0
diff --git a/runtime/interpreter/mterp/arm64/op_return_void_no_barrier.S b/runtime/interpreter/mterp/arm64/op_return_void_no_barrier.S
index 1e0695374d..c817169070 100644
--- a/runtime/interpreter/mterp/arm64/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/arm64/op_return_void_no_barrier.S
@@ -1,6 +1,6 @@
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L${opcode}_check
.L${opcode}_return:
mov x0, #0
diff --git a/runtime/interpreter/mterp/arm64/op_return_wide.S b/runtime/interpreter/mterp/arm64/op_return_wide.S
index c6e1d9da80..c47661cd54 100644
--- a/runtime/interpreter/mterp/arm64/op_return_wide.S
+++ b/runtime/interpreter/mterp/arm64/op_return_wide.S
@@ -7,7 +7,7 @@
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L${opcode}_check
.L${opcode}_return:
lsr w2, wINST, #8 // w2<- AA
diff --git a/runtime/interpreter/mterp/mips/footer.S b/runtime/interpreter/mterp/mips/footer.S
index 1363751566..9909dfeb47 100644
--- a/runtime/interpreter/mterp/mips/footer.S
+++ b/runtime/interpreter/mterp/mips/footer.S
@@ -151,7 +151,7 @@ MterpCommonTakenBranchNoFlags:
REFRESH_IBASE()
addu a2, rINST, rINST # a2<- byte offset
FETCH_ADVANCE_INST_RB(a2) # update rPC, load rINST
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bnez ra, .L_suspend_request_pending
GET_INST_OPCODE(t0) # extract opcode from rINST
GOTO_OPCODE(t0) # jump to next instruction
diff --git a/runtime/interpreter/mterp/mips/op_return.S b/runtime/interpreter/mterp/mips/op_return.S
index 894ae18de0..44b93958d2 100644
--- a/runtime/interpreter/mterp/mips/op_return.S
+++ b/runtime/interpreter/mterp/mips/op_return.S
@@ -8,7 +8,7 @@
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
diff --git a/runtime/interpreter/mterp/mips/op_return_void.S b/runtime/interpreter/mterp/mips/op_return_void.S
index 35c1326306..1f616ea198 100644
--- a/runtime/interpreter/mterp/mips/op_return_void.S
+++ b/runtime/interpreter/mterp/mips/op_return_void.S
@@ -2,7 +2,7 @@
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
diff --git a/runtime/interpreter/mterp/mips/op_return_void_no_barrier.S b/runtime/interpreter/mterp/mips/op_return_void_no_barrier.S
index 56968b5fc4..e670c2867f 100644
--- a/runtime/interpreter/mterp/mips/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/mips/op_return_void_no_barrier.S
@@ -1,6 +1,6 @@
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
diff --git a/runtime/interpreter/mterp/mips/op_return_wide.S b/runtime/interpreter/mterp/mips/op_return_wide.S
index 91d62bf550..f0f679dbde 100644
--- a/runtime/interpreter/mterp/mips/op_return_wide.S
+++ b/runtime/interpreter/mterp/mips/op_return_wide.S
@@ -6,7 +6,7 @@
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
diff --git a/runtime/interpreter/mterp/mips64/footer.S b/runtime/interpreter/mterp/mips64/footer.S
index 40631626b6..64772c804d 100644
--- a/runtime/interpreter/mterp/mips64/footer.S
+++ b/runtime/interpreter/mterp/mips64/footer.S
@@ -108,7 +108,7 @@ MterpCommonTakenBranchNoFlags:
REFRESH_IBASE
daddu a2, rINST, rINST # a2<- byte offset
FETCH_ADVANCE_INST_RB a2 # update rPC, load rINST
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bnezc ra, .L_suspend_request_pending
GET_INST_OPCODE v0 # extract opcode from rINST
GOTO_OPCODE v0 # jump to next instruction
@@ -225,7 +225,7 @@ MterpReturn:
lw ra, THREAD_FLAGS_OFFSET(rSELF)
sd a0, 0(a2)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, check2
jal MterpSuspendCheck # (self)
check2:
diff --git a/runtime/interpreter/mterp/mips64/op_return.S b/runtime/interpreter/mterp/mips64/op_return.S
index b10c03f072..edd795f561 100644
--- a/runtime/interpreter/mterp/mips64/op_return.S
+++ b/runtime/interpreter/mterp/mips64/op_return.S
@@ -10,7 +10,7 @@
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
diff --git a/runtime/interpreter/mterp/mips64/op_return_void.S b/runtime/interpreter/mterp/mips64/op_return_void.S
index 05253aea05..f6eee915a5 100644
--- a/runtime/interpreter/mterp/mips64/op_return_void.S
+++ b/runtime/interpreter/mterp/mips64/op_return_void.S
@@ -3,7 +3,7 @@
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
diff --git a/runtime/interpreter/mterp/mips64/op_return_void_no_barrier.S b/runtime/interpreter/mterp/mips64/op_return_void_no_barrier.S
index f67e811c70..4e9b640226 100644
--- a/runtime/interpreter/mterp/mips64/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/mips64/op_return_void_no_barrier.S
@@ -1,7 +1,7 @@
.extern MterpSuspendCheck
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
diff --git a/runtime/interpreter/mterp/mips64/op_return_wide.S b/runtime/interpreter/mterp/mips64/op_return_wide.S
index 544e02794d..91ca1fae59 100644
--- a/runtime/interpreter/mterp/mips64/op_return_wide.S
+++ b/runtime/interpreter/mterp/mips64/op_return_wide.S
@@ -8,7 +8,7 @@
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
diff --git a/runtime/interpreter/mterp/mterp.cc b/runtime/interpreter/mterp/mterp.cc
index 46d5af179f..2bd47bbfcf 100644
--- a/runtime/interpreter/mterp/mterp.cc
+++ b/runtime/interpreter/mterp/mterp.cc
@@ -564,6 +564,8 @@ extern "C" void MterpLogSuspendFallback(Thread* self, ShadowFrame* shadow_frame,
LOG(INFO) << "Checkpoint fallback: " << inst->Opcode(inst_data);
} else if (flags & kSuspendRequest) {
LOG(INFO) << "Suspend fallback: " << inst->Opcode(inst_data);
+ } else if (flags & kEmptyCheckpointRequest) {
+ LOG(INFO) << "Empty checkpoint fallback: " << inst->Opcode(inst_data);
}
}
diff --git a/runtime/interpreter/mterp/out/mterp_arm.S b/runtime/interpreter/mterp/out/mterp_arm.S
index 78a90af54f..4d540d768b 100644
--- a/runtime/interpreter/mterp/out/mterp_arm.S
+++ b/runtime/interpreter/mterp/out/mterp_arm.S
@@ -619,7 +619,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r0, #0
mov r1, #0
@@ -639,7 +639,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r2, rINST, lsr #8 @ r2<- AA
GET_VREG r0, r2 @ r0<- vAA
@@ -658,7 +658,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r2, rINST, lsr #8 @ r2<- AA
VREG_INDEX_TO_ADDR r2, r2 @ r2<- &fp[AA]
@@ -680,7 +680,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r2, rINST, lsr #8 @ r2<- AA
GET_VREG r0, r2 @ r0<- vAA
@@ -3149,7 +3149,7 @@ artMterpAsmInstructionStart = .L_op_nop
/* File: arm/op_return_void_no_barrier.S */
ldr lr, [rSELF, #THREAD_FLAGS_OFFSET]
mov r0, rSELF
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
blne MterpSuspendCheck @ (self)
mov r0, #0
mov r1, #0
@@ -11989,7 +11989,7 @@ MterpCommonTakenBranch:
REFRESH_IBASE
add r2, rINST, rINST @ r2<- byte offset
FETCH_ADVANCE_INST_RB r2 @ update rPC, load rINST
- ands lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bne .L_suspend_request_pending
GET_INST_OPCODE ip @ extract opcode from rINST
GOTO_OPCODE ip @ jump to next instruction
diff --git a/runtime/interpreter/mterp/out/mterp_arm64.S b/runtime/interpreter/mterp/out/mterp_arm64.S
index dafcc3ef6a..42f8c1b08d 100644
--- a/runtime/interpreter/mterp/out/mterp_arm64.S
+++ b/runtime/interpreter/mterp/out/mterp_arm64.S
@@ -616,7 +616,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .Lop_return_void_check
.Lop_return_void_return:
mov x0, #0
@@ -639,7 +639,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .Lop_return_check
.Lop_return_return:
lsr w2, wINST, #8 // r2<- AA
@@ -662,7 +662,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .Lop_return_wide_check
.Lop_return_wide_return:
lsr w2, wINST, #8 // w2<- AA
@@ -687,7 +687,7 @@ artMterpAsmInstructionStart = .L_op_nop
bl MterpThreadFenceForConstructor
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .Lop_return_object_check
.Lop_return_object_return:
lsr w2, wINST, #8 // r2<- AA
@@ -3033,7 +3033,7 @@ artMterpAsmInstructionStart = .L_op_nop
/* File: arm64/op_return_void_no_barrier.S */
ldr w7, [xSELF, #THREAD_FLAGS_OFFSET]
mov x0, xSELF
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .Lop_return_void_no_barrier_check
.Lop_return_void_no_barrier_return:
mov x0, #0
@@ -7082,7 +7082,7 @@ MterpCommonTakenBranchNoFlags:
add w2, wINST, wINST // w2<- byte offset
FETCH_ADVANCE_INST_RB w2 // update rPC, load wINST
REFRESH_IBASE
- ands lr, lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne .L_suspend_request_pending
GET_INST_OPCODE ip // extract opcode from wINST
GOTO_OPCODE ip // jump to next instruction
@@ -7156,7 +7156,7 @@ MterpCommonTakenBranchNoFlags:
*/
MterpCheckSuspendAndContinue:
ldr xIBASE, [xSELF, #THREAD_CURRENT_IBASE_OFFSET] // refresh xIBASE
- ands w7, w7, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands w7, w7, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.ne check1
GET_INST_OPCODE ip // extract opcode from wINST
GOTO_OPCODE ip // jump to next instruction
@@ -7211,7 +7211,7 @@ MterpReturn:
ldr lr, [xSELF, #THREAD_FLAGS_OFFSET]
str x0, [x2]
mov x0, xSELF
- ands lr, lr, #(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ ands lr, lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
b.eq check2
bl MterpSuspendCheck // (self)
check2:
diff --git a/runtime/interpreter/mterp/out/mterp_mips.S b/runtime/interpreter/mterp/out/mterp_mips.S
index c1ba79422d..e17187aa48 100644
--- a/runtime/interpreter/mterp/out/mterp_mips.S
+++ b/runtime/interpreter/mterp/out/mterp_mips.S
@@ -818,7 +818,7 @@ artMterpAsmInstructionStart = .L_op_nop
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
@@ -840,7 +840,7 @@ artMterpAsmInstructionStart = .L_op_nop
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
@@ -861,7 +861,7 @@ artMterpAsmInstructionStart = .L_op_nop
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
@@ -885,7 +885,7 @@ artMterpAsmInstructionStart = .L_op_nop
JAL(MterpThreadFenceForConstructor)
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
@@ -3344,7 +3344,7 @@ artMterpAsmInstructionStart = .L_op_nop
/* File: mips/op_return_void_no_barrier.S */
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqz ra, 1f
JAL(MterpSuspendCheck) # (self)
1:
@@ -12791,7 +12791,7 @@ MterpCommonTakenBranchNoFlags:
REFRESH_IBASE()
addu a2, rINST, rINST # a2<- byte offset
FETCH_ADVANCE_INST_RB(a2) # update rPC, load rINST
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bnez ra, .L_suspend_request_pending
GET_INST_OPCODE(t0) # extract opcode from rINST
GOTO_OPCODE(t0) # jump to next instruction
diff --git a/runtime/interpreter/mterp/out/mterp_mips64.S b/runtime/interpreter/mterp/out/mterp_mips64.S
index 143aeb034c..037787f6b4 100644
--- a/runtime/interpreter/mterp/out/mterp_mips64.S
+++ b/runtime/interpreter/mterp/out/mterp_mips64.S
@@ -637,7 +637,7 @@ artMterpAsmInstructionStart = .L_op_nop
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
@@ -659,7 +659,7 @@ artMterpAsmInstructionStart = .L_op_nop
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
@@ -681,7 +681,7 @@ artMterpAsmInstructionStart = .L_op_nop
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
@@ -705,7 +705,7 @@ artMterpAsmInstructionStart = .L_op_nop
jal MterpThreadFenceForConstructor
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
@@ -3121,7 +3121,7 @@ artMterpAsmInstructionStart = .L_op_nop
.extern MterpSuspendCheck
lw ra, THREAD_FLAGS_OFFSET(rSELF)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, 1f
jal MterpSuspendCheck # (self)
1:
@@ -12179,7 +12179,7 @@ MterpCommonTakenBranchNoFlags:
REFRESH_IBASE
daddu a2, rINST, rINST # a2<- byte offset
FETCH_ADVANCE_INST_RB a2 # update rPC, load rINST
- and ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
bnezc ra, .L_suspend_request_pending
GET_INST_OPCODE v0 # extract opcode from rINST
GOTO_OPCODE v0 # jump to next instruction
@@ -12296,7 +12296,7 @@ MterpReturn:
lw ra, THREAD_FLAGS_OFFSET(rSELF)
sd a0, 0(a2)
move a0, rSELF
- and ra, ra, (THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST)
+ and ra, ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
beqzc ra, check2
jal MterpSuspendCheck # (self)
check2:
diff --git a/runtime/interpreter/mterp/out/mterp_x86.S b/runtime/interpreter/mterp/out/mterp_x86.S
index d676fdab96..695d1e4973 100644
--- a/runtime/interpreter/mterp/out/mterp_x86.S
+++ b/runtime/interpreter/mterp/out/mterp_x86.S
@@ -612,7 +612,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
@@ -634,7 +634,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
@@ -654,7 +654,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
@@ -677,7 +677,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
@@ -3104,7 +3104,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.L_op_return_void_no_barrier: /* 0x73 */
/* File: x86/op_return_void_no_barrier.S */
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
@@ -12678,7 +12678,7 @@ MterpCommonTakenBranch:
je .L_add_batch # counted down to zero - report
.L_resume_backward_branch:
movl rSELF, %eax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
leal (rPC, rINST, 2), rPC
FETCH_INST
jnz .L_suspend_request_pending
diff --git a/runtime/interpreter/mterp/out/mterp_x86_64.S b/runtime/interpreter/mterp/out/mterp_x86_64.S
index df88499a62..2eab58c053 100644
--- a/runtime/interpreter/mterp/out/mterp_x86_64.S
+++ b/runtime/interpreter/mterp/out/mterp_x86_64.S
@@ -587,7 +587,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
@@ -607,7 +607,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
@@ -625,7 +625,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
@@ -646,7 +646,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
@@ -2972,7 +2972,7 @@ SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
.L_op_return_void_no_barrier: /* 0x73 */
/* File: x86_64/op_return_void_no_barrier.S */
movq rSELF, OUT_ARG0
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
@@ -11915,7 +11915,7 @@ MterpCommonTakenBranch:
je .L_add_batch # counted down to zero - report
.L_resume_backward_branch:
movq rSELF, %rax
- testl $(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%rax)
+ testl $(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%rax)
REFRESH_IBASE
leaq (rPC, rINSTq, 2), rPC
FETCH_INST
diff --git a/runtime/interpreter/mterp/x86/footer.S b/runtime/interpreter/mterp/x86/footer.S
index e8c8ca8d79..088cb127dc 100644
--- a/runtime/interpreter/mterp/x86/footer.S
+++ b/runtime/interpreter/mterp/x86/footer.S
@@ -167,7 +167,7 @@ MterpCommonTakenBranch:
je .L_add_batch # counted down to zero - report
.L_resume_backward_branch:
movl rSELF, %eax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
leal (rPC, rINST, 2), rPC
FETCH_INST
jnz .L_suspend_request_pending
diff --git a/runtime/interpreter/mterp/x86/op_return.S b/runtime/interpreter/mterp/x86/op_return.S
index 8e3cfad380..a8ebbed64e 100644
--- a/runtime/interpreter/mterp/x86/op_return.S
+++ b/runtime/interpreter/mterp/x86/op_return.S
@@ -7,7 +7,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
diff --git a/runtime/interpreter/mterp/x86/op_return_void.S b/runtime/interpreter/mterp/x86/op_return_void.S
index a14a4f6394..d9eddf39f2 100644
--- a/runtime/interpreter/mterp/x86/op_return_void.S
+++ b/runtime/interpreter/mterp/x86/op_return_void.S
@@ -1,7 +1,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
diff --git a/runtime/interpreter/mterp/x86/op_return_void_no_barrier.S b/runtime/interpreter/mterp/x86/op_return_void_no_barrier.S
index 1d0e93331b..2fbda6bfe9 100644
--- a/runtime/interpreter/mterp/x86/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/x86/op_return_void_no_barrier.S
@@ -1,5 +1,5 @@
movl rSELF, %eax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
diff --git a/runtime/interpreter/mterp/x86/op_return_wide.S b/runtime/interpreter/mterp/x86/op_return_wide.S
index 7d1850a962..5fff626200 100644
--- a/runtime/interpreter/mterp/x86/op_return_wide.S
+++ b/runtime/interpreter/mterp/x86/op_return_wide.S
@@ -5,7 +5,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movl rSELF, %eax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
jz 1f
movl %eax, OUT_ARG0(%esp)
call SYMBOL(MterpSuspendCheck)
diff --git a/runtime/interpreter/mterp/x86_64/footer.S b/runtime/interpreter/mterp/x86_64/footer.S
index f78f163576..ed5e5eabfa 100644
--- a/runtime/interpreter/mterp/x86_64/footer.S
+++ b/runtime/interpreter/mterp/x86_64/footer.S
@@ -151,7 +151,7 @@ MterpCommonTakenBranch:
je .L_add_batch # counted down to zero - report
.L_resume_backward_branch:
movq rSELF, %rax
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%rax)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%rax)
REFRESH_IBASE
leaq (rPC, rINSTq, 2), rPC
FETCH_INST
diff --git a/runtime/interpreter/mterp/x86_64/op_return.S b/runtime/interpreter/mterp/x86_64/op_return.S
index 07e0e5357c..8cb6cbaee1 100644
--- a/runtime/interpreter/mterp/x86_64/op_return.S
+++ b/runtime/interpreter/mterp/x86_64/op_return.S
@@ -7,7 +7,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
diff --git a/runtime/interpreter/mterp/x86_64/op_return_void.S b/runtime/interpreter/mterp/x86_64/op_return_void.S
index 6a12df318b..ba68e7e444 100644
--- a/runtime/interpreter/mterp/x86_64/op_return_void.S
+++ b/runtime/interpreter/mterp/x86_64/op_return_void.S
@@ -1,7 +1,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
diff --git a/runtime/interpreter/mterp/x86_64/op_return_void_no_barrier.S b/runtime/interpreter/mterp/x86_64/op_return_void_no_barrier.S
index 822b2e85e6..6799da1dbd 100644
--- a/runtime/interpreter/mterp/x86_64/op_return_void_no_barrier.S
+++ b/runtime/interpreter/mterp/x86_64/op_return_void_no_barrier.S
@@ -1,5 +1,5 @@
movq rSELF, OUT_ARG0
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
diff --git a/runtime/interpreter/mterp/x86_64/op_return_wide.S b/runtime/interpreter/mterp/x86_64/op_return_wide.S
index 288eb96f8c..d6d6d1bf5e 100644
--- a/runtime/interpreter/mterp/x86_64/op_return_wide.S
+++ b/runtime/interpreter/mterp/x86_64/op_return_wide.S
@@ -5,7 +5,7 @@
.extern MterpThreadFenceForConstructor
call SYMBOL(MterpThreadFenceForConstructor)
movq rSELF, OUT_ARG0
- testl $$(THREAD_SUSPEND_REQUEST | THREAD_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
+ testl $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(OUT_ARG0)
jz 1f
call SYMBOL(MterpSuspendCheck)
1:
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index 8e76aeb7cd..caf705a9cb 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -562,6 +562,9 @@ jweak JavaVMExt::AddWeakGlobalRef(Thread* self, ObjPtr<mirror::Object> obj) {
}
MutexLock mu(self, *Locks::jni_weak_globals_lock_);
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
weak_globals_add_condition_.WaitHoldingLocks(self);
}
IndirectRef ref = weak_globals_.Add(kIRTFirstSegment, obj);
@@ -648,7 +651,6 @@ void JavaVMExt::AllowNewWeakGlobals() {
}
void JavaVMExt::BroadcastForNewWeakGlobals() {
- CHECK(kUseReadBarrier);
Thread* self = Thread::Current();
MutexLock mu(self, *Locks::jni_weak_globals_lock_);
weak_globals_add_condition_.Broadcast(self);
@@ -694,6 +696,9 @@ ObjPtr<mirror::Object> JavaVMExt::DecodeWeakGlobalLocked(Thread* self, IndirectR
Locks::jni_weak_globals_lock_->AssertHeld(self);
}
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
weak_globals_add_condition_.WaitHoldingLocks(self);
}
return weak_globals_.Get(ref);
@@ -716,6 +721,9 @@ bool JavaVMExt::IsWeakGlobalCleared(Thread* self, IndirectRef ref) {
DCHECK_EQ(IndirectReferenceTable::GetIndirectRefKind(ref), kWeakGlobal);
MutexLock mu(self, *Locks::jni_weak_globals_lock_);
while (UNLIKELY(!MayAccessWeakGlobals(self))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
weak_globals_add_condition_.WaitHoldingLocks(self);
}
// When just checking a weak ref has been cleared, avoid triggering the read barrier in decode
diff --git a/runtime/java_vm_ext.h b/runtime/java_vm_ext.h
index 9e37f1178c..7374920f2b 100644
--- a/runtime/java_vm_ext.h
+++ b/runtime/java_vm_ext.h
@@ -136,7 +136,6 @@ class JavaVMExt : public JavaVM {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::jni_weak_globals_lock_);
void BroadcastForNewWeakGlobals()
- REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::jni_weak_globals_lock_);
jobject AddGlobalRef(Thread* self, ObjPtr<mirror::Object> obj)
diff --git a/runtime/monitor.cc b/runtime/monitor.cc
index eb74fcf7f4..5a7165a16d 100644
--- a/runtime/monitor.cc
+++ b/runtime/monitor.cc
@@ -1330,7 +1330,6 @@ void MonitorList::AllowNewMonitors() {
}
void MonitorList::BroadcastForNewMonitors() {
- CHECK(kUseReadBarrier);
Thread* self = Thread::Current();
MutexLock mu(self, monitor_list_lock_);
monitor_add_condition_.Broadcast(self);
@@ -1341,6 +1340,9 @@ void MonitorList::Add(Monitor* m) {
MutexLock mu(self, monitor_list_lock_);
while (UNLIKELY((!kUseReadBarrier && !allow_new_monitors_) ||
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
+ // Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
+ // presence of threads blocking for weak ref access.
+ self->CheckEmptyCheckpoint();
monitor_add_condition_.WaitHoldingLocks(self);
}
list_.push_front(m);
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index d645c5a398..530b1ab8b1 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1756,10 +1756,10 @@ void Runtime::AllowNewSystemWeaks() {
}
}
-void Runtime::BroadcastForNewSystemWeaks() {
+void Runtime::BroadcastForNewSystemWeaks(bool broadcast_for_checkpoint) {
// This is used for the read barrier case that uses the thread-local
- // Thread::GetWeakRefAccessEnabled() flag.
- CHECK(kUseReadBarrier);
+ // Thread::GetWeakRefAccessEnabled() flag and the checkpoint while weak ref access is disabled
+ // (see ThreadList::RunCheckpoint).
monitor_list_->BroadcastForNewMonitors();
intern_table_->BroadcastForNewInterns();
java_vm_->BroadcastForNewWeakGlobals();
@@ -1767,7 +1767,7 @@ void Runtime::BroadcastForNewSystemWeaks() {
// All other generic system-weak holders.
for (gc::AbstractSystemWeakHolder* holder : system_weak_holders_) {
- holder->Broadcast();
+ holder->Broadcast(broadcast_for_checkpoint);
}
}
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 043ff5d73e..02a6bb697a 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -322,7 +322,10 @@ class Runtime {
void DisallowNewSystemWeaks() REQUIRES_SHARED(Locks::mutator_lock_);
void AllowNewSystemWeaks() REQUIRES_SHARED(Locks::mutator_lock_);
- void BroadcastForNewSystemWeaks() REQUIRES_SHARED(Locks::mutator_lock_);
+ // broadcast_for_checkpoint is true when we broadcast for making blocking threads to respond to
+ // checkpoint requests. It's false when we broadcast to unblock blocking threads after system weak
+ // access is reenabled.
+ void BroadcastForNewSystemWeaks(bool broadcast_for_checkpoint = false);
// Visit all the roots. If only_dirty is true then non-dirty roots won't be visited. If
// clean_dirty is true then dirty roots will be marked as non-dirty after visiting.
diff --git a/runtime/thread-inl.h b/runtime/thread-inl.h
index 5fa9353207..c92305f373 100644
--- a/runtime/thread-inl.h
+++ b/runtime/thread-inl.h
@@ -72,6 +72,19 @@ inline void Thread::CheckSuspend() {
RunCheckpointFunction();
} else if (ReadFlag(kSuspendRequest)) {
FullSuspendCheck();
+ } else if (ReadFlag(kEmptyCheckpointRequest)) {
+ RunEmptyCheckpoint();
+ } else {
+ break;
+ }
+ }
+}
+
+inline void Thread::CheckEmptyCheckpoint() {
+ DCHECK_EQ(Thread::Current(), this);
+ for (;;) {
+ if (ReadFlag(kEmptyCheckpointRequest)) {
+ RunEmptyCheckpoint();
} else {
break;
}
@@ -145,8 +158,13 @@ inline void Thread::TransitionToSuspendedAndRunCheckpoints(ThreadState new_state
RunCheckpointFunction();
continue;
}
+ if (UNLIKELY((old_state_and_flags.as_struct.flags & kEmptyCheckpointRequest) != 0)) {
+ RunEmptyCheckpoint();
+ continue;
+ }
// Change the state but keep the current flags (kCheckpointRequest is clear).
DCHECK_EQ((old_state_and_flags.as_struct.flags & kCheckpointRequest), 0);
+ DCHECK_EQ((old_state_and_flags.as_struct.flags & kEmptyCheckpointRequest), 0);
new_state_and_flags.as_struct.flags = old_state_and_flags.as_struct.flags;
new_state_and_flags.as_struct.state = new_state;
@@ -163,7 +181,8 @@ inline void Thread::TransitionToSuspendedAndRunCheckpoints(ThreadState new_state
inline void Thread::PassActiveSuspendBarriers() {
while (true) {
uint16_t current_flags = tls32_.state_and_flags.as_struct.flags;
- if (LIKELY((current_flags & (kCheckpointRequest | kActiveSuspendBarrier)) == 0)) {
+ if (LIKELY((current_flags &
+ (kCheckpointRequest | kEmptyCheckpointRequest | kActiveSuspendBarrier)) == 0)) {
break;
} else if ((current_flags & kActiveSuspendBarrier) != 0) {
PassActiveSuspendBarriers(this);
@@ -211,7 +230,8 @@ inline ThreadState Thread::TransitionFromSuspendedToRunnable() {
}
} else if ((old_state_and_flags.as_struct.flags & kActiveSuspendBarrier) != 0) {
PassActiveSuspendBarriers(this);
- } else if ((old_state_and_flags.as_struct.flags & kCheckpointRequest) != 0) {
+ } else if ((old_state_and_flags.as_struct.flags &
+ (kCheckpointRequest | kEmptyCheckpointRequest)) != 0) {
// Impossible
LOG(FATAL) << "Transitioning to runnable with checkpoint flag, "
<< " flags=" << old_state_and_flags.as_struct.flags
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 3f7d0868b0..2434ee258d 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1148,6 +1148,12 @@ void Thread::RunCheckpointFunction() {
} while (!done);
}
+void Thread::RunEmptyCheckpoint() {
+ DCHECK_EQ(Thread::Current(), this);
+ AtomicClearFlag(kEmptyCheckpointRequest);
+ Runtime::Current()->GetThreadList()->EmptyCheckpointBarrier()->Pass(this);
+}
+
bool Thread::RequestCheckpoint(Closure* function) {
union StateAndFlags old_state_and_flags;
old_state_and_flags.as_int = tls32_.state_and_flags.as_int;
@@ -1175,6 +1181,28 @@ bool Thread::RequestCheckpoint(Closure* function) {
return success;
}
+bool Thread::RequestEmptyCheckpoint() {
+ union StateAndFlags old_state_and_flags;
+ old_state_and_flags.as_int = tls32_.state_and_flags.as_int;
+ if (old_state_and_flags.as_struct.state != kRunnable) {
+ // If it's not runnable, we don't need to do anything because it won't be in the middle of a
+ // heap access (eg. the read barrier).
+ return false;
+ }
+
+ // We must be runnable to request a checkpoint.
+ DCHECK_EQ(old_state_and_flags.as_struct.state, kRunnable);
+ union StateAndFlags new_state_and_flags;
+ new_state_and_flags.as_int = old_state_and_flags.as_int;
+ new_state_and_flags.as_struct.flags |= kEmptyCheckpointRequest;
+ bool success = tls32_.state_and_flags.as_atomic_int.CompareExchangeStrongSequentiallyConsistent(
+ old_state_and_flags.as_int, new_state_and_flags.as_int);
+ if (success) {
+ TriggerSuspend();
+ }
+ return success;
+}
+
class BarrierClosure : public Closure {
public:
explicit BarrierClosure(Closure* wrapped) : wrapped_(wrapped), barrier_(0) {}
@@ -1833,7 +1861,8 @@ Thread::~Thread() {
tlsPtr_.jni_env = nullptr;
}
CHECK_NE(GetState(), kRunnable);
- CHECK_NE(ReadFlag(kCheckpointRequest), true);
+ CHECK(!ReadFlag(kCheckpointRequest));
+ CHECK(!ReadFlag(kEmptyCheckpointRequest));
CHECK(tlsPtr_.checkpoint_function == nullptr);
CHECK_EQ(checkpoint_overflow_.size(), 0u);
CHECK(tlsPtr_.flip_function == nullptr);
diff --git a/runtime/thread.h b/runtime/thread.h
index 75b5b123da..12b80a907b 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -105,7 +105,8 @@ enum ThreadFlag {
kSuspendRequest = 1, // If set implies that suspend_count_ > 0 and the Thread should enter the
// safepoint handler.
kCheckpointRequest = 2, // Request that the thread do some checkpoint work and then continue.
- kActiveSuspendBarrier = 4 // Register that at least 1 suspend barrier needs to be passed.
+ kEmptyCheckpointRequest = 4, // Request that the thread do empty checkpoint and then continue.
+ kActiveSuspendBarrier = 8, // Register that at least 1 suspend barrier needs to be passed.
};
enum class StackedShadowFrameType {
@@ -171,6 +172,9 @@ class Thread {
// Process pending thread suspension request and handle if pending.
void CheckSuspend() REQUIRES_SHARED(Locks::mutator_lock_);
+ // Process a pending empty checkpoint if pending.
+ void CheckEmptyCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
+
static Thread* FromManagedThread(const ScopedObjectAccessAlreadyRunnable& ts,
mirror::Object* thread_peer)
REQUIRES(Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_)
@@ -239,6 +243,8 @@ class Thread {
REQUIRES(Locks::thread_suspend_count_lock_);
void RequestSynchronousCheckpoint(Closure* function)
REQUIRES(!Locks::thread_suspend_count_lock_, !Locks::thread_list_lock_);
+ bool RequestEmptyCheckpoint()
+ REQUIRES(Locks::thread_suspend_count_lock_);
void SetFlipFunction(Closure* function);
Closure* GetFlipFunction();
@@ -1218,6 +1224,7 @@ class Thread {
REQUIRES(Locks::thread_suspend_count_lock_);
void RunCheckpointFunction();
+ void RunEmptyCheckpoint();
bool PassActiveSuspendBarriers(Thread* self)
REQUIRES(!Locks::thread_suspend_count_lock_);
diff --git a/runtime/thread_list.cc b/runtime/thread_list.cc
index eba6666dec..53d2d4aff5 100644
--- a/runtime/thread_list.cc
+++ b/runtime/thread_list.cc
@@ -32,6 +32,7 @@
#include "base/timing_logger.h"
#include "debugger.h"
#include "gc/collector/concurrent_copying.h"
+#include "gc/reference_processor.h"
#include "jni_internal.h"
#include "lock_word.h"
#include "monitor.h"
@@ -68,7 +69,8 @@ ThreadList::ThreadList()
debug_suspend_all_count_(0),
unregistering_count_(0),
suspend_all_historam_("suspend all histogram", 16, 64),
- long_suspend_(false) {
+ long_suspend_(false),
+ empty_checkpoint_barrier_(new Barrier(0)) {
CHECK(Monitor::IsValidLockWord(LockWord::FromThinLockId(kMaxThreadId, 1, 0U)));
}
@@ -373,6 +375,43 @@ size_t ThreadList::RunCheckpoint(Closure* checkpoint_function, Closure* callback
return count;
}
+size_t ThreadList::RunEmptyCheckpoint() {
+ Thread* self = Thread::Current();
+ Locks::mutator_lock_->AssertNotExclusiveHeld(self);
+ Locks::thread_list_lock_->AssertNotHeld(self);
+ Locks::thread_suspend_count_lock_->AssertNotHeld(self);
+
+ size_t count = 0;
+ {
+ MutexLock mu(self, *Locks::thread_list_lock_);
+ MutexLock mu2(self, *Locks::thread_suspend_count_lock_);
+ for (Thread* thread : list_) {
+ if (thread != self) {
+ while (true) {
+ if (thread->RequestEmptyCheckpoint()) {
+ // This thread will run an empty checkpoint (decrement the empty checkpoint barrier)
+ // some time in the near future.
+ ++count;
+ break;
+ }
+ if (thread->GetState() != kRunnable) {
+ // It's seen suspended, we are done because it must not be in the middle of a mutator
+ // heap access.
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ // Wake up the threads blocking for weak ref access so that they will respond to the empty
+ // checkpoint request. Otherwise we will hang as they are blocking in the kRunnable state.
+ Runtime::Current()->GetHeap()->GetReferenceProcessor()->BroadcastForSlowPath(self);
+ Runtime::Current()->BroadcastForNewSystemWeaks(/*broadcast_for_checkpoint*/true);
+
+ return count;
+}
+
// Request that a checkpoint function be run on all active (non-suspended)
// threads. Returns the number of successful requests.
size_t ThreadList::RunCheckpointOnRunnableThreads(Closure* checkpoint_function) {
diff --git a/runtime/thread_list.h b/runtime/thread_list.h
index b455e31e4c..133d43029a 100644
--- a/runtime/thread_list.h
+++ b/runtime/thread_list.h
@@ -17,6 +17,7 @@
#ifndef ART_RUNTIME_THREAD_LIST_H_
#define ART_RUNTIME_THREAD_LIST_H_
+#include "barrier.h"
#include "base/histogram.h"
#include "base/mutex.h"
#include "base/value_object.h"
@@ -100,6 +101,14 @@ class ThreadList {
size_t RunCheckpoint(Closure* checkpoint_function, Closure* callback = nullptr)
REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
+ // Run an empty checkpoint on threads. Wait until threads pass the next suspend point or are
+ // suspended. This is used to ensure that the threads finish or aren't in the middle of an
+ // in-flight mutator heap access (eg. a read barrier.) Runnable threads will respond by
+ // decrementing the empty checkpoint barrier count. This works even when the weak ref access is
+ // disabled. Only one concurrent use is currently supported.
+ size_t RunEmptyCheckpoint()
+ REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
+
size_t RunCheckpointOnRunnableThreads(Closure* checkpoint_function)
REQUIRES(!Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_);
@@ -158,6 +167,10 @@ class ThreadList {
void DumpNativeStacks(std::ostream& os)
REQUIRES(!Locks::thread_list_lock_);
+ Barrier* EmptyCheckpointBarrier() {
+ return empty_checkpoint_barrier_.get();
+ }
+
private:
uint32_t AllocThreadId(Thread* self);
void ReleaseThreadId(Thread* self, uint32_t id) REQUIRES(!Locks::allocated_thread_ids_lock_);
@@ -203,6 +216,8 @@ class ThreadList {
// Whether or not the current thread suspension is long.
bool long_suspend_;
+ std::unique_ptr<Barrier> empty_checkpoint_barrier_;
+
friend class Thread;
DISALLOW_COPY_AND_ASSIGN(ThreadList);
diff --git a/tools/cpp-define-generator/constant_thread.def b/tools/cpp-define-generator/constant_thread.def
index af5ca21f17..1364b558ec 100644
--- a/tools/cpp-define-generator/constant_thread.def
+++ b/tools/cpp-define-generator/constant_thread.def
@@ -25,5 +25,7 @@
DEFINE_THREAD_CONSTANT(SUSPEND_REQUEST, int32_t, art::kSuspendRequest)
DEFINE_THREAD_CONSTANT(CHECKPOINT_REQUEST, int32_t, art::kCheckpointRequest)
+DEFINE_THREAD_CONSTANT(EMPTY_CHECKPOINT_REQUEST, int32_t, art::kEmptyCheckpointRequest)
+DEFINE_THREAD_CONSTANT(SUSPEND_OR_CHECKPOINT_REQUEST, int32_t, art::kSuspendRequest | art::kCheckpointRequest | art::kEmptyCheckpointRequest)
#undef DEFINE_THREAD_CONSTANT