Diffstat (limited to 'runtime/gc/heap.h')
-rw-r--r--  runtime/gc/heap.h | 42
1 file changed, 3 insertions(+), 39 deletions(-)
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 0593e45613..bbddf57090 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -383,28 +383,8 @@ class Heap {
bool sorted = false)
REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
- // Approximates whether the object in question was explicitly requested to be nonmovable.
- // May rarely err on the side of claiming immovability for objects that were allocated movable,
- // but will not be moved.
- // Returns true if and only if one of the following is true:
- // 1) The object was allocated as nonmovable, whether or not it has moved to ZygoteSpace.
- // 2) All objects are being allocated in a non-movable space.
- // 3) The CC collector decided to spuriously allocate in non-moving space because it ran
- // out of memory at an inopportune time.
- // This is used primarily to determine Object.clone() behavior, where (2)
- // doesn't matter. (3) is unfortunate, but we can live with it.
- // SHOULD NOT BE CALLED ON CLASS OBJECTS.
- bool IsNonMovable(ObjPtr<mirror::Object> obj) const REQUIRES_SHARED(Locks::mutator_lock_);
-
- // The negation of the above, but resolves ambiguous cases in the direction of assuming
- // movability. Used for partial error checking where an object must be movable.
- EXPORT bool PossiblyAllocatedMovable(ObjPtr<mirror::Object> obj) const
- REQUIRES_SHARED(Locks::mutator_lock_);
-
- // Returns true if there is any chance that the object (obj) will move. Returns false for image
- // and zygote space, since we don't actually move objects in those spaces. Unlike the preceding
- // function, the result here depends on whether the object was moved to zygote or image space.
- bool ObjectMayMove(ObjPtr<mirror::Object> obj) const REQUIRES_SHARED(Locks::mutator_lock_);
+ // Returns true if there is any chance that the object (obj) will move.
+ bool IsMovableObject(ObjPtr<mirror::Object> obj) const REQUIRES_SHARED(Locks::mutator_lock_);
// Enables us to disable compacting GC until objects are released.
EXPORT void IncrementDisableMovingGC(Thread* self) REQUIRES(!*gc_complete_lock_);
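
The hunk above collapses the three-predicate API (IsNonMovable / PossiblyAllocatedMovable / ObjectMayMove) back into the single restored IsMovableObject(). A minimal, self-contained sketch of the kind of Object.clone() policy the removed comments describe; the Object stand-in, its in_non_moving_space field, and CloneObject are all hypothetical illustrations, not ART code:

// Stand-ins for ART types; this whole sketch is hypothetical, not ART code.
struct Object {
  bool in_non_moving_space;
};

class Heap {
 public:
  // Mirrors the restored predicate: true if there is any chance the
  // object will move, i.e. it does not live in a pinned space.
  bool IsMovableObject(const Object* obj) const {
    return !obj->in_non_moving_space;
  }
};

// Hypothetical Object.clone() policy per the removed comments: the copy
// inherits the source's movability, so callers relying on a stable address
// for a non-movable object can rely on its clone's address too.
Object* CloneObject(const Heap& heap, const Object* src) {
  Object* copy = new Object(*src);
  copy->in_non_moving_space = !heap.IsMovableObject(src);
  return copy;
}

The real predicate additionally returns false for image and zygote spaces, since objects in those spaces are never moved.
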
@@ -1047,12 +1027,6 @@ class Heap {
return size < pud_size ? pmd_size : pud_size;
}
- // Add a reference to the set of preexisting zygote nonmovable objects.
- void AddNonMovableZygoteObject(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
- non_movable_zygote_objects_.insert(
- mirror::CompressedReference<mirror::Object>::FromMirrorPtr(obj));
- }
-
private:
class ConcurrentGCTask;
class CollectorTransitionTask;
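
The removed AddNonMovableZygoteObject() registered each pre-zygote non-movable allocation in an ordered set keyed by compressed reference. A self-contained sketch of that registration pattern, with plain uintptr_t keys standing in for mirror::CompressedReference<mirror::Object> and a hypothetical FakeHeap wrapper:

#include <cstdint>
#include <set>

// Hypothetical stand-in for the removed registration logic; uintptr_t
// replaces mirror::CompressedReference<mirror::Object>, which packs an
// object address into 32 bits.
class FakeHeap {
 public:
  void AddNonMovableZygoteObject(const void* obj) {
    non_movable_zygote_objects_.insert(reinterpret_cast<uintptr_t>(obj));
  }
  bool IsNonMovableZygoteObject(const void* obj) const {
    return non_movable_zygote_objects_.count(
               reinterpret_cast<uintptr_t>(obj)) != 0;
  }

 private:
  // An ordered set keeps the footprint small for the few pre-zygote
  // non-movable objects; the removed TODO notes that HashSet's minimum
  // size would be too big.
  std::set<uintptr_t> non_movable_zygote_objects_;
};
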
@@ -1371,7 +1345,7 @@ class Heap {
std::vector<space::AllocSpace*> alloc_spaces_;
// A space where non-movable objects are allocated; when compaction is enabled it contains
- // Classes, and non moving objects.
+ // Classes, ArtMethods, ArtFields, and non moving objects.
space::MallocSpace* non_moving_space_;
// Space which we use for the kAllocatorTypeROSAlloc.
@@ -1785,16 +1759,6 @@ class Heap {
std::unique_ptr<Verification> verification_;
- // Non-class immovable objects allocated before we created zygote space.
- // TODO: We may need a smaller data structure. Unfortunately, HashSet's minimum size is too big.
- struct CRComparator {
- bool operator()(mirror::CompressedReference<mirror::Object> x,
- mirror::CompressedReference<mirror::Object> y) const {
- return x.AsVRegValue() < y.AsVRegValue();
- }
- };
- std::set<mirror::CompressedReference<mirror::Object>, CRComparator> non_movable_zygote_objects_;
-
friend class CollectorTransitionTask;
friend class collector::GarbageCollector;
friend class collector::ConcurrentCopying;
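
The removed CRComparator supplied the strict weak ordering that std::set requires, comparing compressed references by their raw 32-bit encoding. A standalone sketch of the same pattern, with a stand-in CompressedRef type (the real mirror::CompressedReference exposes this value via AsVRegValue()):

#include <cstdint>
#include <set>

// Stand-in for mirror::CompressedReference<mirror::Object>: a 32-bit
// encoding of an object pointer, exposed as a "vreg value".
struct CompressedRef {
  uint32_t value;
  uint32_t AsVRegValue() const { return value; }
};

// Same shape as the removed CRComparator: a strict weak ordering over the
// raw encoded value, which is all std::set needs for unique, sorted keys.
struct CRComparator {
  bool operator()(CompressedRef x, CompressedRef y) const {
    return x.AsVRegValue() < y.AsVRegValue();
  }
};

int main() {
  std::set<CompressedRef, CRComparator> non_movable_zygote_objects;
  non_movable_zygote_objects.insert(CompressedRef{0x1000});
  non_movable_zygote_objects.insert(CompressedRef{0x1000});  // duplicate, ignored
  return non_movable_zygote_objects.size() == 1 ? 0 : 1;
}
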