Move Heap parameters to ObjPtr

Delete some unused object dumping code.

Test: test-art-host

Bug: 31113334

Change-Id: I747220caafe6679591fd4b361d7f50383a046164
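
Illustration (not part of the patch): the pattern applied throughout this change is that allocation pre-fence visitors now receive the freshly allocated object as ObjPtr<mirror::Object> and downcast it with ObjPtr<T>::DownCast() instead of down_cast<T*>(). The self-contained C++ sketch below shows that shape only; ObjPtr here is a hypothetical stand-in for art::ObjPtr (the real class also guards against stale pointers), and Object, Array and SetLengthVisitor are toy analogues of the mirror types touched below.

#include <cassert>
#include <cstddef>
#include <cstdint>

// Toy stand-in for art::ObjPtr<T>: a thin wrapper around a raw pointer.
template <typename T>
class ObjPtr {
 public:
  ObjPtr(T* ptr = nullptr) : ptr_(ptr) {}  // non-explicit for brevity in this sketch
  T* Ptr() const { return ptr_; }
  T* operator->() const { return ptr_; }

  // Mirrors the ObjPtr<T>::DownCast(obj) calls in the diff: a static downcast
  // from a base-typed handle to a derived-typed handle.
  template <typename U>
  static ObjPtr<T> DownCast(ObjPtr<U> obj) {
    return ObjPtr<T>(static_cast<T*>(obj.Ptr()));
  }

 private:
  T* ptr_;
};

// Toy analogues of mirror::Object / mirror::Array.
struct Object {};
struct Array : Object {
  void SetLength(int32_t length) { length_ = length; }
  int32_t length_ = 0;
};

// Shape of SetLengthVisitor after this change: the allocation pre-fence
// visitor takes ObjPtr<Object> rather than a raw Object*.
class SetLengthVisitor {
 public:
  explicit SetLengthVisitor(int32_t length) : length_(length) {}

  void operator()(ObjPtr<Object> obj, size_t usable_size) const {
    static_cast<void>(usable_size);  // unused, as in the original visitor
    ObjPtr<Array> array = ObjPtr<Array>::DownCast(obj);
    array->SetLength(length_);
  }

 private:
  const int32_t length_;
};

int main() {
  Array backing;
  SetLengthVisitor visitor(16);
  visitor(ObjPtr<Object>(&backing), sizeof(Array));
  assert(backing.length_ == 16);
  return 0;
}
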
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 9d7f98f..7cbcac8 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -100,10 +100,10 @@
   explicit SetLengthVisitor(int32_t length) : length_(length) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsArray as object is not yet in live bitmap or allocation stack.
-    Array* array = down_cast<Array*>(obj);
+    ObjPtr<Array> array = ObjPtr<Array>::DownCast(obj);
     // DCHECK(array->IsArrayInstance());
     array->SetLength(length_);
   }
@@ -124,10 +124,10 @@
       component_size_shift_(component_size_shift) {
   }
 
-  void operator()(Object* obj, size_t usable_size) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsArray as object is not yet in live bitmap or allocation stack.
-    Array* array = down_cast<Array*>(obj);
+    ObjPtr<Array> array = ObjPtr<Array>::DownCast(obj);
     // DCHECK(array->IsArrayInstance());
     int32_t length = (usable_size - header_size_) >> component_size_shift_;
     DCHECK_GE(length, minimum_length_);
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index cc088b8..98d383d 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -707,9 +707,13 @@
   if (!kCheckAddFinalizer) {
     DCHECK(!IsFinalizable());
   }
-  mirror::Object* obj =
-      heap->AllocObjectWithAllocator<kIsInstrumented, false>(self, this, this->object_size_,
-                                                             allocator_type, VoidFunctor());
+  // Note that the this pointer may be invalidated after the allocation.
+  ObjPtr<mirror::Object> obj =
+      heap->AllocObjectWithAllocator<kIsInstrumented, false>(self,
+                                                             this,
+                                                             this->object_size_,
+                                                             allocator_type,
+                                                             VoidFunctor());
   if (add_finalizer && LIKELY(obj != nullptr)) {
     heap->AddFinalizerReference(self, &obj);
     if (UNLIKELY(self->IsExceptionPending())) {
@@ -717,7 +721,7 @@
       obj = nullptr;
     }
   }
-  return obj;
+  return obj.Ptr();
 }
 
 inline Object* Class::AllocObject(Thread* self) {
@@ -879,11 +883,11 @@
   SetFieldBoolean<false, false>(GetSlowPathFlagOffset(), enabled);
 }
 
-inline void Class::InitializeClassVisitor::operator()(
-    mirror::Object* obj, size_t usable_size) const {
+inline void Class::InitializeClassVisitor::operator()(ObjPtr<mirror::Object> obj,
+                                                      size_t usable_size) const {
   DCHECK_LE(class_size_, usable_size);
   // Avoid AsClass as object is not yet in live bitmap or allocation stack.
-  mirror::Class* klass = down_cast<mirror::Class*>(obj);
+  ObjPtr<mirror::Class> klass = ObjPtr<mirror::Class>::DownCast(obj);
   // DCHECK(klass->IsClass());
   klass->SetClassSize(class_size_);
   klass->SetPrimitiveType(Primitive::kPrimNot);  // Default to not being primitive.
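
Illustration (not part of the patch): the comment added in Class::Alloc above ("the this pointer may be invalidated after the allocation") is the motivation for the ObjPtr plumbing: an allocation can trigger a moving GC, so a raw pointer held across it (including this) may refer to the object's old location. The self-contained sketch below shows the same hazard with a deliberately simplified heap whose storage can move on allocation; MovingHeap, Handle and Node are hypothetical names used only for this illustration, not ART APIs.

#include <cassert>
#include <cstddef>
#include <vector>

struct Node { int value = 0; };

// A deliberately naive "moving heap": objects live contiguously in a vector,
// so an allocation may relocate every existing object (like a moving GC).
class MovingHeap {
 public:
  // A handle stays valid across moves because it is resolved lazily.
  struct Handle {
    MovingHeap* heap;
    size_t index;
    Node* Ptr() const { return &heap->storage_[index]; }
  };

  Handle Alloc() {
    storage_.push_back(Node{});  // may reallocate and move all existing Nodes
    return Handle{this, storage_.size() - 1};
  }

 private:
  std::vector<Node> storage_;
};

int main() {
  MovingHeap heap;
  MovingHeap::Handle first = heap.Alloc();

  Node* raw = first.Ptr();       // raw pointer taken before further allocation
  for (int i = 0; i < 1000; ++i) {
    heap.Alloc();                // storage may move; `raw` can now dangle
  }
  static_cast<void>(raw);        // must not be dereferenced past this point

  // first.Ptr() re-resolves the object's current location, which is the
  // property that handle-like types provide across allocations.
  first.Ptr()->value = 42;
  assert(first.Ptr()->value == 42);
  return 0;
}
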
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 40742d2..7606915 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -998,7 +998,7 @@
         copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) {
   }
 
-  void operator()(mirror::Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<mirror::Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     StackHandleScope<1> hs(self_);
     Handle<mirror::Class> h_new_class_obj(hs.NewHandle(obj->AsClass()));
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index a0d6f37..725939a 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -1232,7 +1232,7 @@
     explicit InitializeClassVisitor(uint32_t class_size) : class_size_(class_size) {
     }
 
-    void operator()(mirror::Object* obj, size_t usable_size) const
+    void operator()(ObjPtr<mirror::Object> obj, size_t usable_size) const
         REQUIRES_SHARED(Locks::mutator_lock_);
 
    private:
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 9d3c26e..dbfe1d9 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -43,10 +43,10 @@
 
 class CopyReferenceFieldsWithReadBarrierVisitor {
  public:
-  explicit CopyReferenceFieldsWithReadBarrierVisitor(Object* dest_obj)
+  explicit CopyReferenceFieldsWithReadBarrierVisitor(ObjPtr<Object> dest_obj)
       : dest_obj_(dest_obj) {}
 
-  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool /* is_static */) const
       ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
     // GetFieldObject() contains a RB.
     Object* ref = obj->GetFieldObject<Object>(offset);
@@ -55,7 +55,7 @@
     dest_obj_->SetFieldObjectWithoutWriteBarrier<false, false>(offset, ref);
   }
 
-  void operator()(mirror::Class* klass, mirror::Reference* ref) const
+  void operator()(ObjPtr<mirror::Class> klass, mirror::Reference* ref) const
       ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
     // Copy java.lang.ref.Reference.referent which isn't visited in
     // Object::VisitReferences().
@@ -69,18 +69,18 @@
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
 
  private:
-  Object* const dest_obj_;
+  ObjPtr<Object> const dest_obj_;
 };
 
 Object* Object::CopyObject(Thread* self,
-                           mirror::Object* dest,
-                           mirror::Object* src,
+                           ObjPtr<mirror::Object> dest,
+                           ObjPtr<mirror::Object> src,
                            size_t num_bytes) {
   // Copy instance data.  Don't assume memcpy copies by words (b/32012820).
   {
     const size_t offset = sizeof(Object);
-    uint8_t* src_bytes = reinterpret_cast<uint8_t*>(src) + offset;
-    uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest) + offset;
+    uint8_t* src_bytes = reinterpret_cast<uint8_t*>(src.Ptr()) + offset;
+    uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest.Ptr()) + offset;
     num_bytes -= offset;
     DCHECK_ALIGNED(src_bytes, sizeof(uintptr_t));
     DCHECK_ALIGNED(dst_bytes, sizeof(uintptr_t));
@@ -131,7 +131,7 @@
   if (c->IsFinalizable()) {
     heap->AddFinalizerReference(self, &dest);
   }
-  return dest;
+  return dest.Ptr();
 }
 
 // An allocation pre-fence visitor that copies the object.
@@ -141,7 +141,7 @@
       : self_(self), orig_(orig), num_bytes_(num_bytes) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     Object::CopyObject(self_, obj, orig_->Get(), num_bytes_);
   }
@@ -161,14 +161,14 @@
   size_t num_bytes = SizeOf();
   StackHandleScope<1> hs(self);
   Handle<Object> this_object(hs.NewHandle(this));
-  Object* copy;
+  ObjPtr<Object> copy;
   CopyObjectVisitor visitor(self, &this_object, num_bytes);
   if (heap->IsMovableObject(this)) {
     copy = heap->AllocObject<true>(self, GetClass(), num_bytes, visitor);
   } else {
     copy = heap->AllocNonMovableObject<true>(self, GetClass(), num_bytes, visitor);
   }
-  return copy;
+  return copy.Ptr();
 }
 
 uint32_t Object::GenerateIdentityHashCode() {
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 9ddf995..175b0c3 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -612,7 +612,9 @@
   // A utility function that copies an object in a read barrier and
   // write barrier-aware way. This is internally used by Clone() and
   // Class::CopyOf().
-  static Object* CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,
+  static Object* CopyObject(Thread* self,
+                            ObjPtr<mirror::Object> dest,
+                            ObjPtr<mirror::Object> src,
                             size_t num_bytes)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index aea6ff1..cf902af 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -43,10 +43,10 @@
   explicit SetStringCountVisitor(int32_t count) : count_(count) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
-    String* string = down_cast<String*>(obj);
+    ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
     DCHECK(!string->IsCompressed() || kUseStringCompression);
   }
@@ -63,10 +63,10 @@
       : count_(count), src_array_(src_array), offset_(offset), high_byte_(high_byte) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
-    String* string = down_cast<String*>(obj);
+    ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
     DCHECK(!string->IsCompressed() || kUseStringCompression);
     int32_t length = String::GetLengthFromCount(count_);
@@ -99,10 +99,10 @@
     count_(count), src_array_(src_array), offset_(offset) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
-    String* string = down_cast<String*>(obj);
+    ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
     const uint16_t* const src = src_array_->GetData() + offset_;
     const int32_t length = String::GetLengthFromCount(count_);
@@ -131,10 +131,10 @@
     count_(count), src_string_(src_string), offset_(offset) {
   }
 
-  void operator()(Object* obj, size_t usable_size ATTRIBUTE_UNUSED) const
+  void operator()(ObjPtr<Object> obj, size_t usable_size ATTRIBUTE_UNUSED) const
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
-    String* string = down_cast<String*>(obj);
+    ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
     const int32_t length = String::GetLengthFromCount(count_);
     bool compressible = kUseStringCompression && String::GetCompressionFlagFromCount(count_);