Use ClassStatus::kVisiblyInitialized for allocations.

And move the "initialized but not visibly initialized" check
to ClassLinker::EnsureInitialized().

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_taimen-userdebug boots.
Test: run-gtests.sh
Test: testrunner.py --target --optimizing
Bug: 36692143
Change-Id: Ib5e19326b1149b2aef586b905ce89470c3a8e405
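
For context before the diff, the class-status ladder this change builds on, as a minimal sketch (the real enum lives in runtime/class_status.h and has many more states; exact numeric values are elided here):

```cpp
#include <cstdint>

// Illustrative subset of ART's ClassStatus. Ordering matters because the
// runtime tests status with comparisons, e.g. IsInitialized() is roughly
// "status >= kInitialized". A class becomes kInitialized when its <clinit>
// completes on the initializing thread, and kVisiblyInitialized once the
// runtime has made those stores visible to every other thread.
enum class ClassStatus : uint8_t {
  kNotReady,
  kVerified,
  kInitializing,        // <clinit> is running.
  kInitialized,         // Initialized, but maybe not yet visible everywhere.
  kVisiblyInitialized,  // Initialization visible to all threads.
};
```

Reaching kVisiblyInitialized already implies cross-thread visibility, which is what lets the fast paths below key on it and drop their fences.
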
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index ecda78d..7ac9147 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1200,13 +1200,11 @@
 
     ldr    r3, [r0, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (r3)
     cmp    r3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE        // Check if the size is for a thread
-                                                              // local allocation. Also does the
-                                                              // initialized and finalizable checks.
-    // When isInitialized == 0, then the class is potentially not yet initialized.
-    // If the class is not yet initialized, the object size will be very large to force the branch
-    // below to be taken.
+                                                              // local allocation.
+    // If the class is not yet visibly initialized, or it is finalizable,
+    // the object size will be very large to force the branch below to be taken.
     //
-    // See InitializeClassVisitors in class-inl.h for more details.
+    // See Class::SetStatus() in class.cc for more details.
     bhs    .Lslow_path\c_name
                                                               // Compute the rosalloc bracket index
                                                               // from the size. Since the size is
@@ -1274,19 +1272,9 @@
     str    r1, [r12, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
 
     mov    r0, r3                                             // Set the return value and return.
-.if \isInitialized == 0
-    // This barrier is only necessary when the allocation also requires
-    // a class initialization check.
-    //
-    // If the class is already observably initialized, then new-instance allocations are protected
+    // No barrier. The class is already visibly initialized (otherwise the fast
+    // path size check above would fail) and new-instance allocations are protected
     // from publishing by the compiler which inserts its own StoreStore barrier.
-    dmb    ish
-    // Use a "dmb ish" fence here because if there are later loads of statics (e.g. class size),
-    // they should happen-after the implicit initialization check.
-    //
-    // TODO: Remove this dmb for class initialization checks (b/36692143) by introducing
-    // a new observably-initialized class state.
-.endif
     bx     lr
 
 .Lslow_path\c_name:
@@ -1321,11 +1309,10 @@
     sub    r12, r3, r12                                       // Compute the remaining buf size.
     ldr    r3, [r0, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (r3).
     cmp    r3, r12                                            // Check if it fits.
-    // When isInitialized == 0, then the class is potentially not yet initialized.
-    // If the class is not yet initialized, the object size will be very large to force the branch
-    // below to be taken.
+    // If the class is not yet visibly initialized, or it is finalizable,
+    // the object size will be very large to force the branch below to be taken.
     //
-    // See InitializeClassVisitors in class-inl.h for more details.
+    // See Class::SetStatus() in class.cc for more details.
     bhi    \slowPathLabel
     // "Point of no slow path". Won't go to the slow path from here on. OK to clobber r0 and r1.
                                                               // Reload old thread_local_pos (r0)
@@ -1360,18 +1347,9 @@
                                                               // site will see the right values in
                                                               // the fields of the class.
     mov    r0, r2
-.if \isInitialized == 0
-    // This barrier is only necessary when the allocation also requires
-    // a class initialization check.
-    //
-    // If the class is already observably initialized, then new-instance allocations are protected
+    // No barrier. The class is already visibly initialized (otherwise the fast
+    // path size check above would fail) and new-instance allocations are protected
     // from publishing by the compiler which inserts its own StoreStore barrier.
-    dmb    ish
-    // Use a "dmb ish" fence here because if there are later loads of statics (e.g. class size),
-    // they should happen-after the implicit initialization check.
-    //
-    // TODO: Remove dmb for class initialization checks (b/36692143)
-.endif
     bx     lr
 .endm
 
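Both arm fast paths above lean on a single trick: the field at MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET holds the real, pre-rounded object size only for classes that are visibly initialized, fixed-size and non-finalizable, and 0xFFFFFFFF otherwise, so the existing unsigned size comparison folds the initialization and finalizable checks in for free. A hedged C++ sketch of the idea (the struct and helper names are invented stand-ins, not ART types):

```cpp
#include <cstddef>
#include <cstdint>

// Sentinel kept in the fast-path size field until Class::SetStatus()
// publishes the real, pre-aligned size on the kVisiblyInitialized
// transition (never for finalizable or variable-size classes).
constexpr uint32_t kAllocFastPathSentinel = UINT32_MAX;

struct FakeClass {   // Stand-in for mirror::Class (illustration only).
  uint32_t object_size_alloc_fast_path = kAllocFastPathSentinel;
};

struct FakeTlab {    // Stand-in for the thread-local allocation buffer.
  uint8_t* pos;
  uint8_t* end;
};

// One unsigned compare folds three checks into one: "visibly initialized?",
// "non-finalizable?" and "does it fit?". A sentinel-sized class can never
// pass the comparison, so it always falls back to the slow path.
inline void* AllocFastPath(FakeClass* klass, FakeTlab* tlab) {
  size_t size = klass->object_size_alloc_fast_path;
  if (size > static_cast<size_t>(tlab->end - tlab->pos)) {
    return nullptr;  // Slow path: may initialize the class, GC, or both.
  }
  void* obj = tlab->pos;
  tlab->pos += size;  // Bump allocation.
  return obj;
}
```

The C++ TLAB entrypoint in quick_alloc_entrypoints.cc below reads the same field through GetObjectSizeAllocFastPath(), and the class.cc hunk at the end shows the store side.
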
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 1c252a2..e5e7ee5 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1641,14 +1641,11 @@
     bhs    .Lslow_path\c_name
     ldr    w3, [x0, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (x3)
     cmp    x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE        // Check if the size is for a thread
-                                                              // local allocation. Also does the
-                                                              // finalizable and initialization
-                                                              // checks.
-    // When isInitialized == 0, then the class is potentially not yet initialized.
-    // If the class is not yet initialized, the object size will be very large to force the branch
-    // below to be taken.
+                                                              // local allocation.
+    // If the class is not yet visibly initialized, or it is finalizable,
+    // the object size will be very large to force the branch below to be taken.
     //
-    // See InitializeClassVisitors in class-inl.h for more details.
+    // See Class::SetStatus() in class.cc for more details.
     bhs    .Lslow_path\c_name
                                                               // Compute the rosalloc bracket index
                                                               // from the size. Since the size is
@@ -1716,19 +1713,9 @@
     str    w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
 
     mov    x0, x3                                             // Set the return value and return.
-.if \isInitialized == 0
-    // This barrier is only necessary when the allocation also requires
-    // a class initialization check.
-    //
-    // If the class is already observably initialized, then new-instance allocations are protected
+    // No barrier. The class is already visibly initialized (otherwise the fast
+    // path size check above would fail) and new-instance allocations are protected
     // from publishing by the compiler which inserts its own StoreStore barrier.
-    dmb    ish
-    // Use a "dmb ish" fence here because if there are later loads of statics (e.g. class size),
-    // they should happen-after the implicit initialization check.
-    //
-    // TODO: Remove this dmb for class initialization checks (b/36692143) by introducing
-    // a new observably-initialized class state.
-.endif
     ret
 .Lslow_path\c_name:
     SETUP_SAVE_REFS_ONLY_FRAME                      // save callee saves in case of GC
@@ -1754,11 +1741,10 @@
                                                               // since the tlab pos and end are 32
                                                               // bit values.
 
-    // When isInitialized == 0, then the class is potentially not yet initialized.
-    // If the class is not yet initialized, the object size will be very large to force the branch
-    // below to be taken.
+    // If the class is not yet visibly initialized, or it is finalizable,
+    // the object size will be very large to force the branch below to be taken.
     //
-    // See InitializeClassVisitors in class-inl.h for more details.
+    // See Class::SetStatus() in class.cc for more details.
     bhi    \slowPathLabel
     str    x6, [xSELF, #THREAD_LOCAL_POS_OFFSET]              // Store new thread_local_pos.
     ldr    x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]          // Increment thread_local_objects.
@@ -1771,19 +1757,9 @@
                                                               // site will see the right values in
                                                               // the fields of the class.
     mov    x0, x4
-.if \isInitialized == 0
-    // This barrier is only necessary when the allocation also requires
-    // a class initialization check.
-    //
-    // If the class is already observably initialized, then new-instance allocations are protected
+    // No barrier. The class is already visibly initialized (otherwise the fast
+    // path size check above would fail) and new-instance allocations are protected
     // from publishing by the compiler which inserts its own StoreStore barrier.
-    dmb    ish
-    // Use a "dmb ish" fence here because if there are later loads of statics (e.g. class size),
-    // they should happen-after the implicit initialization check.
-    //
-    // TODO: Remove this dmb for class initialization checks (b/36692143) by introducing
-    // a new observably-initialized class state.
-.endif
     ret
 .endm
 
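The removed `dmb ish` is the payoff of the whole change: a class only reaches kVisiblyInitialized after the runtime has forced an ordering point on every thread, so a thread whose fast-path size check succeeds already happens-after the `<clinit>` stores. A heavily simplified, self-contained sketch of that promotion step (the real logic is ClassLinker::MakeInitializedClassesVisiblyInitialized() and uses a thread-list checkpoint; everything below is a stand-in):

```cpp
#include <atomic>
#include <cstdint>
#include <vector>

// Stand-ins for ART types (illustration only).
enum class ClassStatus : uint8_t { kInitialized, kVisiblyInitialized };
struct FakeClass { std::atomic<ClassStatus> status{ClassStatus::kInitialized}; };

// Assumed helper: runs an empty checkpoint on every runtime thread. Each
// thread passing the checkpoint yields a synchronizes-with edge, making
// the <clinit> stores performed before kInitialized visible everywhere.
void RunCheckpointOnAllThreads() { /* elided: thread-list checkpoint */ }

// Hypothetical outline of the promotion: once every thread has passed an
// ordering point, bumping the status is enough; a reader that sees
// kVisiblyInitialized already happens-after the <clinit> stores and
// needs no acquire fence in the allocation fast path.
void MakeVisiblyInitialized(std::vector<FakeClass*>& pending) {
  RunCheckpointOnAllThreads();
  for (FakeClass* klass : pending) {
    klass->status.store(ClassStatus::kVisiblyInitialized, std::memory_order_release);
  }
  pending.clear();
}
```
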
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index fc32114..a79b87d 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -5963,6 +5963,16 @@
   DCHECK(c != nullptr);
 
   if (c->IsInitialized()) {
+    // If we've seen an initialized but not visibly initialized class
+    // many times, request visible initialization.
+    if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
+      // Thanks to the x86 memory model, classes skip the kInitialized status.
+      DCHECK(c->IsVisiblyInitialized());
+    } else if (UNLIKELY(!c->IsVisiblyInitialized())) {
+      if (self->IncrementMakeVisiblyInitializedCounter()) {
+        MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
+      }
+    }
     DCHECK(c->WasVerificationAttempted()) << c->PrettyClassAndClassLoader();
     return true;
   }
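
The counter keeps this new check cheap: EnsureInitialized() calls it on every sighting of an initialized-but-not-yet-visible class, but only the Nth sighting pays for a checkpoint. A minimal sketch of the batching pattern (the threshold value and field name are assumptions; the real counter lives on art::Thread):

```cpp
#include <cstdint>

// Hedged simplification of Thread::IncrementMakeVisiblyInitializedCounter():
// return true once per kThreshold sightings of an initialized-but-not-
// visibly-initialized class, so visible-initialization requests are
// batched rather than issued on every EnsureInitialized() call.
class FakeThread {  // Stand-in for art::Thread (illustration only).
 public:
  bool IncrementMakeVisiblyInitializedCounter() {
    static constexpr uint32_t kThreshold = 128;  // Assumed value.
    if (++make_visibly_initialized_counter_ < kThreshold) {
      return false;
    }
    make_visibly_initialized_counter_ = 0;
    return true;
  }

 private:
  uint32_t make_visibly_initialized_counter_ = 0;
};
```
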
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 4d24a8c..01fb4dd 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -114,6 +114,35 @@
   return method;
 }
 
+ALWAYS_INLINE
+inline ObjPtr<mirror::Class> CheckClassInitializedForObjectAlloc(ObjPtr<mirror::Class> klass,
+                                                                 Thread* self,
+                                                                 bool* slow_path)
+    REQUIRES_SHARED(Locks::mutator_lock_)
+    REQUIRES(!Roles::uninterruptible_) {
+  if (UNLIKELY(!klass->IsVisiblyInitialized())) {
+    StackHandleScope<1> hs(self);
+    Handle<mirror::Class> h_class(hs.NewHandle(klass));
+    // EnsureInitialized (the class initializer) might cause a GC and
+    // may cause us to suspend, meaning that another thread may try to
+    // change the allocator while we are stuck in the entrypoints of
+    // an old allocator. Also, the class initialization may fail. To
+    // handle these cases we mark the slow path boolean as true so
+    // that the caller knows to check the allocator type to see if it
+    // has changed and to null-check the return value in case the
+    // initialization fails.
+    *slow_path = true;
+    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
+      DCHECK(self->IsExceptionPending());
+      return nullptr;  // Failure
+    } else {
+      DCHECK(!self->IsExceptionPending());
+    }
+    return h_class.Get();
+  }
+  return klass;
+}
+
 ALWAYS_INLINE inline ObjPtr<mirror::Class> CheckObjectAlloc(ObjPtr<mirror::Class> klass,
                                                             Thread* self,
                                                             bool* slow_path)
@@ -130,54 +159,7 @@
     *slow_path = true;
     return nullptr;  // Failure
   }
-  if (UNLIKELY(!klass->IsInitialized())) {
-    StackHandleScope<1> hs(self);
-    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
-    // EnsureInitialized (the class initializer) might cause a GC.
-    // may cause us to suspend meaning that another thread may try to
-    // change the allocator while we are stuck in the entrypoints of
-    // an old allocator. Also, the class initialization may fail. To
-    // handle these cases we mark the slow path boolean as true so
-    // that the caller knows to check the allocator type to see if it
-    // has changed and to null-check the return value in case the
-    // initialization fails.
-    *slow_path = true;
-    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
-      DCHECK(self->IsExceptionPending());
-      return nullptr;  // Failure
-    } else {
-      DCHECK(!self->IsExceptionPending());
-    }
-    return h_klass.Get();
-  }
-  return klass;
-}
-
-ALWAYS_INLINE
-inline ObjPtr<mirror::Class> CheckClassInitializedForObjectAlloc(ObjPtr<mirror::Class> klass,
-                                                                 Thread* self,
-                                                                 bool* slow_path)
-    REQUIRES_SHARED(Locks::mutator_lock_)
-    REQUIRES(!Roles::uninterruptible_) {
-  if (UNLIKELY(!klass->IsInitialized())) {
-    StackHandleScope<1> hs(self);
-    Handle<mirror::Class> h_class(hs.NewHandle(klass));
-    // EnsureInitialized (the class initializer) might cause a GC.
-    // may cause us to suspend meaning that another thread may try to
-    // change the allocator while we are stuck in the entrypoints of
-    // an old allocator. Also, the class initialization may fail. To
-    // handle these cases we mark the slow path boolean as true so
-    // that the caller knows to check the allocator type to see if it
-    // has changed and to null-check the return value in case the
-    // initialization fails.
-    *slow_path = true;
-    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
-      DCHECK(self->IsExceptionPending());
-      return nullptr;  // Failure
-    }
-    return h_class.Get();
-  }
-  return klass;
+  return CheckClassInitializedForObjectAlloc(klass, self, slow_path);
 }
 
 // Allocate an instance of klass. Throws InstantiationError if klass is not instantiable,
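
The truncated trailing comment introduces AllocObjectFromCode(), the consumer of the slow_path flag set by the check above. A sketch of it, simplified from the same header (the real function is a template over kInstrumented; instrumentation handling is elided):

```cpp
// If CheckObjectAlloc() set *slow_path, a suspend point may have been
// crossed: the allocator type may have changed and initialization may
// have thrown. So re-read the current allocator and null-check klass.
inline ObjPtr<mirror::Object> AllocObjectFromCode(ObjPtr<mirror::Class> klass,
                                                  Thread* self,
                                                  gc::AllocatorType allocator_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;  // Exception pending from failed initialization.
    }
    // Re-read: the allocator may have changed across the suspend point.
    return klass->Alloc(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  return klass->Alloc(self, allocator_type);
}
```
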
diff --git a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc
index ecf6f67..dba4ecc 100644
--- a/runtime/entrypoints/quick/quick_alloc_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_alloc_entrypoints.cc
@@ -41,22 +41,21 @@
   ScopedQuickEntrypointChecks sqec(self);
   DCHECK(klass != nullptr);
   if (kUseTlabFastPath && !kInstrumented && allocator_type == gc::kAllocatorTypeTLAB) {
-    if (kInitialized || klass->IsInitialized()) {
-      if (!kFinalize || !klass->IsFinalizable()) {
-        size_t byte_count = klass->GetObjectSize();
-        byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment);
-        mirror::Object* obj;
-        if (LIKELY(byte_count < self->TlabSize())) {
-          obj = self->AllocTlab(byte_count);
-          DCHECK(obj != nullptr) << "AllocTlab can't fail";
-          obj->SetClass(klass);
-          if (kUseBakerReadBarrier) {
-            obj->AssertReadBarrierState();
-          }
-          QuasiAtomic::ThreadFenceForConstructor();
-          return obj;
-        }
+    // The "object size alloc fast path" is set when the class is
+    // visibly initialized, objects are fixed size and non-finalizable.
+    // Otherwise, the value is too large for the size check to succeed.
+    size_t byte_count = klass->GetObjectSizeAllocFastPath();
+    if (LIKELY(byte_count < self->TlabSize())) {
+      static_assert(kObjectAlignment == gc::space::BumpPointerSpace::kAlignment, "Alignment check");
+      DCHECK_ALIGNED(byte_count, gc::space::BumpPointerSpace::kAlignment);
+      mirror::Object* obj = self->AllocTlab(byte_count);
+      DCHECK(obj != nullptr) << "AllocTlab can't fail";
+      obj->SetClass(klass);
+      if (kUseBakerReadBarrier) {
+        obj->AssertReadBarrierState();
       }
+      QuasiAtomic::ThreadFenceForConstructor();
+      return obj;
     }
   }
   if (kInitialized) {
diff --git a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
index 0b286e3..838b5b5 100644
--- a/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_dexcache_entrypoints.cc
@@ -140,12 +140,6 @@
   ScopedQuickEntrypointChecks sqec(self);
   DCHECK(klass != nullptr);
   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
-  if (UNLIKELY(klass->IsInitialized())) {
-    if (self->IncrementMakeVisiblyInitializedCounter()) {
-      class_linker->MakeInitializedClassesVisiblyInitialized(self, /*wait=*/ false);
-    }
-    return klass;
-  }
   StackHandleScope<1> hs(self);
   Handle<mirror::Class> h_klass = hs.NewHandle(klass);
   bool success = class_linker->EnsureInitialized(
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 7919621..d0901c4 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -776,24 +776,15 @@
 
     if (method->IsStatic()) {
       ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
-      if (LIKELY(declaring_class->IsVisiblyInitialized())) {
-        // Visibly initialized, nothing to do.
-      } else if (!declaring_class->IsInitialized()) {
+      if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) {
         // Ensure static method's class is initialized.
         StackHandleScope<1> hs(self);
-        Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
+        Handle<mirror::Class> h_class(hs.NewHandle(declaring_class));
         if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
-          DCHECK(Thread::Current()->IsExceptionPending())
-              << shadow_frame->GetMethod()->PrettyMethod();
+          DCHECK(Thread::Current()->IsExceptionPending()) << method->PrettyMethod();
           self->PopManagedStackFragment(fragment);
           return 0;
         }
-      } else {
-        // Initialized but not visibly initialized.
-        if (self->IncrementMakeVisiblyInitializedCounter()) {
-          Runtime::Current()->GetClassLinker()->MakeInitializedClassesVisiblyInitialized(
-              self, /*wait=*/ false);
-        }
       }
     }
 
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index a299f34..dde056a 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -202,11 +202,7 @@
   // Setting the object size alloc fast path needs to be after the status write so that if the
   // alloc path sees a valid object size, we would know that it's initialized as long as it has a
   // load-acquire/fake dependency.
-  // TODO: Update the object size alloc fast path only for ClassStatus::kVisiblyInitialized
-  // and take advantage of this in allocation entrypoints. b/36692143
-  if (new_status >= ClassStatus::kInitialized &&
-      old_status < ClassStatus::kInitialized &&
-      !h_this->IsVariableSize()) {
+  if (new_status == ClassStatus::kVisiblyInitialized && !h_this->IsVariableSize()) {
     DCHECK_EQ(h_this->GetObjectSizeAllocFastPath(), std::numeric_limits<uint32_t>::max());
     // Finalizable objects must always go slow path.
     if (!h_this->IsFinalizable()) {
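
Finally, the store side that every fast path above relies on. The hunk is cut off here; the rest of the branch, reconstructed as an assumption from the surrounding code, would publish the size pre-rounded, which is what let the TLAB entrypoint swap its RoundUp for a DCHECK_ALIGNED:

```cpp
// Sketch (assumed continuation of the hunk above): publish the pre-aligned
// object size only for fixed-size, non-finalizable classes on the
// kVisiblyInitialized transition; finalizable classes keep the uint32-max
// sentinel and therefore always take the allocation slow path.
if (new_status == ClassStatus::kVisiblyInitialized && !h_this->IsVariableSize()) {
  DCHECK_EQ(h_this->GetObjectSizeAllocFastPath(), std::numeric_limits<uint32_t>::max());
  // Finalizable objects must always go slow path.
  if (!h_this->IsFinalizable()) {
    h_this->SetObjectSizeAllocFastPath(RoundUp(h_this->GetObjectSize(), kObjectAlignment));
  }
}
```
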