Revert^4 "Allow structural redefinition on non-final classes."

This reverts commit 664999a12d6fc8a8ef5c0519b12ec1e8a51bb085.

Fixed issues with GC and quickened instructions in parent CLs.

Reason for revert: Fixed issues with GC CHECK failures and device SEGVs.

Test: ./test.py --host
Test: ./test.py --target
Bug: 134162467
Bug: 144168550
Change-Id: Ibacddaf45beb72184f97d53d5d048bd442578658
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 3f7c9b1..a0a2365 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -3367,12 +3367,57 @@
       StartsWith(descriptor_sv, "Landroid/media/");
 }
 
+// Helper for maintaining DefineClass counting. We need to notify callbacks when we start/end a
+// define-class and track how many recursive DefineClass calls we are in, in order to allow for
+// things like pausing class definition.
+struct ScopedDefiningClass {
+ public:
+  explicit ScopedDefiningClass(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
+      : self_(self), returned_(false) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+    Runtime::Current()->GetRuntimeCallbacks()->BeginDefineClass();
+    self_->IncrDefineClassCount();
+  }
+  ~ScopedDefiningClass() REQUIRES_SHARED(Locks::mutator_lock_) {
+    Locks::mutator_lock_->AssertSharedHeld(self_);
+    CHECK(returned_);
+  }
+
+  ObjPtr<mirror::Class> Finish(Handle<mirror::Class> h_klass)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    CHECK(!returned_);
+    self_->DecrDefineClassCount();
+    Runtime::Current()->GetRuntimeCallbacks()->EndDefineClass();
+    Thread::PoisonObjectPointersIfDebug();
+    returned_ = true;
+    return h_klass.Get();
+  }
+
+  ObjPtr<mirror::Class> Finish(ObjPtr<mirror::Class> klass)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    StackHandleScope<1> hs(self_);
+    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
+    return Finish(h_klass);
+  }
+
+  ObjPtr<mirror::Class> Finish(nullptr_t np ATTRIBUTE_UNUSED)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    ScopedNullHandle<mirror::Class> snh;
+    return Finish(snh);
+  }
+
+ private:
+  Thread* self_;
+  bool returned_;
+};
+
 ObjPtr<mirror::Class> ClassLinker::DefineClass(Thread* self,
                                                const char* descriptor,
                                                size_t hash,
                                                Handle<mirror::ClassLoader> class_loader,
                                                const DexFile& dex_file,
                                                const dex::ClassDef& dex_class_def) {
+  ScopedDefiningClass sdc(self);
   StackHandleScope<3> hs(self);
   auto klass = hs.NewHandle<mirror::Class>(nullptr);
 
@@ -3403,7 +3448,7 @@
     ObjPtr<mirror::Throwable> pre_allocated =
         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
     self->SetException(pre_allocated);
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
 
   // This is to prevent the calls to ClassLoad and ClassPrepare which can cause java/user-supplied
@@ -3414,7 +3459,7 @@
     ObjPtr<mirror::Throwable> pre_allocated =
         Runtime::Current()->GetPreAllocatedNoClassDefFoundError();
     self->SetException(pre_allocated);
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
 
   if (klass == nullptr) {
@@ -3425,12 +3470,12 @@
     if (CanAllocClass()) {
       klass.Assign(AllocClass(self, SizeOfClassWithoutEmbeddedTables(dex_file, dex_class_def)));
     } else {
-      return nullptr;
+      return sdc.Finish(nullptr);
     }
   }
   if (UNLIKELY(klass == nullptr)) {
     self->AssertPendingOOMException();
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
   // Get the real dex file. This will return the input if there aren't any callbacks or they do
   // nothing.
@@ -3447,12 +3492,12 @@
                                                             &new_class_def);
   // Check to see if an exception happened during runtime callbacks. Return if so.
   if (self->IsExceptionPending()) {
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
   ObjPtr<mirror::DexCache> dex_cache = RegisterDexFile(*new_dex_file, class_loader.Get());
   if (dex_cache == nullptr) {
     self->AssertPendingException();
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
   klass->SetDexCache(dex_cache);
   SetupClass(*new_dex_file, *new_class_def, klass, class_loader.Get());
@@ -3474,7 +3519,7 @@
   if (existing != nullptr) {
     // We failed to insert because we raced with another thread. Calling EnsureResolved may cause
     // this thread to block.
-    return EnsureResolved(self, descriptor, existing);
+    return sdc.Finish(EnsureResolved(self, descriptor, existing));
   }
 
   // Load the fields and other things after we are inserted in the table. This is so that we don't
@@ -3489,7 +3534,7 @@
     if (!klass->IsErroneous()) {
       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
     }
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
 
   // Finish loading (if necessary) by finding parents
@@ -3499,7 +3544,7 @@
     if (!klass->IsErroneous()) {
       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
     }
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
   CHECK(klass->IsLoaded());
 
@@ -3518,7 +3563,7 @@
     if (!klass->IsErroneous()) {
       mirror::Class::SetStatus(klass, ClassStatus::kErrorUnresolved, self);
     }
-    return nullptr;
+    return sdc.Finish(nullptr);
   }
   self->AssertNoPendingException();
   CHECK(h_new_class != nullptr) << descriptor;
@@ -3552,7 +3597,7 @@
   // Notify native debugger of the new class and its layout.
   jit::Jit::NewTypeLoadedIfUsingJit(h_new_class.Get());
 
-  return h_new_class.Get();
+  return sdc.Finish(h_new_class);
 }
 
 uint32_t ClassLinker::SizeOfClassWithoutEmbeddedTables(const DexFile& dex_file,
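The pattern above funnels every return path of DefineClass through ScopedDefiningClass::Finish(),
so the per-thread counter and the Begin/End callbacks stay balanced, and the destructor's
CHECK(returned_) catches any path that forgets. A minimal standalone sketch of that contract
(plain C++, not ART code; all names here are illustrative):

    #include <cassert>
    #include <cstdint>

    // Per-thread nesting counter, standing in for tls32_.define_class_counter.
    thread_local uint32_t g_define_class_count = 0;

    class ScopedCountedSection {
     public:
      ScopedCountedSection() { ++g_define_class_count; }  // ~ BeginDefineClass()
      ~ScopedCountedSection() { assert(finished_); }      // ~ CHECK(returned_)

      // Every return value is funneled through Finish(), which pops the counter.
      template <typename T>
      T Finish(T result) {
        assert(!finished_);
        --g_define_class_count;                           // ~ EndDefineClass()
        finished_ = true;
        return result;
      }

     private:
      bool finished_ = false;
    };

    int DefineSomething(bool fail) {
      ScopedCountedSection scs;
      if (fail) {
        return scs.Finish(-1);  // early exits balance the counter too
      }
      return scs.Finish(42);
    }
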
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 4e38e6b..f993d17 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -1456,6 +1456,10 @@
  public:
   virtual ~ClassLoadCallback() {}
 
+  // Called immediately before class definition begins and immediately before DefineClass returns.
+  virtual void BeginDefineClass() REQUIRES_SHARED(Locks::mutator_lock_) {}
+  virtual void EndDefineClass() REQUIRES_SHARED(Locks::mutator_lock_) {}
+
   // If set we will replace initial_class_def & initial_dex_file with the final versions. The
   // callback author is responsible for ensuring these are allocated in such a way they can be
   // cleaned up if another transformation occurs. Note that both must be set or null/unchanged on
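One intended use of these hooks, hinted at by the "pausing class definition" comment above, is a
callback that blocks incoming definitions while a structural redefinition is in flight. A sketch of
that idea in plain C++ (not the real ClassLoadCallback interface; interaction with the runtime's
own locks is elided):

    #include <condition_variable>
    #include <mutex>

    class PauseDefinitionsCallback {
     public:
      // Runs at the top of DefineClass: blocks while definitions are paused.
      void BeginDefineClass() {
        std::unique_lock<std::mutex> lk(mu_);
        cv_.wait(lk, [this] { return !paused_; });
      }
      void EndDefineClass() {}

      // Called by the redefinition code to pause/resume class definition.
      void SetPaused(bool paused) {
        {
          std::lock_guard<std::mutex> lk(mu_);
          paused_ = paused;
        }
        cv_.notify_all();
      }

     private:
      std::mutex mu_;
      std::condition_variable cv_;
      bool paused_ = false;
    };
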
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index cc1a7f8..c67b1b0 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -358,7 +358,8 @@
       DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
       return nullptr;  // Failure.
     }
-    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
+    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class) &&
+                 !referring_class->IsObsoleteVersionOf(fields_class))) {
       ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
       return nullptr;  // Failure.
     } else {
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index fc0cf24..6c45ebf 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -550,6 +550,22 @@
       access_to, method, dex_cache, method_idx, throw_invoke_type);
 }
 
+inline bool Class::IsObsoleteVersionOf(ObjPtr<Class> klass) {
+  DCHECK(!klass->IsObsoleteObject()) << klass->PrettyClass() << " is obsolete!";
+  if (LIKELY(!IsObsoleteObject())) {
+    return false;
+  }
+  ObjPtr<Class> current(klass);
+  do {
+    if (UNLIKELY(current == this)) {
+      return true;
+    } else {
+      current = current->GetObsoleteClass();
+    }
+  } while (!current.IsNull());
+  return false;
+}
+
 inline bool Class::IsSubClass(ObjPtr<Class> klass) {
   // Since the SubtypeCheck::IsSubtypeOf needs to lookup the Depth,
   // it is always O(Depth) in terms of speed to do the check.
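IsObsoleteVersionOf() works because each structural redefinition links a class to the version it
replaced, so the obsolete versions form a singly linked chain reachable through GetObsoleteClass().
A standalone model of that walk (plain C++, not ART types):

    // Each redefined class records the version it replaced.
    struct ClassModel {
      bool obsolete = false;
      const ClassModel* obsolete_version = nullptr;  // older version, or null
    };

    // True iff `self` is obsolete and appears in `current`'s chain of older versions.
    bool IsObsoleteVersionOf(const ClassModel* self, const ClassModel* current) {
      if (!self->obsolete) {
        return false;  // a live class is never an obsolete version of anything
      }
      for (const ClassModel* c = current; c != nullptr; c = c->obsolete_version) {
        if (c == self) {
          return true;
        }
      }
      return false;
    }
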
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index b7429dd..e57cc98 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -188,28 +188,57 @@
   }
 }
 
+template <typename T>
+static void CheckSetStatus(Thread* self, T thiz, ClassStatus new_status, ClassStatus old_status)
+    REQUIRES_SHARED(Locks::mutator_lock_) {
+  if (UNLIKELY(new_status <= old_status && new_status != ClassStatus::kErrorUnresolved &&
+               new_status != ClassStatus::kErrorResolved && new_status != ClassStatus::kRetired)) {
+    LOG(FATAL) << "Unexpected change back of class status for " << thiz->PrettyClass() << " "
+               << old_status << " -> " << new_status;
+  }
+  if (old_status == ClassStatus::kInitialized) {
+    // We do not hold the lock for making the class visibly initialized
+    // as this is unnecessary and could lead to deadlocks.
+    CHECK_EQ(new_status, ClassStatus::kVisiblyInitialized);
+  } else if ((new_status >= ClassStatus::kResolved || old_status >= ClassStatus::kResolved) &&
+             !Locks::mutator_lock_->IsExclusiveHeld(self)) {
+    // When classes are being resolved the resolution code should hold the
+    // lock or have everything else suspended
+    CHECK_EQ(thiz->GetLockOwnerThreadId(), self->GetThreadId())
+        << "Attempt to change status of class while not holding its lock: " << thiz->PrettyClass()
+        << " " << old_status << " -> " << new_status;
+  }
+  if (UNLIKELY(Locks::mutator_lock_->IsExclusiveHeld(self))) {
+    CHECK(!Class::IsErroneous(new_status))
+        << "status " << new_status
+        << " cannot be set while suspend-all is active. Would require allocations.";
+    CHECK(thiz->IsResolved())
+        << thiz->PrettyClass()
+        << " not resolved during suspend-all status change. Waiters might be missed!";
+  }
+}
+
+void Class::SetStatusLocked(ClassStatus new_status) {
+  ClassStatus old_status = GetStatus();
+  CheckSetStatus(Thread::Current(), this, new_status, old_status);
+  if (kBitstringSubtypeCheckEnabled) {
+    // FIXME: This looks broken with respect to aborted transactions.
+    SubtypeCheck<ObjPtr<mirror::Class>>::WriteStatus(this, new_status);
+  } else {
+    // The ClassStatus is always in the 4 most-significant bits of status_.
+    static_assert(sizeof(status_) == sizeof(uint32_t), "Size of status_ not equal to uint32");
+    uint32_t new_status_value = static_cast<uint32_t>(new_status) << (32 - kClassStatusBitSize);
+    DCHECK(!Runtime::Current()->IsActiveTransaction());
+    SetField32Volatile<false>(StatusOffset(), new_status_value);
+  }
+}
+
 void Class::SetStatus(Handle<Class> h_this, ClassStatus new_status, Thread* self) {
   ClassStatus old_status = h_this->GetStatus();
   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
   bool class_linker_initialized = class_linker != nullptr && class_linker->IsInitialized();
   if (LIKELY(class_linker_initialized)) {
-    if (UNLIKELY(new_status <= old_status &&
-                 new_status != ClassStatus::kErrorUnresolved &&
-                 new_status != ClassStatus::kErrorResolved &&
-                 new_status != ClassStatus::kRetired)) {
-      LOG(FATAL) << "Unexpected change back of class status for " << h_this->PrettyClass()
-                 << " " << old_status << " -> " << new_status;
-    }
-    if (old_status == ClassStatus::kInitialized) {
-      // We do not hold the lock for making the class visibly initialized
-      // as this is unnecessary and could lead to deadlocks.
-      CHECK_EQ(new_status, ClassStatus::kVisiblyInitialized);
-    } else if (new_status >= ClassStatus::kResolved || old_status >= ClassStatus::kResolved) {
-      // When classes are being resolved the resolution code should hold the lock.
-      CHECK_EQ(h_this->GetLockOwnerThreadId(), self->GetThreadId())
-            << "Attempt to change status of class while not holding its lock: "
-            << h_this->PrettyClass() << " " << old_status << " -> " << new_status;
-    }
+    CheckSetStatus(self, h_this, new_status, old_status);
   }
   if (UNLIKELY(IsErroneous(new_status))) {
     CHECK(!h_this->IsErroneous())
@@ -336,6 +365,15 @@
   SetField32Transaction(OFFSET_OF_OBJECT_MEMBER(Class, class_size_), new_class_size);
 }
 
+ObjPtr<Class> Class::GetObsoleteClass() {
+  ObjPtr<ClassExt> ext(GetExtData());
+  if (ext.IsNull()) {
+    return nullptr;
+  } else {
+    return ext->GetObsoleteClass();
+  }
+}
+
 // Return the class' name. The exact format is bizarre, but it's the specified behavior for
 // Class.getName: keywords for primitive types, regular "[I" form for primitive arrays (so "int"
 // but "[I"), and arrays of reference types written between "L" and ";" but with dots rather than
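SetStatusLocked() relies on the invariant stated in its comment: the 4-bit ClassStatus occupies the
most-significant bits of the 32-bit status_ word, with the low bits holding bitstring subtype-check
state. A worked example of that packing (assuming kClassStatusBitSize == 4, as in current ART):

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kClassStatusBitSize = 4;  // assumption: matches ART's value

    // Place a 4-bit status value in the top bits of the 32-bit status word.
    uint32_t PackStatus(uint32_t status) {
      assert(status < (1u << kClassStatusBitSize));
      return status << (32 - kClassStatusBitSize);
    }

    uint32_t UnpackStatus(uint32_t status_word) {
      return status_word >> (32 - kClassStatusBitSize);
    }

    // e.g. PackStatus(15) == 0xF0000000 and UnpackStatus(0xF0000000) == 15.
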
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index ecbae71..b600c43 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -105,6 +105,10 @@
   static void SetStatus(Handle<Class> h_this, ClassStatus new_status, Thread* self)
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
 
+  // Used for structural redefinition to directly set the class status while
+  // the mutator lock is exclusively held.
+  void SetStatusLocked(ClassStatus new_status) REQUIRES(Locks::mutator_lock_);
+
   void SetStatusForPrimitiveOrArray(ClassStatus new_status) REQUIRES_SHARED(Locks::mutator_lock_);
 
   static constexpr MemberOffset StatusOffset() {
@@ -617,6 +621,11 @@
   // to themselves. Classes for primitive types may not assign to each other.
   ALWAYS_INLINE bool IsAssignableFrom(ObjPtr<Class> src) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Checks if 'klass' is a redefined version of this.
+  bool IsObsoleteVersionOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
+
+  ObjPtr<Class> GetObsoleteClass() REQUIRES_SHARED(Locks::mutator_lock_);
+
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ALWAYS_INLINE ObjPtr<Class> GetSuperClass() REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/runtime_callbacks.cc b/runtime/runtime_callbacks.cc
index ac73364..e0f57c0 100644
--- a/runtime/runtime_callbacks.cc
+++ b/runtime/runtime_callbacks.cc
@@ -228,6 +228,18 @@
   }
 }
 
+void RuntimeCallbacks::EndDefineClass() {
+  for (ClassLoadCallback* cb : COPY(class_callbacks_)) {
+    cb->EndDefineClass();
+  }
+}
+
+void RuntimeCallbacks::BeginDefineClass() {
+  for (ClassLoadCallback* cb : COPY(class_callbacks_)) {
+    cb->BeginDefineClass();
+  }
+}
+
 void RuntimeCallbacks::ClassPreDefine(const char* descriptor,
                                       Handle<mirror::Class> temp_class,
                                       Handle<mirror::ClassLoader> loader,
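The COPY(...) used in these dispatch loops appears to snapshot the callback list before iterating,
presumably so a callback can add or remove itself during dispatch without invalidating the loop. A
minimal version of that pattern (an assumption about COPY's intent, not its literal definition):

    #include <vector>

    // Snapshot-then-iterate: mutations to `callbacks` made by a callback
    // during dispatch cannot invalidate this loop (the copy is deliberate).
    template <typename Callback, typename Fn>
    void DispatchToSnapshot(const std::vector<Callback*>& callbacks, Fn fn) {
      std::vector<Callback*> snapshot(callbacks);
      for (Callback* cb : snapshot) {
        fn(cb);
      }
    }
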
diff --git a/runtime/runtime_callbacks.h b/runtime/runtime_callbacks.h
index 7111ba0..3cadd97 100644
--- a/runtime/runtime_callbacks.h
+++ b/runtime/runtime_callbacks.h
@@ -181,6 +181,8 @@
   void AddClassLoadCallback(ClassLoadCallback* cb) REQUIRES(Locks::mutator_lock_);
   void RemoveClassLoadCallback(ClassLoadCallback* cb) REQUIRES(Locks::mutator_lock_);
 
+  void BeginDefineClass() REQUIRES_SHARED(Locks::mutator_lock_);
+  void EndDefineClass() REQUIRES_SHARED(Locks::mutator_lock_);
   void ClassLoad(Handle<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
   void ClassPrepare(Handle<mirror::Class> temp_klass, Handle<mirror::Class> klass)
       REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/thread.h b/runtime/thread.h
index d3361ee..9d703cf 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -264,6 +264,17 @@
         (state_and_flags.as_struct.flags & kSuspendRequest) != 0;
   }
 
+  void DecrDefineClassCount() {
+    tls32_.define_class_counter--;
+  }
+
+  void IncrDefineClassCount() {
+    tls32_.define_class_counter++;
+  }
+  uint32_t GetDefineClassCount() const {
+    return tls32_.define_class_counter;
+  }
+
   // If delta > 0 and (this != self or suspend_barrier is not null), this function may temporarily
   // release thread_suspend_count_lock_ internally.
   ALWAYS_INLINE
@@ -1554,7 +1565,8 @@
           user_code_suspend_count(0),
           force_interpreter_count(0),
           use_mterp(0),
-          make_visibly_initialized_counter(0) {}
+          make_visibly_initialized_counter(0),
+          define_class_counter(0) {}
 
     union StateAndFlags state_and_flags;
     static_assert(sizeof(union StateAndFlags) == sizeof(int32_t),
@@ -1652,6 +1664,10 @@
     // initialized but not visibly initialized for a long time even if no more classes are
     // being initialized anymore.
     uint32_t make_visibly_initialized_counter;
+
+    // Counter for how many nested define-classes are ongoing in this thread. Used to allow waiting
+    // for threads to be done with class-definition work.
+    uint32_t define_class_counter;
   } tls32_;
 
   struct PACKED(8) tls_64bit_sized_values {
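
What the counter buys, per the comment above, is the ability to wait until no thread is in the
middle of DefineClass before doing redefinition work. A sketch (plain C++, not ART's thread-list
machinery; thread suspension is elided):

    #include <cstdint>
    #include <vector>

    struct ThreadModel {
      uint32_t define_class_counter = 0;  // nesting depth of DefineClass calls
    };

    // True if any thread is currently inside DefineClass.
    bool AnyThreadDefiningClasses(const std::vector<const ThreadModel*>& threads) {
      for (const ThreadModel* t : threads) {
        if (t->define_class_counter != 0) {
          return true;  // not yet safe to proceed; wait and re-check
        }
      }
      return false;
    }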