Clean up internal stack trace construction.

Simplify the code by ignoring active transactions when constructing
the internal stack trace. Writes to fields of a newly allocated
object do not need to be recorded: aborting the transaction removes
all references to the new object, so there is nothing to roll back
in an unreachable object's fields.
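
To illustrate the reasoning with a standalone sketch (the Transaction,
Obj, RecordWrite and Abort names below are made up for the example and
are not ART's transaction machinery): rollback only needs to restore
fields of objects that remain reachable after the abort, and an object
allocated inside the transaction does not remain reachable.

  // Illustrative sketch only, not ART code: a transaction logs the old
  // field values of pre-existing objects so Abort() can restore them.
  #include <cstdio>
  #include <memory>
  #include <vector>

  struct Obj { int field = 0; };

  class Transaction {
   public:
    // Log the old value of a field that existed before the transaction.
    void RecordWrite(Obj* obj, int old_value) {
      log_.push_back({obj, old_value});
    }
    // Undo the logged writes in reverse order.
    void Abort() {
      for (auto it = log_.rbegin(); it != log_.rend(); ++it) {
        it->obj->field = it->old_value;
      }
      log_.clear();
    }
   private:
    struct Entry { Obj* obj; int old_value; };
    std::vector<Entry> log_;
  };

  int main() {
    Transaction tx;
    Obj existing;                          // reachable before the transaction
    tx.RecordWrite(&existing, existing.field);
    existing.field = 42;                   // must be undone on abort

    auto fresh = std::make_unique<Obj>();  // allocated inside the transaction
    fresh->field = 7;                      // no RecordWrite needed

    tx.Abort();
    fresh.reset();  // abort drops the only reference to the new object, so
                    // its fields can never be observed; nothing to roll back
    std::printf("existing.field = %d\n", existing.field);  // prints 0
  }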

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_taimen-userdebug boots.
Change-Id: Ia91d3274398b0ca0f5b0040dcf323921d915b657
diff --git a/compiler/exception_test.cc b/compiler/exception_test.cc
index 633e124..7d56da0 100644
--- a/compiler/exception_test.cc
+++ b/compiler/exception_test.cc
@@ -213,7 +213,7 @@
   // Set up thread to appear as if we called out of method_g_ at given pc dex.
   thread->SetTopOfStack(reinterpret_cast<ArtMethod**>(&fake_stack[0]));
 
-  jobject internal = thread->CreateInternalStackTrace<false>(soa);
+  jobject internal = thread->CreateInternalStackTrace(soa);
   ASSERT_TRUE(internal != nullptr);
   jobjectArray ste_array = Thread::InternalStackTraceToStackTraceElementArray(soa, internal);
   ASSERT_TRUE(ste_array != nullptr);
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index 405c9ec..2db1390 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -1178,7 +1178,7 @@
     ScopedObjectAccess soa(env);
 
     // Build stack trace
-    jobject internal = Thread::Current()->CreateInternalStackTrace<false>(soa);
+    jobject internal = Thread::Current()->CreateInternalStackTrace(soa);
     jobjectArray ste_array = Thread::InternalStackTraceToStackTraceElementArray(soa, internal);
     ObjPtr<mirror::ObjectArray<mirror::StackTraceElement>> trace_array =
         soa.Decode<mirror::ObjectArray<mirror::StackTraceElement>>(ste_array);
diff --git a/runtime/common_throws.cc b/runtime/common_throws.cc
index 1c9cf18..5a7944c 100644
--- a/runtime/common_throws.cc
+++ b/runtime/common_throws.cc
@@ -831,7 +831,7 @@
     ScopedLocalRef<jobject> stack_state_val(env, nullptr);
     {
       ScopedObjectAccessUnchecked soa(env);  // TODO: Is this necessary?
-      stack_state_val.reset(soa.Self()->CreateInternalStackTrace<false>(soa));
+      stack_state_val.reset(soa.Self()->CreateInternalStackTrace(soa));
     }
     if (stack_state_val != nullptr) {
       env->SetObjectField(exc.get(),
diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc
index 1bdffa3..6b10b7f 100644
--- a/runtime/interpreter/unstarted_runtime.cc
+++ b/runtime/interpreter/unstarted_runtime.cc
@@ -1837,11 +1837,7 @@
     Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver ATTRIBUTE_UNUSED,
     uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
   ScopedObjectAccessUnchecked soa(self);
-  if (Runtime::Current()->IsActiveTransaction()) {
-    result->SetL(soa.Decode<mirror::Object>(self->CreateInternalStackTrace<true>(soa)));
-  } else {
-    result->SetL(soa.Decode<mirror::Object>(self->CreateInternalStackTrace<false>(soa)));
-  }
+  result->SetL(soa.Decode<mirror::Object>(self->CreateInternalStackTrace(soa)));
 }
 
 void UnstartedRuntime::UnstartedJNIByteOrderIsLittleEndian(
diff --git a/runtime/mirror/array-inl.h b/runtime/mirror/array-inl.h
index 34925f5..b75aa5d 100644
--- a/runtime/mirror/array-inl.h
+++ b/runtime/mirror/array-inl.h
@@ -22,6 +22,7 @@
 #include <android-base/logging.h>
 
 #include "base/bit_utils.h"
+#include "base/casts.h"
 #include "class.h"
 #include "obj_ptr-inl.h"
 #include "runtime.h"
@@ -251,23 +252,22 @@
   return GetElementPtrSize<T, PointerSize::k32, kVerifyFlags>(idx);
 }
 
-template<bool kTransactionActive, bool kUnchecked>
+template<bool kTransactionActive, bool kCheckTransaction, bool kUnchecked>
 inline void PointerArray::SetElementPtrSize(uint32_t idx, uint64_t element, PointerSize ptr_size) {
   if (ptr_size == PointerSize::k64) {
     (kUnchecked ? ObjPtr<LongArray>::DownCast(ObjPtr<Object>(this)) : AsLongArray())->
-        SetWithoutChecks<kTransactionActive>(idx, element);
+        SetWithoutChecks<kTransactionActive, kCheckTransaction>(idx, element);
   } else {
-    DCHECK_LE(element, static_cast<uint64_t>(0xFFFFFFFFu));
+    uint32_t element32 = dchecked_integral_cast<uint32_t>(element);
     (kUnchecked ? ObjPtr<IntArray>::DownCast(ObjPtr<Object>(this)) : AsIntArray())
-        ->SetWithoutChecks<kTransactionActive>(idx, static_cast<uint32_t>(element));
+        ->SetWithoutChecks<kTransactionActive, kCheckTransaction>(idx, element32);
   }
 }
 
-template<bool kTransactionActive, bool kUnchecked, typename T>
+template<bool kTransactionActive, bool kCheckTransaction, bool kUnchecked, typename T>
 inline void PointerArray::SetElementPtrSize(uint32_t idx, T* element, PointerSize ptr_size) {
-  SetElementPtrSize<kTransactionActive, kUnchecked>(idx,
-                                                    reinterpret_cast<uintptr_t>(element),
-                                                    ptr_size);
+  SetElementPtrSize<kTransactionActive, kCheckTransaction, kUnchecked>(
+      idx, reinterpret_cast<uintptr_t>(element), ptr_size);
 }
 
 template <VerifyObjectFlags kVerifyFlags, typename Visitor>
@@ -278,7 +278,9 @@
     void* ptr = GetElementPtrSize<void*, kVerifyFlags>(i, pointer_size);
     void* new_ptr = visitor(ptr);
     if (ptr != new_ptr) {
-      dest->SetElementPtrSize<false, true>(i, new_ptr, pointer_size);
+      dest->SetElementPtrSize</*kTransactionActive=*/ false,
+                              /*kCheckTransaction=*/ true,
+                              /*kUnchecked=*/ true>(i, new_ptr, pointer_size);
     }
   }
 }
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index 19f9a92..4c172f2 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -241,10 +241,13 @@
                                     static_cast<size_t>(ptr_size) * index);
   }
 
-  template<bool kTransactionActive = false, bool kUnchecked = false>
+  template<bool kTransactionActive = false, bool kCheckTransaction = true, bool kUnchecked = false>
   void SetElementPtrSize(uint32_t idx, uint64_t element, PointerSize ptr_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive = false, bool kUnchecked = false, typename T>
+  template<bool kTransactionActive = false,
+           bool kCheckTransaction = true,
+           bool kUnchecked = false,
+           typename T>
   void SetElementPtrSize(uint32_t idx, T* element, PointerSize ptr_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
diff --git a/runtime/native/dalvik_system_VMStack.cc b/runtime/native/dalvik_system_VMStack.cc
index 32733a8..9d2dfac 100644
--- a/runtime/native/dalvik_system_VMStack.cc
+++ b/runtime/native/dalvik_system_VMStack.cc
@@ -84,7 +84,7 @@
   ScopedFastNativeObjectAccess soa(env);
   auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
       REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
-    return thread->CreateInternalStackTrace<false>(soaa);
+    return thread->CreateInternalStackTrace(soaa);
   };
   jobject trace = GetThreadStack(soa, javaThread, fn);
   if (trace == nullptr) {
@@ -151,7 +151,7 @@
   ScopedFastNativeObjectAccess soa(env);
   auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
      REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
-    return thread->CreateInternalStackTrace<false>(soaa);
+    return thread->CreateInternalStackTrace(soaa);
   };
   jobject trace = GetThreadStack(soa, javaThread, fn);
   if (trace == nullptr) {
diff --git a/runtime/native/java_lang_Throwable.cc b/runtime/native/java_lang_Throwable.cc
index b5ef7d8..b89e287 100644
--- a/runtime/native/java_lang_Throwable.cc
+++ b/runtime/native/java_lang_Throwable.cc
@@ -27,7 +27,7 @@
 
 static jobject Throwable_nativeFillInStackTrace(JNIEnv* env, jclass) {
   ScopedFastNativeObjectAccess soa(env);
-  return soa.Self()->CreateInternalStackTrace<false>(soa);
+  return soa.Self()->CreateInternalStackTrace(soa);
 }
 
 static jobjectArray Throwable_nativeGetStackTrace(JNIEnv* env, jclass, jobject javaStackState) {
diff --git a/runtime/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc b/runtime/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
index d405735..b50825a 100644
--- a/runtime/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
+++ b/runtime/native/org_apache_harmony_dalvik_ddmc_DdmVmInternal.cc
@@ -60,7 +60,7 @@
   if (static_cast<uint32_t>(thin_lock_id) == self->GetThreadId()) {
     // No need to suspend ourself to build stacktrace.
     ScopedObjectAccess soa(env);
-    jobject internal_trace = self->CreateInternalStackTrace<false>(soa);
+    jobject internal_trace = self->CreateInternalStackTrace(soa);
     trace = Thread::InternalStackTraceToStackTraceElementArray(soa, internal_trace);
   } else {
     ThreadList* thread_list = Runtime::Current()->GetThreadList();
@@ -78,7 +78,7 @@
     if (thread != nullptr) {
       {
         ScopedObjectAccess soa(env);
-        jobject internal_trace = thread->CreateInternalStackTrace<false>(soa);
+        jobject internal_trace = thread->CreateInternalStackTrace(soa);
         trace = Thread::InternalStackTraceToStackTraceElementArray(soa, internal_trace);
       }
       // Restart suspended thread.
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 684fae4..607ad9f 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2711,7 +2711,6 @@
   DISALLOW_COPY_AND_ASSIGN(FetchStackTraceVisitor);
 };
 
-template<bool kTransactionActive>
 class BuildInternalStackTraceVisitor : public StackVisitor {
  public:
   BuildInternalStackTraceVisitor(Thread* self, Thread* thread, int skip_depth)
@@ -2747,7 +2746,7 @@
       self_->AssertPendingOOMException();
       return false;
     }
-    trace->Set(0, methods_and_pcs);
+    trace->Set</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(0, methods_and_pcs);
     trace_ = trace.Get();
     // If We are called from native, use non-transactional mode.
     CHECK(last_no_suspend_cause == nullptr) << last_no_suspend_cause;
@@ -2775,15 +2774,15 @@
   }
 
   void AddFrame(ArtMethod* method, uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) {
-    ObjPtr<mirror::PointerArray> trace_methods_and_pcs = GetTraceMethodsAndPCs();
-    trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(count_, method, pointer_size_);
-    trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(
-        trace_methods_and_pcs->GetLength() / 2 + count_,
-        dex_pc,
-        pointer_size_);
+    ObjPtr<mirror::PointerArray> methods_and_pcs = GetTraceMethodsAndPCs();
+    methods_and_pcs->SetElementPtrSize</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
+        count_, method, pointer_size_);
+    methods_and_pcs->SetElementPtrSize</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
+        methods_and_pcs->GetLength() / 2 + count_, dex_pc, pointer_size_);
     // Save the declaring class of the method to ensure that the declaring classes of the methods
     // do not get unloaded while the stack trace is live.
-    trace_->Set(count_ + 1, method->GetDeclaringClass());
+    trace_->Set</*kTransactionActive=*/ false, /*kCheckTransaction=*/ false>(
+        count_ + 1, method->GetDeclaringClass());
     ++count_;
   }
 
@@ -2802,9 +2801,10 @@
   // Current position down stack trace.
   uint32_t count_ = 0;
   // An object array where the first element is a pointer array that contains the ArtMethod
-  // pointers on the stack and dex PCs. The rest of the elements are the declaring
-  // class of the ArtMethod pointers. trace_[i+1] contains the declaring class of the ArtMethod of
-  // the i'th frame.
+  // pointers on the stack and dex PCs. The rest of the elements are the declaring classes of
+  // the ArtMethod pointers. trace_[i+1] contains the declaring class of the ArtMethod of the
+  // i'th frame. We're initializing a newly allocated trace, so we do not need to record the
+  // writes under a transaction. If the transaction aborts, the whole trace becomes unreachable.
   mirror::ObjectArray<mirror::Object>* trace_ = nullptr;
   // For cross compilation.
   const PointerSize pointer_size_;
@@ -2812,7 +2812,6 @@
   DISALLOW_COPY_AND_ASSIGN(BuildInternalStackTraceVisitor);
 };
 
-template<bool kTransactionActive>
 jobject Thread::CreateInternalStackTrace(const ScopedObjectAccessAlreadyRunnable& soa) const {
   // Compute depth of stack, save frames if possible to avoid needing to recompute many.
   constexpr size_t kMaxSavedFrames = 256;
@@ -2825,9 +2824,8 @@
   const uint32_t skip_depth = count_visitor.GetSkipDepth();
 
   // Build internal stack trace.
-  BuildInternalStackTraceVisitor<kTransactionActive> build_trace_visitor(soa.Self(),
-                                                                         const_cast<Thread*>(this),
-                                                                         skip_depth);
+  BuildInternalStackTraceVisitor build_trace_visitor(
+      soa.Self(), const_cast<Thread*>(this), skip_depth);
   if (!build_trace_visitor.Init(depth)) {
     return nullptr;  // Allocation failed.
   }
@@ -2853,10 +2851,6 @@
   }
   return soa.AddLocalReference<jobject>(trace);
 }
-template jobject Thread::CreateInternalStackTrace<false>(
-    const ScopedObjectAccessAlreadyRunnable& soa) const;
-template jobject Thread::CreateInternalStackTrace<true>(
-    const ScopedObjectAccessAlreadyRunnable& soa) const;
 
 bool Thread::IsExceptionThrownByCurrentMethod(ObjPtr<mirror::Throwable> exception) const {
   // Only count the depth since we do not pass a stack frame array as an argument.
@@ -3273,10 +3267,7 @@
     if (cause.get() != nullptr) {
       exception->SetCause(DecodeJObject(cause.get())->AsThrowable());
     }
-    ScopedLocalRef<jobject> trace(GetJniEnv(),
-                                  Runtime::Current()->IsActiveTransaction()
-                                      ? CreateInternalStackTrace<true>(soa)
-                                      : CreateInternalStackTrace<false>(soa));
+    ScopedLocalRef<jobject> trace(GetJniEnv(), CreateInternalStackTrace(soa));
     if (trace.get() != nullptr) {
       exception->SetStackState(DecodeJObject(trace.get()).Ptr());
     }
diff --git a/runtime/thread.h b/runtime/thread.h
index faadd65..5742689 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -638,7 +638,6 @@
 
   // Create the internal representation of a stack trace, that is more time
   // and space efficient to compute than the StackTraceElement[].
-  template<bool kTransactionActive>
   jobject CreateInternalStackTrace(const ScopedObjectAccessAlreadyRunnable& soa) const
       REQUIRES_SHARED(Locks::mutator_lock_);