Clean up creating handles from `this`.
Make these member functions static and take an additional
parameter `Handle<.> h_this`. Callers mostly already have
a Handle<> to pass, so we avoid an extra StackHandleScope.
This pattern was already used for some functions.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing --interpreter
Change-Id: I4f4478b0526bcb2f3c23305d3b3cc4a65fff9ff5
diff --git a/openjdkjvmti/ti_class.cc b/openjdkjvmti/ti_class.cc
index 7537f28..988274b 100644
--- a/openjdkjvmti/ti_class.cc
+++ b/openjdkjvmti/ti_class.cc
@@ -213,7 +213,8 @@
art::StackHandleScope<2> hs(self);
// Save the results of all the non-retransformable agents.
// First allocate the ClassExt
- art::Handle<art::mirror::ClassExt> ext(hs.NewHandle(klass->EnsureExtDataPresent(self)));
+ art::Handle<art::mirror::ClassExt> ext =
+ hs.NewHandle(art::mirror::Class::EnsureExtDataPresent(klass, self));
// Make sure we have a ClassExt. This is fine even though we are a temporary since it will
// get copied.
if (ext.IsNull()) {
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index 2474b02..e720317 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -1603,7 +1603,8 @@
return false;
}
// Allocate the classExt
- art::Handle<art::mirror::ClassExt> ext(hs.NewHandle(klass->EnsureExtDataPresent(driver_->self_)));
+ art::Handle<art::mirror::ClassExt> ext =
+ hs.NewHandle(art::mirror::Class::EnsureExtDataPresent(klass, driver_->self_));
if (ext == nullptr) {
// No memory. Clear exception (it's not useful) and return error.
driver_->self_->AssertPendingOOMException();
@@ -1618,8 +1619,8 @@
// however, since that can happen at any time.
cur_data->SetOldObsoleteMethods(ext->GetObsoleteMethods());
cur_data->SetOldDexCaches(ext->GetObsoleteDexCaches());
- if (!ext->ExtendObsoleteArrays(
- driver_->self_, klass->GetDeclaredMethodsSlice(art::kRuntimePointerSize).size())) {
+ if (!art::mirror::ClassExt::ExtendObsoleteArrays(
+ ext, driver_->self_, klass->GetDeclaredMethodsSlice(art::kRuntimePointerSize).size())) {
// OOM. Clear exception and return error.
driver_->self_->AssertPendingOOMException();
driver_->self_->ClearException();
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b980a97..8fed3ca 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -5903,7 +5903,8 @@
CHECK(!klass->IsResolved());
// Retire the temporary class and create the correctly sized resolved class.
StackHandleScope<1> hs(self);
- auto h_new_class = hs.NewHandle(klass->CopyOf(self, class_size, imt, image_pointer_size_));
+ Handle<mirror::Class> h_new_class =
+ hs.NewHandle(mirror::Class::CopyOf(klass, self, class_size, imt, image_pointer_size_));
// Set arrays to null since we don't want to have multiple classes with the same ArtField or
// ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
// may not see any references to the target space and clean the card for a class if another
@@ -6265,7 +6266,7 @@
} else if (klass->HasSuperClass()) {
const size_t super_vtable_length = klass->GetSuperClass()->GetVTableLength();
const size_t max_count = num_virtual_methods + super_vtable_length;
- StackHandleScope<2> hs(self);
+ StackHandleScope<3> hs(self);
Handle<mirror::Class> super_class(hs.NewHandle(klass->GetSuperClass()));
MutableHandle<mirror::PointerArray> vtable;
if (super_class->ShouldHaveEmbeddedVTable()) {
@@ -6289,16 +6290,16 @@
}
} else {
DCHECK(super_class->IsAbstract() && !super_class->IsArrayClass());
- ObjPtr<mirror::PointerArray> super_vtable = super_class->GetVTable();
+ Handle<mirror::PointerArray> super_vtable = hs.NewHandle(super_class->GetVTable());
CHECK(super_vtable != nullptr) << super_class->PrettyClass();
// We might need to change vtable if we have new virtual methods or new interfaces (since that
// might give us new default methods). See comment above.
if (num_virtual_methods == 0 && super_class->GetIfTableCount() == klass->GetIfTableCount()) {
- klass->SetVTable(super_vtable);
+ klass->SetVTable(super_vtable.Get());
return true;
}
- vtable = hs.NewHandle(
- ObjPtr<mirror::PointerArray>::DownCast(super_vtable->CopyOf(self, max_count)));
+ vtable = hs.NewHandle(ObjPtr<mirror::PointerArray>::DownCast(
+ mirror::Array::CopyOf(super_vtable, self, max_count)));
if (UNLIKELY(vtable == nullptr)) {
self->AssertPendingOOMException();
return false;
@@ -6434,7 +6435,8 @@
// Shrink vtable if possible
CHECK_LE(actual_count, max_count);
if (actual_count < max_count) {
- vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(vtable->CopyOf(self, actual_count)));
+ vtable.Assign(ObjPtr<mirror::PointerArray>::DownCast(
+ mirror::Array::CopyOf(vtable, self, actual_count)));
if (UNLIKELY(vtable == nullptr)) {
self->AssertPendingOOMException();
return false;
@@ -6692,8 +6694,10 @@
DCHECK(if_table != nullptr);
DCHECK(if_table->GetMethodArray(i) != nullptr);
// If we are working on a super interface, try extending the existing method array.
- method_array = ObjPtr<mirror::PointerArray>::DownCast(
- if_table->GetMethodArray(i)->Clone(self));
+ StackHandleScope<1u> hs(self);
+ Handle<mirror::PointerArray> old_array = hs.NewHandle(if_table->GetMethodArray(i));
+ method_array =
+ ObjPtr<mirror::PointerArray>::DownCast(mirror::Object::Clone(old_array, self));
} else {
method_array = AllocPointerArray(self, num_methods);
}
@@ -7113,7 +7117,7 @@
if (new_ifcount < ifcount) {
DCHECK_NE(num_interfaces, 0U);
iftable.Assign(ObjPtr<mirror::IfTable>::DownCast(
- iftable->CopyOf(self, new_ifcount * mirror::IfTable::kMax)));
+ mirror::IfTable::CopyOf(iftable, self, new_ifcount * mirror::IfTable::kMax)));
if (UNLIKELY(iftable == nullptr)) {
self->AssertPendingOOMException();
return false;
@@ -7431,7 +7435,7 @@
ObjPtr<mirror::PointerArray> UpdateVtable(
const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
- ObjPtr<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
+ Handle<mirror::PointerArray> old_vtable) REQUIRES_SHARED(Locks::mutator_lock_);
void UpdateIfTable(Handle<mirror::IfTable> iftable) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -7759,7 +7763,7 @@
ObjPtr<mirror::PointerArray> ClassLinker::LinkInterfaceMethodsHelper::UpdateVtable(
const std::unordered_map<size_t, ClassLinker::MethodTranslation>& default_translations,
- ObjPtr<mirror::PointerArray> old_vtable) {
+ Handle<mirror::PointerArray> old_vtable) {
// Update the vtable to the new method structures. We can skip this for interfaces since they
// do not have vtables.
const size_t old_vtable_count = old_vtable->GetLength();
@@ -7768,8 +7772,8 @@
default_methods_.size() +
default_conflict_methods_.size();
- ObjPtr<mirror::PointerArray> vtable =
- ObjPtr<mirror::PointerArray>::DownCast(old_vtable->CopyOf(self_, new_vtable_count));
+ ObjPtr<mirror::PointerArray> vtable = ObjPtr<mirror::PointerArray>::DownCast(
+ mirror::Array::CopyOf(old_vtable, self_, new_vtable_count));
if (UNLIKELY(vtable == nullptr)) {
self_->AssertPendingOOMException();
return nullptr;
@@ -8103,7 +8107,7 @@
self->EndAssertNoThreadSuspension(old_cause);
if (fill_tables) {
- vtable.Assign(helper.UpdateVtable(default_translations, vtable.Get()));
+ vtable.Assign(helper.UpdateVtable(default_translations, vtable));
if (UNLIKELY(vtable == nullptr)) {
// The helper has already called self->AssertPendingOOMException();
return false;
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index 1a91abe..2a7cbaa 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -1522,7 +1522,7 @@
ASSERT_TRUE(dex_cache != nullptr);
}
// Make a copy of the dex cache and change the name.
- dex_cache.Assign(dex_cache->Clone(soa.Self())->AsDexCache());
+ dex_cache.Assign(mirror::Object::Clone(dex_cache, soa.Self())->AsDexCache());
const uint16_t data[] = { 0x20AC, 0x20A1 };
Handle<mirror::String> location(hs.NewHandle(mirror::String::AllocFromUtf16(soa.Self(),
arraysize(data),
diff --git a/runtime/handle.h b/runtime/handle.h
index 0c9c029..f6ed173 100644
--- a/runtime/handle.h
+++ b/runtime/handle.h
@@ -47,6 +47,11 @@
ALWAYS_INLINE Handle<T>& operator=(const Handle<T>& handle) = default;
+ template <typename Type,
+ typename = typename std::enable_if_t<std::is_base_of_v<T, Type>>>
+ ALWAYS_INLINE Handle(const Handle<Type>& other) : reference_(other.reference_) {
+ }
+
ALWAYS_INLINE explicit Handle(StackReference<T>* reference) : reference_(reference) {
}
diff --git a/runtime/interpreter/unstarted_runtime.cc b/runtime/interpreter/unstarted_runtime.cc
index 727cf2f..9bb3e9e 100644
--- a/runtime/interpreter/unstarted_runtime.cc
+++ b/runtime/interpreter/unstarted_runtime.cc
@@ -1398,12 +1398,14 @@
void UnstartedRuntime::UnstartedStringToCharArray(
Thread* self, ShadowFrame* shadow_frame, JValue* result, size_t arg_offset)
REQUIRES_SHARED(Locks::mutator_lock_) {
- ObjPtr<mirror::String> string = shadow_frame->GetVRegReference(arg_offset)->AsString();
+ StackHandleScope<1> hs(self);
+ Handle<mirror::String> string =
+ hs.NewHandle(shadow_frame->GetVRegReference(arg_offset)->AsString());
if (string == nullptr) {
AbortTransactionOrFail(self, "String.charAt with null object");
return;
}
- result->SetL(string->ToCharArray(self));
+ result->SetL(mirror::String::ToCharArray(string, self));
}
// This allows statically initializing ConcurrentHashMap and SynchronousQueue.
@@ -1797,7 +1799,9 @@
void UnstartedRuntime::UnstartedJNIObjectInternalClone(
Thread* self, ArtMethod* method ATTRIBUTE_UNUSED, mirror::Object* receiver,
uint32_t* args ATTRIBUTE_UNUSED, JValue* result) {
- result->SetL(receiver->Clone(self));
+ StackHandleScope<1> hs(self);
+ Handle<mirror::Object> h_receiver = hs.NewHandle(receiver);
+ result->SetL(mirror::Object::Clone(h_receiver, self));
}
void UnstartedRuntime::UnstartedJNIObjectNotifyAll(
diff --git a/runtime/mirror/array.cc b/runtime/mirror/array.cc
index 9bff169..e011e1c 100644
--- a/runtime/mirror/array.cc
+++ b/runtime/mirror/array.cc
@@ -138,16 +138,14 @@
art::ThrowArrayStoreException(object->GetClass(), this->GetClass());
}
-ObjPtr<Array> Array::CopyOf(Thread* self, int32_t new_length) {
- ObjPtr<Class> klass = GetClass();
+ObjPtr<Array> Array::CopyOf(Handle<Array> h_this, Thread* self, int32_t new_length) {
+ ObjPtr<Class> klass = h_this->GetClass();
CHECK(klass->IsPrimitiveArray()) << "Will miss write barriers";
DCHECK_GE(new_length, 0);
- // We may get copied by a compacting GC.
- StackHandleScope<1> hs(self);
- auto h_this(hs.NewHandle(this));
auto* heap = Runtime::Current()->GetHeap();
- gc::AllocatorType allocator_type = heap->IsMovableObject(this) ? heap->GetCurrentAllocator() :
- heap->GetCurrentNonMovingAllocator();
+ gc::AllocatorType allocator_type = heap->IsMovableObject(h_this.Get())
+ ? heap->GetCurrentAllocator()
+ : heap->GetCurrentNonMovingAllocator();
const auto component_size = klass->GetComponentSize();
const auto component_shift = klass->GetComponentSizeShift();
ObjPtr<Array> new_array =
diff --git a/runtime/mirror/array.h b/runtime/mirror/array.h
index dbc5d2a..19f9a92 100644
--- a/runtime/mirror/array.h
+++ b/runtime/mirror/array.h
@@ -125,8 +125,8 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
ALWAYS_INLINE bool CheckIsValidIndex(int32_t index) REQUIRES_SHARED(Locks::mutator_lock_);
- ObjPtr<Array> CopyOf(Thread* self, int32_t new_length) REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(!Roles::uninterruptible_);
+ static ObjPtr<Array> CopyOf(Handle<Array> h_this, Thread* self, int32_t new_length)
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
protected:
void ThrowArrayStoreException(ObjPtr<Object> object) REQUIRES_SHARED(Locks::mutator_lock_)
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index d6c10de..a36fe12 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -94,14 +94,12 @@
}
}
-ObjPtr<ClassExt> Class::EnsureExtDataPresent(Thread* self) {
- ObjPtr<ClassExt> existing(GetExtData());
+ObjPtr<ClassExt> Class::EnsureExtDataPresent(Handle<Class> h_this, Thread* self) {
+ ObjPtr<ClassExt> existing(h_this->GetExtData());
if (!existing.IsNull()) {
return existing;
}
- StackHandleScope<3> hs(self);
- // Handlerize 'this' since we are allocating here.
- Handle<Class> h_this(hs.NewHandle(this));
+ StackHandleScope<2> hs(self);
// Clear exception so we can allocate.
Handle<Throwable> throwable(hs.NewHandle(self->GetException()));
self->ClearException();
@@ -172,7 +170,7 @@
}
}
- ObjPtr<ClassExt> ext(h_this->EnsureExtDataPresent(self));
+ ObjPtr<ClassExt> ext(EnsureExtDataPresent(h_this, self));
if (!ext.IsNull()) {
self->AssertPendingException();
ext->SetVerifyError(self->GetException());
@@ -1205,12 +1203,13 @@
DISALLOW_COPY_AND_ASSIGN(CopyClassVisitor);
};
-ObjPtr<Class> Class::CopyOf(
- Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size) {
+ObjPtr<Class> Class::CopyOf(Handle<Class> h_this,
+ Thread* self,
+ int32_t new_length,
+ ImTable* imt,
+ PointerSize pointer_size) {
DCHECK_GE(new_length, static_cast<int32_t>(sizeof(Class)));
// We may get copied by a compacting GC.
- StackHandleScope<1> hs(self);
- Handle<Class> h_this(hs.NewHandle(this));
Runtime* runtime = Runtime::Current();
gc::Heap* heap = runtime->GetHeap();
// The num_bytes (3rd param) is sizeof(Class) as opposed to SizeOf()
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 2bae7e7..09d5532 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -1067,7 +1067,7 @@
// Returns the ExtData for this class, allocating one if necessary. This should be the only way
// to force ext_data_ to be set. No functions are available for changing an already set ext_data_
// since doing so is not allowed.
- ObjPtr<ClassExt> EnsureExtDataPresent(Thread* self)
+ static ObjPtr<ClassExt> EnsureExtDataPresent(Handle<Class> h_this, Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
uint16_t GetDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -1140,7 +1140,11 @@
void AssertInitializedOrInitializingInThread(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_);
- ObjPtr<Class> CopyOf(Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size)
+ static ObjPtr<Class> CopyOf(Handle<Class> h_this,
+ Thread* self,
+ int32_t new_length,
+ ImTable* imt,
+ PointerSize pointer_size)
REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
// For proxy class only.
diff --git a/runtime/mirror/class_ext.cc b/runtime/mirror/class_ext.cc
index d12f340..4c6cb4d 100644
--- a/runtime/mirror/class_ext.cc
+++ b/runtime/mirror/class_ext.cc
@@ -52,10 +52,9 @@
// We really need to be careful how we update this. If we ever in the future make it so that
// these arrays are written into without all threads being suspended we have a race condition! This
// race could cause obsolete methods to be missed.
-bool ClassExt::ExtendObsoleteArrays(Thread* self, uint32_t increase) {
+bool ClassExt::ExtendObsoleteArrays(Handle<ClassExt> h_this, Thread* self, uint32_t increase) {
// TODO It would be good to check that we have locked the class associated with this ClassExt.
- StackHandleScope<5> hs(self);
- Handle<ClassExt> h_this(hs.NewHandle(this));
+ StackHandleScope<4> hs(self);
Handle<PointerArray> old_methods(hs.NewHandle(h_this->GetObsoleteMethods()));
Handle<ObjectArray<DexCache>> old_dex_caches(hs.NewHandle(h_this->GetObsoleteDexCaches()));
ClassLinker* cl = Runtime::Current()->GetClassLinker();
diff --git a/runtime/mirror/class_ext.h b/runtime/mirror/class_ext.h
index 8fbbf5c..70bea33 100644
--- a/runtime/mirror/class_ext.h
+++ b/runtime/mirror/class_ext.h
@@ -72,7 +72,7 @@
REQUIRES_SHARED(Locks::mutator_lock_);
// Extend the obsolete arrays by the given amount.
- bool ExtendObsoleteArrays(Thread* self, uint32_t increase)
+ static bool ExtendObsoleteArrays(Handle<ClassExt> h_this, Thread* self, uint32_t increase)
REQUIRES_SHARED(Locks::mutator_lock_);
template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, class Visitor>
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 2348213..ede1c66 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -151,19 +151,17 @@
DISALLOW_COPY_AND_ASSIGN(CopyObjectVisitor);
};
-ObjPtr<Object> Object::Clone(Thread* self) {
- CHECK(!IsClass()) << "Can't clone classes.";
+ObjPtr<Object> Object::Clone(Handle<Object> h_this, Thread* self) {
+ CHECK(!h_this->IsClass()) << "Can't clone classes.";
// Object::SizeOf gets the right size even if we're an array. Using c->AllocObject() here would
// be wrong.
gc::Heap* heap = Runtime::Current()->GetHeap();
- size_t num_bytes = SizeOf();
- StackHandleScope<1> hs(self);
- Handle<Object> this_object(hs.NewHandle(this));
- CopyObjectVisitor visitor(&this_object, num_bytes);
- ObjPtr<Object> copy = heap->IsMovableObject(this)
- ? heap->AllocObject(self, GetClass(), num_bytes, visitor)
- : heap->AllocNonMovableObject(self, GetClass(), num_bytes, visitor);
- if (this_object->GetClass()->IsFinalizable()) {
+ size_t num_bytes = h_this->SizeOf();
+ CopyObjectVisitor visitor(&h_this, num_bytes);
+ ObjPtr<Object> copy = heap->IsMovableObject(h_this.Get())
+ ? heap->AllocObject(self, h_this->GetClass(), num_bytes, visitor)
+ : heap->AllocNonMovableObject(self, h_this->GetClass(), num_bytes, visitor);
+ if (h_this->GetClass()->IsFinalizable()) {
      heap->AddFinalizerReference(self, &copy);
}
return copy;
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index e6e9160..2eff560 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -33,6 +33,7 @@
class ArtField;
class ArtMethod;
+template <class T> class Handle;
class LockWord;
class Monitor;
struct ObjectOffsets;
@@ -130,7 +131,8 @@
template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
size_t SizeOf() REQUIRES_SHARED(Locks::mutator_lock_);
- ObjPtr<Object> Clone(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
+ static ObjPtr<Object> Clone(Handle<Object> h_this, Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
int32_t IdentityHashCode()
diff --git a/runtime/mirror/object_array-alloc-inl.h b/runtime/mirror/object_array-alloc-inl.h
index b417b62..594b0a6 100644
--- a/runtime/mirror/object_array-alloc-inl.h
+++ b/runtime/mirror/object_array-alloc-inl.h
@@ -61,15 +61,15 @@
}
template<class T>
-inline ObjPtr<ObjectArray<T>> ObjectArray<T>::CopyOf(Thread* self, int32_t new_length) {
+inline ObjPtr<ObjectArray<T>> ObjectArray<T>::CopyOf(Handle<ObjectArray<T>> h_this,
+ Thread* self,
+ int32_t new_length) {
DCHECK_GE(new_length, 0);
- // We may get copied by a compacting GC.
- StackHandleScope<1> hs(self);
- Handle<ObjectArray<T>> h_this(hs.NewHandle(this));
gc::Heap* heap = Runtime::Current()->GetHeap();
- gc::AllocatorType allocator_type = heap->IsMovableObject(this) ? heap->GetCurrentAllocator() :
- heap->GetCurrentNonMovingAllocator();
- ObjPtr<ObjectArray<T>> new_array = Alloc(self, GetClass(), new_length, allocator_type);
+ gc::AllocatorType allocator_type = heap->IsMovableObject(h_this.Get())
+ ? heap->GetCurrentAllocator()
+ : heap->GetCurrentNonMovingAllocator();
+ ObjPtr<ObjectArray<T>> new_array = Alloc(self, h_this->GetClass(), new_length, allocator_type);
if (LIKELY(new_array != nullptr)) {
new_array->AssignableMemcpy(0, h_this.Get(), 0, std::min(h_this->GetLength(), new_length));
}
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index f7046d1..7f43cd2 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -99,7 +99,9 @@
bool throw_exception)
REQUIRES_SHARED(Locks::mutator_lock_);
- ObjPtr<ObjectArray<T>> CopyOf(Thread* self, int32_t new_length)
+ static ObjPtr<ObjectArray<T>> CopyOf(Handle<ObjectArray<T>> h_this,
+ Thread* self,
+ int32_t new_length)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
diff --git a/runtime/mirror/object_test.cc b/runtime/mirror/object_test.cc
index 45a0437..ee137f0 100644
--- a/runtime/mirror/object_test.cc
+++ b/runtime/mirror/object_test.cc
@@ -111,7 +111,7 @@
StackHandleScope<2> hs(soa.Self());
Handle<ObjectArray<Object>> a1(hs.NewHandle(AllocObjectArray<Object>(soa.Self(), 256)));
size_t s1 = a1->SizeOf();
- ObjPtr<Object> clone = a1->Clone(soa.Self());
+ ObjPtr<Object> clone = Object::Clone(a1, soa.Self());
EXPECT_EQ(s1, clone->SizeOf());
EXPECT_TRUE(clone->GetClass() == a1->GetClass());
}
diff --git a/runtime/mirror/string.cc b/runtime/mirror/string.cc
index b2b68d6..0356080 100644
--- a/runtime/mirror/string.cc
+++ b/runtime/mirror/string.cc
@@ -323,18 +323,16 @@
return count_diff;
}
-ObjPtr<CharArray> String::ToCharArray(Thread* self) {
- StackHandleScope<1> hs(self);
- Handle<String> string(hs.NewHandle(this));
- ObjPtr<CharArray> result = CharArray::Alloc(self, GetLength());
+ObjPtr<CharArray> String::ToCharArray(Handle<String> h_this, Thread* self) {
+ ObjPtr<CharArray> result = CharArray::Alloc(self, h_this->GetLength());
if (result != nullptr) {
- if (string->IsCompressed()) {
- int32_t length = string->GetLength();
+ if (h_this->IsCompressed()) {
+ int32_t length = h_this->GetLength();
for (int i = 0; i < length; ++i) {
- result->GetData()[i] = string->CharAt(i);
+ result->GetData()[i] = h_this->CharAt(i);
}
} else {
- memcpy(result->GetData(), string->GetValue(), string->GetLength() * sizeof(uint16_t));
+ memcpy(result->GetData(), h_this->GetValue(), h_this->GetLength() * sizeof(uint16_t));
}
} else {
self->AssertPendingOOMException();
diff --git a/runtime/mirror/string.h b/runtime/mirror/string.h
index 116ecd1..0e3c500 100644
--- a/runtime/mirror/string.h
+++ b/runtime/mirror/string.h
@@ -187,7 +187,8 @@
int32_t CompareTo(ObjPtr<String> other) REQUIRES_SHARED(Locks::mutator_lock_);
- ObjPtr<CharArray> ToCharArray(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
+ static ObjPtr<CharArray> ToCharArray(Handle<String> h_this, Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
void GetChars(int32_t start, int32_t end, Handle<CharArray> array, int32_t index)
diff --git a/runtime/native/java_lang_Class.cc b/runtime/native/java_lang_Class.cc
index f69d1bc..4967f9e 100644
--- a/runtime/native/java_lang_Class.cc
+++ b/runtime/native/java_lang_Class.cc
@@ -220,7 +220,11 @@
Handle<mirror::Class> klass = hs.NewHandle(DecodeClass(soa, javaThis));
if (klass->IsProxyClass()) {
- return soa.AddLocalReference<jobjectArray>(klass->GetProxyInterfaces()->Clone(soa.Self()));
+ StackHandleScope<1> hs2(soa.Self());
+ Handle<mirror::ObjectArray<mirror::Class>> interfaces =
+ hs2.NewHandle(klass->GetProxyInterfaces());
+ return soa.AddLocalReference<jobjectArray>(
+ mirror::ObjectArray<mirror::Class>::Clone(interfaces, soa.Self()));
}
const dex::TypeList* iface_list = klass->GetInterfaceTypeList();
diff --git a/runtime/native/java_lang_Object.cc b/runtime/native/java_lang_Object.cc
index 48540f8..8fc10d1 100644
--- a/runtime/native/java_lang_Object.cc
+++ b/runtime/native/java_lang_Object.cc
@@ -18,6 +18,7 @@
#include "nativehelper/jni_macros.h"
+#include "handle_scope-inl.h"
#include "jni/jni_internal.h"
#include "mirror/object-inl.h"
#include "native_util.h"
@@ -27,8 +28,9 @@
static jobject Object_internalClone(JNIEnv* env, jobject java_this) {
ScopedFastNativeObjectAccess soa(env);
- ObjPtr<mirror::Object> o = soa.Decode<mirror::Object>(java_this);
- return soa.AddLocalReference<jobject>(o->Clone(soa.Self()));
+ StackHandleScope<1u> hs(soa.Self());
+ Handle<mirror::Object> o = hs.NewHandle(soa.Decode<mirror::Object>(java_this));
+ return soa.AddLocalReference<jobject>(mirror::Object::Clone(o, soa.Self()));
}
static void Object_notify(JNIEnv* env, jobject java_this) {
diff --git a/runtime/native/java_lang_String.cc b/runtime/native/java_lang_String.cc
index 2d9e7dc..7c7c553 100644
--- a/runtime/native/java_lang_String.cc
+++ b/runtime/native/java_lang_String.cc
@@ -105,8 +105,9 @@
static jcharArray String_toCharArray(JNIEnv* env, jobject java_this) {
ScopedFastNativeObjectAccess soa(env);
- ObjPtr<mirror::String> s = soa.Decode<mirror::String>(java_this);
- return soa.AddLocalReference<jcharArray>(s->ToCharArray(soa.Self()));
+ StackHandleScope<1u> hs(soa.Self());
+ Handle<mirror::String> s = hs.NewHandle(soa.Decode<mirror::String>(java_this));
+ return soa.AddLocalReference<jcharArray>(mirror::String::ToCharArray(s, soa.Self()));
}
static JNINativeMethod gMethods[] = {
diff --git a/runtime/native/java_lang_reflect_Method.cc b/runtime/native/java_lang_reflect_Method.cc
index 4525157..0d9a257 100644
--- a/runtime/native/java_lang_reflect_Method.cc
+++ b/runtime/native/java_lang_reflect_Method.cc
@@ -59,9 +59,11 @@
++i;
}
CHECK_NE(throws_index, -1);
- ObjPtr<mirror::ObjectArray<mirror::Class>> declared_exceptions =
- klass->GetProxyThrows()->Get(throws_index);
- return soa.AddLocalReference<jobjectArray>(declared_exceptions->Clone(soa.Self()));
+ StackHandleScope<1u> hs(soa.Self());
+ Handle<mirror::ObjectArray<mirror::Class>> declared_exceptions =
+ hs.NewHandle(klass->GetProxyThrows()->Get(throws_index));
+ return soa.AddLocalReference<jobjectArray>(
+ mirror::ObjectArray<mirror::Class>::Clone(declared_exceptions, soa.Self()));
} else {
ObjPtr<mirror::ObjectArray<mirror::Class>> result_array =
annotations::GetExceptionTypesForMethod(method);