Diffstat (limited to 'compiler/image_writer.cc')
-rw-r--r--  compiler/image_writer.cc | 395
1 file changed, 238 insertions(+), 157 deletions(-)
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 73e121f1cd..93897aa228 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -73,7 +73,7 @@ static constexpr bool kBinObjects = true;
static constexpr bool kComputeEagerResolvedStrings = false;
static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) {
Class* klass = obj->GetClass();
CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");
}
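
The hunk above is part of a mechanical rename of ART's lock annotation macros (SHARED_LOCKS_REQUIRED to SHARED_REQUIRES, EXCLUSIVE_LOCKS_REQUIRED to REQUIRES) that recurs throughout the rest of this diff, tracking Clang's capability-style thread-safety attributes. A minimal sketch of how such macros map onto the attributes, assuming the newer spellings; the authoritative definitions live in ART's runtime/base/macros.h:

```cpp
// Check with: clang++ -std=c++11 -Wthread-safety -fsyntax-only lock_sketch.cc
#define CAPABILITY(x)        __attribute__((capability(x)))
#define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))
#define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
#define SHARED_REQUIRES(...) __attribute__((requires_shared_capability(__VA_ARGS__)))

struct CAPABILITY("mutex") Mutex {
  void Lock() ACQUIRE() {}
  void Unlock() RELEASE() {}
};

Mutex mutator_lock;  // Stand-in for Locks::mutator_lock_.

// Readers may overlap with each other; writers need exclusive ownership.
int  ReadHeap()  SHARED_REQUIRES(mutator_lock) { return 0; }
void WriteHeap() REQUIRES(mutator_lock) {}
```
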
@@ -244,8 +244,8 @@ void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot
DCHECK(object != nullptr);
DCHECK_NE(image_objects_offset_begin_, 0u);
- size_t previous_bin_sizes = bin_slot_previous_sizes_[bin_slot.GetBin()];
- size_t new_offset = image_objects_offset_begin_ + previous_bin_sizes + bin_slot.GetIndex();
+ size_t bin_slot_offset = bin_slot_offsets_[bin_slot.GetBin()];
+ size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
DCHECK_ALIGNED(new_offset, kObjectAlignment);
SetImageOffset(object, new_offset);
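
The new math works because bin_slot_offsets_ (filled in by CalculateNewObjectOffsets later in this diff) stores each bin's absolute start: image_objects_offset_begin_ plus all earlier bins plus any inter-bin alignment padding. A sketch of the addressing scheme, with illustrative names rather than the real ImageWriter members:

```cpp
#include <cstddef>

// Illustrative bins; the real enum has many more.
enum Bin : size_t { kBinMiscDirty, kBinArtField, kBinCount };

struct Layout {
  size_t bin_slot_offsets[kBinCount];  // Absolute start of each bin in the image.

  // Old scheme: begin + cumulative_sizes[bin] + index.
  // New scheme: the begin offset and padding are folded into the bin base.
  size_t FinalOffset(Bin bin, size_t index_in_bin) const {
    return bin_slot_offsets[bin] + index_in_bin;
  }
};
```
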
@@ -539,16 +539,19 @@ bool ImageWriter::AllocMemory() {
return true;
}
+class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
+ public:
+ bool Visit(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+ StackHandleScope<1> hs(Thread::Current());
+ mirror::Class::ComputeName(hs.NewHandle(c));
+ return true;
+ }
+};
+
void ImageWriter::ComputeLazyFieldsForImageClasses() {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- class_linker->VisitClassesWithoutClassesLock(ComputeLazyFieldsForClassesVisitor, nullptr);
-}
-
-bool ImageWriter::ComputeLazyFieldsForClassesVisitor(Class* c, void* /*arg*/) {
- Thread* self = Thread::Current();
- StackHandleScope<1> hs(self);
- mirror::Class::ComputeName(hs.NewHandle(c));
- return true;
+ ComputeLazyFieldsForClassesVisitor visitor;
+ class_linker->VisitClassesWithoutClassesLock(&visitor);
}
void ImageWriter::ComputeEagerResolvedStringsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED) {
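
This is the first of several conversions in this patch from C-style callbacks (a function pointer plus a void* context) to ClassVisitor objects with a virtual Visit(). A sketch of the pattern using stand-in types, not ART's real Class/ClassLinker:

```cpp
#include <vector>

class Class {};  // Stand-in for mirror::Class.

class ClassVisitor {
 public:
  virtual ~ClassVisitor() {}
  // Returning false stops the walk early.
  virtual bool Visit(Class* klass) = 0;
};

class ClassLinker {
 public:
  void VisitClasses(ClassVisitor* visitor) {
    for (Class* klass : classes_) {
      if (!visitor->Visit(klass)) {
        return;
      }
    }
  }
 private:
  std::vector<Class*> classes_;
};
```

State that used to travel through the void* argument now lives in the visitor itself, as StackHandleScope does in ComputeLazyFieldsForClassesVisitor above.
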
@@ -592,9 +595,20 @@ bool ImageWriter::IsImageClass(Class* klass) {
return compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
}
-struct NonImageClasses {
- ImageWriter* image_writer;
- std::set<std::string>* non_image_classes;
+class NonImageClassesVisitor : public ClassVisitor {
+ public:
+ explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
+
+ bool Visit(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (!image_writer_->IsImageClass(klass)) {
+ std::string temp;
+ non_image_classes_.insert(klass->GetDescriptor(&temp));
+ }
+ return true;
+ }
+
+ std::set<std::string> non_image_classes_;
+ ImageWriter* const image_writer_;
};
void ImageWriter::PruneNonImageClasses() {
@@ -606,14 +620,11 @@ void ImageWriter::PruneNonImageClasses() {
Thread* self = Thread::Current();
// Make a list of classes we would like to prune.
- std::set<std::string> non_image_classes;
- NonImageClasses context;
- context.image_writer = this;
- context.non_image_classes = &non_image_classes;
- class_linker->VisitClasses(NonImageClassesVisitor, &context);
+ NonImageClassesVisitor visitor(this);
+ class_linker->VisitClasses(&visitor);
// Remove the undesired classes from the class roots.
- for (const std::string& it : non_image_classes) {
+ for (const std::string& it : visitor.non_image_classes_) {
bool result = class_linker->RemoveClass(it.c_str(), nullptr);
DCHECK(result);
}
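
With a visitor object, the result set that previously traveled through the NonImageClasses context struct becomes member state the caller reads back after the walk. A sketch along the lines of the stand-in types above; DescriptorOf is a hypothetical helper standing in for Class::GetDescriptor():

```cpp
#include <set>
#include <string>

class Class {};  // Stand-in, as in the previous sketch.
class ClassVisitor {
 public:
  virtual ~ClassVisitor() {}
  virtual bool Visit(Class* klass) = 0;
};

class PruneVisitor : public ClassVisitor {
 public:
  explicit PruneVisitor(const std::set<std::string>& image_classes)
      : image_classes_(image_classes) {}

  bool Visit(Class* klass) override {
    const std::string descriptor = DescriptorOf(klass);
    if (image_classes_.count(descriptor) == 0) {
      non_image_classes_.insert(descriptor);  // Collect now, prune after the walk.
    }
    return true;  // Never stop early; the full set is wanted.
  }

  std::set<std::string> non_image_classes_;

 private:
  static std::string DescriptorOf(Class* /*klass*/) { return "LExample;"; }
  const std::set<std::string>& image_classes_;
};
```
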
@@ -669,15 +680,6 @@ void ImageWriter::PruneNonImageClasses() {
class_linker->DropFindArrayClassCache();
}
-bool ImageWriter::NonImageClassesVisitor(Class* klass, void* arg) {
- NonImageClasses* context = reinterpret_cast<NonImageClasses*>(arg);
- if (!context->image_writer->IsImageClass(klass)) {
- std::string temp;
- context->non_image_classes->insert(klass->GetDescriptor(&temp));
- }
- return true;
-}
-
void ImageWriter::CheckNonImageClassesRemoved() {
if (compiler_driver_.GetImageClasses() != nullptr) {
gc::Heap* heap = Runtime::Current()->GetHeap();
@@ -715,8 +717,10 @@ void ImageWriter::CalculateObjectBinSlots(Object* obj) {
DCHECK_EQ(obj, obj->AsString()->Intern());
return;
}
- mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrong(
- obj->AsString()->Intern());
+ // InternStrongImageString allows us to intern while holding the heap bitmap lock. This is safe
+ // since we are guaranteed not to have any GC during image writing.
+ mirror::String* const interned = Runtime::Current()->GetInternTable()->InternStrongImageString(
+ obj->AsString());
if (obj != interned) {
if (!IsImageBinSlotAssigned(interned)) {
// interned obj is after us, allocate its location early
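
The interning step follows a canonicalize-then-forward pattern: intern the string, and if the canonical copy is a different object with no bin slot assigned yet, reserve its slot immediately so all duplicates resolve to one image address. A much-simplified model using std::string in place of mirror::String; InternTable and SlotAssigner are illustrative stand-ins:

```cpp
#include <cstddef>
#include <string>
#include <unordered_map>
#include <unordered_set>

struct InternTable {
  // Returns the canonical copy; unordered_set is node-based, so the
  // element's address stays stable across rehashes.
  const std::string* InternStrong(const std::string& s) {
    return &*strings_.insert(s).first;
  }
  std::unordered_set<std::string> strings_;
};

struct SlotAssigner {
  void Canonicalize(const std::string* obj, InternTable* table) {
    const std::string* interned = table->InternStrong(*obj);
    if (obj != interned && slots_.count(interned) == 0) {
      // Canonical copy not visited yet: lay it out now so both
      // references forward to the same place.
      slots_[interned] = next_slot_++;
    }
  }
  std::unordered_map<const std::string*, size_t> slots_;
  size_t next_slot_ = 0;
};
```
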
@@ -821,35 +825,72 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
field_offset = MemberOffset(field_offset.Uint32Value() +
sizeof(mirror::HeapReference<mirror::Object>));
}
- // Visit and assign offsets for fields.
+ // Visit and assign offsets for fields and field arrays.
auto* as_klass = h_obj->AsClass();
- ArtField* fields[] = { as_klass->GetSFields(), as_klass->GetIFields() };
- size_t num_fields[] = { as_klass->NumStaticFields(), as_klass->NumInstanceFields() };
- for (size_t i = 0; i < 2; ++i) {
- for (size_t j = 0; j < num_fields[i]; ++j) {
- auto* field = fields[i] + j;
- auto it = native_object_reloc_.find(field);
- CHECK(it == native_object_reloc_.end()) << "Field at index " << i << ":" << j
- << " already assigned " << PrettyField(field);
- native_object_reloc_.emplace(
- field, NativeObjectReloc { bin_slot_sizes_[kBinArtField], kBinArtField });
- bin_slot_sizes_[kBinArtField] += sizeof(ArtField);
+ LengthPrefixedArray<ArtField>* fields[] = {
+ as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
+ };
+ for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
+ // Total array length including header.
+ if (cur_fields != nullptr) {
+ const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
+ // Forward the entire array at once.
+ auto it = native_object_relocations_.find(cur_fields);
+ CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
+ << " already forwarded";
+ size_t& offset = bin_slot_sizes_[kBinArtField];
+ native_object_relocations_.emplace(
+ cur_fields, NativeObjectRelocation {
+ offset, kNativeObjectRelocationTypeArtFieldArray });
+ offset += header_size;
+ // Forward individual fields so that we can quickly find where they belong.
+ for (size_t i = 0, count = cur_fields->Length(); i < count; ++i) {
+ // Fields are forwarded individually, in addition to the array itself.
+ ArtField* field = &cur_fields->At(i);
+ auto it2 = native_object_relocations_.find(field);
+ CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
+ << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
+ native_object_relocations_.emplace(
+ field, NativeObjectRelocation {offset, kNativeObjectRelocationTypeArtField });
+ offset += sizeof(ArtField);
+ }
}
}
// Visit and assign offsets for methods.
- IterationRange<StrideIterator<ArtMethod>> method_arrays[] = {
- as_klass->GetDirectMethods(target_ptr_size_),
- as_klass->GetVirtualMethods(target_ptr_size_)
+ LengthPrefixedArray<ArtMethod>* method_arrays[] = {
+ as_klass->GetDirectMethodsPtr(), as_klass->GetVirtualMethodsPtr(),
};
- for (auto& array : method_arrays) {
+ for (LengthPrefixedArray<ArtMethod>* array : method_arrays) {
+ if (array == nullptr) {
+ continue;
+ }
bool any_dirty = false;
size_t count = 0;
- for (auto& m : array) {
+ const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
+ const size_t method_size = ArtMethod::Size(target_ptr_size_);
+ auto iteration_range =
+ MakeIterationRangeFromLengthPrefixedArray(array, method_size, method_alignment);
+ for (auto& m : iteration_range) {
any_dirty = any_dirty || WillMethodBeDirty(&m);
++count;
}
- for (auto& m : array) {
- AssignMethodOffset(&m, any_dirty ? kBinArtMethodDirty : kBinArtMethodClean);
+ NativeObjectRelocationType type = any_dirty ? kNativeObjectRelocationTypeArtMethodDirty :
+ kNativeObjectRelocationTypeArtMethodClean;
+ Bin bin_type = BinTypeForNativeRelocationType(type);
+ // Forward the entire array at once, reserving space for the header first.
+ const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
+ method_size,
+ method_alignment);
+ auto it = native_object_relocations_.find(array);
+ CHECK(it == native_object_relocations_.end()) << "Method array " << array
+ << " already forwarded";
+ size_t& offset = bin_slot_sizes_[bin_type];
+ native_object_relocations_.emplace(array, NativeObjectRelocation { offset,
+ any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty :
+ kNativeObjectRelocationTypeArtMethodArrayClean });
+ offset += header_size;
+ for (auto& m : iteration_range) {
+ AssignMethodOffset(&m, type);
}
(any_dirty ? dirty_methods_ : clean_methods_) += count;
}
@@ -867,12 +908,13 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
}
}
-void ImageWriter::AssignMethodOffset(ArtMethod* method, Bin bin) {
- auto it = native_object_reloc_.find(method);
- CHECK(it == native_object_reloc_.end()) << "Method " << method << " already assigned "
+void ImageWriter::AssignMethodOffset(ArtMethod* method, NativeObjectRelocationType type) {
+ auto it = native_object_relocations_.find(method);
+ CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
<< PrettyMethod(method);
- native_object_reloc_.emplace(method, NativeObjectReloc { bin_slot_sizes_[bin], bin });
- bin_slot_sizes_[bin] += ArtMethod::ObjectSize(target_ptr_size_);
+ size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
+ native_object_relocations_.emplace(method, NativeObjectRelocation { offset, type });
+ offset += ArtMethod::Size(target_ptr_size_);
}
void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
@@ -926,19 +968,40 @@ void ImageWriter::CalculateNewObjectOffsets() {
runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);
+
+ // Add room for fake length prefixed array.
+ const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
+ auto it = native_object_relocations_.find(&image_method_array_);
+ CHECK(it == native_object_relocations_.end());
+ size_t& offset = bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
+ native_object_relocations_.emplace(&image_method_array_,
+ NativeObjectRelocation { offset, image_method_type });
+ size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
+ const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
+ 0, ArtMethod::Size(target_ptr_size_), method_alignment);
+ CHECK_ALIGNED_PARAM(array_size, method_alignment);
+ offset += array_size;
for (auto* m : image_methods_) {
CHECK(m != nullptr);
CHECK(m->IsRuntimeMethod());
- AssignMethodOffset(m, kBinArtMethodDirty);
+ AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean);
}
- // Calculate cumulative bin slot sizes.
- size_t previous_sizes = 0u;
+ // Calculate bin slot offsets.
+ size_t bin_offset = image_objects_offset_begin_;
for (size_t i = 0; i != kBinSize; ++i) {
- bin_slot_previous_sizes_[i] = previous_sizes;
- previous_sizes += bin_slot_sizes_[i];
+ bin_slot_offsets_[i] = bin_offset;
+ bin_offset += bin_slot_sizes_[i];
+ if (i == kBinArtField) {
+ static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
+ static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
+ DCHECK_ALIGNED(bin_offset, 4u);
+ DCHECK(method_alignment == 4u || method_alignment == 8u);
+ bin_offset = RoundUp(bin_offset, method_alignment);
+ }
}
- DCHECK_EQ(previous_sizes, GetBinSizeSum());
+ // NOTE: There may be additional padding between the bin slots and the intern table.
+
DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);
// Transform each object's bin slot into an offset which will be used to do the final copy.
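
The bin-offset pass above replaces the old cumulative-size array and inserts padding at the field/method boundary, since ArtField is 4-byte aligned while ArtMethod alignment depends on the target pointer size (4 or 8). A sketch of that loop with illustrative bin names:

```cpp
#include <cstddef>

size_t RoundUp(size_t x, size_t align) { return (x + align - 1) & ~(align - 1); }

// Illustrative; the real enum has more bins before kBinArtField.
enum Bin : size_t { kBinArtField, kBinArtMethodClean, kBinArtMethodDirty, kBinCount };

void ComputeBinOffsets(const size_t bin_sizes[kBinCount],
                       size_t objects_begin,     // image_objects_offset_begin_
                       size_t method_alignment,  // 4u or 8u
                       size_t bin_offsets[kBinCount]) {
  size_t cursor = objects_begin;
  for (size_t i = 0; i != kBinCount; ++i) {
    bin_offsets[i] = cursor;
    cursor += bin_sizes[i];
    if (i == kBinArtField) {
      // Methods follow fields; pad so the first method bin starts aligned.
      cursor = RoundUp(cursor, method_alignment);
    }
  }
}
```
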
@@ -949,10 +1012,10 @@ void ImageWriter::CalculateNewObjectOffsets() {
image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots.Get()));
// Update the native relocations by adding their bin sums.
- for (auto& pair : native_object_reloc_) {
- auto& native_reloc = pair.second;
- native_reloc.offset += image_objects_offset_begin_ +
- bin_slot_previous_sizes_[native_reloc.bin_type];
+ for (auto& pair : native_object_relocations_) {
+ NativeObjectRelocation& relocation = pair.second;
+ Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
+ relocation.offset += bin_slot_offsets_[bin_type];
}
// Calculate how big the intern table will be after being serialized.
@@ -979,16 +1042,18 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
// Add field section.
auto* field_section = &sections[ImageHeader::kSectionArtFields];
*field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
- CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtField],
- field_section->Offset());
+ CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
cur_pos = field_section->End();
+ // Round up to the alignment required by the method section.
+ cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size_));
// Add method section.
auto* methods_section = &sections[ImageHeader::kSectionArtMethods];
*methods_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtMethodClean] +
bin_slot_sizes_[kBinArtMethodDirty]);
- CHECK_EQ(image_objects_offset_begin_ + bin_slot_previous_sizes_[kBinArtMethodClean],
- methods_section->Offset());
+ CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
cur_pos = methods_section->End();
+ // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
+ cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
// Calculate the size of the interned strings.
auto* interned_strings_section = &sections[ImageHeader::kSectionInternedStrings];
*interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
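
Section layout follows the same cursor-plus-RoundUp discipline: each ImageSection records (offset, size), the cursor advances to End(), and padding is inserted wherever the next consumer demands alignment, method alignment before the method section and 8 bytes before the serialized intern table. A sketch over three sections; the real header has more:

```cpp
#include <cstdint>

struct ImageSection {
  uint32_t offset;
  uint32_t size;
  uint32_t End() const { return offset + size; }
};

uint32_t RoundUp32(uint32_t x, uint32_t align) { return (x + align - 1) & ~(align - 1); }

// Fields, then methods, then the interned-string table.
void LayoutSections(uint32_t cur_pos, uint32_t fields_size, uint32_t methods_size,
                    uint32_t intern_bytes, uint32_t method_alignment,
                    ImageSection sections[3]) {
  sections[0] = ImageSection{cur_pos, fields_size};
  cur_pos = RoundUp32(sections[0].End(), method_alignment);  // Pad for ArtMethod.
  sections[1] = ImageSection{cur_pos, methods_size};
  cur_pos = RoundUp32(sections[1].End(), sizeof(uint64_t));  // Pad for HashSet data.
  sections[2] = ImageSection{cur_pos, intern_bytes};
}
```
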
@@ -1019,8 +1084,8 @@ void ImageWriter::CreateHeader(size_t oat_loaded_size, size_t oat_data_offset) {
}
ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
- auto it = native_object_reloc_.find(method);
- CHECK(it != native_object_reloc_.end()) << PrettyMethod(method) << " @ " << method;
+ auto it = native_object_relocations_.find(method);
+ CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
CHECK_GE(it->second.offset, image_end_) << "ArtMethods should be after Objects";
return reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
}
@@ -1031,7 +1096,7 @@ class FixupRootVisitor : public RootVisitor {
}
void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
- OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
for (size_t i = 0; i < count; ++i) {
*roots[i] = ImageAddress(*roots[i]);
}
@@ -1039,7 +1104,7 @@ class FixupRootVisitor : public RootVisitor {
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
const RootInfo& info ATTRIBUTE_UNUSED)
- OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
for (size_t i = 0; i < count; ++i) {
roots[i]->Assign(ImageAddress(roots[i]->AsMirrorPtr()));
}
@@ -1048,7 +1113,7 @@ class FixupRootVisitor : public RootVisitor {
private:
ImageWriter* const image_writer_;
- mirror::Object* ImageAddress(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::Object* ImageAddress(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_) {
const size_t offset = image_writer_->GetImageOffset(obj);
auto* const dest = reinterpret_cast<Object*>(image_writer_->image_begin_ + offset);
VLOG(compiler) << "Update root from " << obj << " to " << dest;
@@ -1058,20 +1123,37 @@ class FixupRootVisitor : public RootVisitor {
void ImageWriter::CopyAndFixupNativeData() {
// Copy ArtFields and methods to their locations and update the array for convenience.
- for (auto& pair : native_object_reloc_) {
- auto& native_reloc = pair.second;
- if (native_reloc.bin_type == kBinArtField) {
- auto* dest = image_->Begin() + native_reloc.offset;
- DCHECK_GE(dest, image_->Begin() + image_end_);
- memcpy(dest, pair.first, sizeof(ArtField));
- reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
- GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
- } else {
- CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
- auto* dest = image_->Begin() + native_reloc.offset;
- DCHECK_GE(dest, image_->Begin() + image_end_);
- CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
- reinterpret_cast<ArtMethod*>(dest));
+ for (auto& pair : native_object_relocations_) {
+ NativeObjectRelocation& relocation = pair.second;
+ auto* dest = image_->Begin() + relocation.offset;
+ DCHECK_GE(dest, image_->Begin() + image_end_);
+ switch (relocation.type) {
+ case kNativeObjectRelocationTypeArtField: {
+ memcpy(dest, pair.first, sizeof(ArtField));
+ reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
+ GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
+ break;
+ }
+ case kNativeObjectRelocationTypeArtMethodClean:
+ case kNativeObjectRelocationTypeArtMethodDirty: {
+ CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
+ reinterpret_cast<ArtMethod*>(dest));
+ break;
+ }
+ // For arrays, copy just the header since the elements will get copied by their corresponding
+ // relocations.
+ case kNativeObjectRelocationTypeArtFieldArray: {
+ memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
+ break;
+ }
+ case kNativeObjectRelocationTypeArtMethodArrayClean:
+ case kNativeObjectRelocationTypeArtMethodArrayDirty: {
+ memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(
+ 0,
+ ArtMethod::Size(target_ptr_size_),
+ ArtMethod::Alignment(target_ptr_size_)));
+ break;
+ }
}
}
// Fixup the image method roots.
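
Collapsing the old field/method branches into a switch over the relocation type also makes the array cases explicit: array entries copy only their length-prefix header, because every element has its own relocation record that lands it at header plus index times element size. A sketch of the dispatch, with a hypothetical minimal enum and sizes passed in rather than computed from target_ptr_size_:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>

enum RelocType { kField, kMethod, kFieldArray, kMethodArray };

struct Reloc {
  size_t offset;  // Destination offset inside the image.
  RelocType type;
};

void CopyNative(uint8_t* image_base, const void* src, const Reloc& r,
                size_t field_size, size_t method_size,
                size_t field_header, size_t method_header) {
  uint8_t* dest = image_base + r.offset;
  switch (r.type) {
    case kField:       memcpy(dest, src, field_size);    break;
    case kMethod:      memcpy(dest, src, method_size);   break;
    // Arrays: header only; each element is copied by its own relocation.
    case kFieldArray:  memcpy(dest, src, field_header);  break;
    case kMethodArray: memcpy(dest, src, method_header); break;
  }
}
```
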
@@ -1080,12 +1162,12 @@ void ImageWriter::CopyAndFixupNativeData() {
for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
auto* m = image_methods_[i];
CHECK(m != nullptr);
- auto it = native_object_reloc_.find(m);
- CHECK(it != native_object_reloc_.end()) << "No forwarding for " << PrettyMethod(m);
- auto& native_reloc = it->second;
- CHECK(methods_section.Contains(native_reloc.offset)) << native_reloc.offset << " not in "
+ auto it = native_object_relocations_.find(m);
+ CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(m);
+ NativeObjectRelocation& relocation = it->second;
+ CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
<< methods_section;
- CHECK(IsArtMethodBin(native_reloc.bin_type)) << native_reloc.bin_type;
+ CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
auto* dest = reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset);
image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), dest);
}
@@ -1137,9 +1219,9 @@ void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* a
for (size_t i = 0, count = num_elements; i < count; ++i) {
auto* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
if (elem != nullptr) {
- auto it = native_object_reloc_.find(elem);
- if (it == native_object_reloc_.end()) {
- if (IsArtMethodBin(array_type)) {
+ auto it = native_object_relocations_.find(elem);
+ if (it == native_object_relocations_.end()) {
+ if (it->second.IsArtMethodRelocation()) {
auto* method = reinterpret_cast<ArtMethod*>(elem);
LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
<< method << " idx=" << i << "/" << num_elements << " with declaring class "
@@ -1185,8 +1267,15 @@ class FixupVisitor {
FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
}
+ // Ignore class roots since we don't have a way to map them to the destination. These are handled
+ // with other logic.
+ void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+ const {}
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
- EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+ REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
// Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
// image.
@@ -1196,8 +1285,7 @@ class FixupVisitor {
// java.lang.ref.Reference visitor.
void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
}
@@ -1213,70 +1301,56 @@ class FixupClassVisitor FINAL : public FixupVisitor {
}
void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
- EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+ REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
DCHECK(obj->IsClass());
FixupVisitor::operator()(obj, offset, /*is_static*/false);
}
void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
mirror::Reference* ref ATTRIBUTE_UNUSED) const
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+ SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
LOG(FATAL) << "Reference not expected here.";
}
};
+void* ImageWriter::NativeLocationInImage(void* obj) {
+ if (obj == nullptr) {
+ return nullptr;
+ }
+ auto it = native_object_relocations_.find(obj);
+ CHECK(it != native_object_relocations_.end()) << obj;
+ const NativeObjectRelocation& relocation = it->second;
+ return reinterpret_cast<void*>(image_begin_ + relocation.offset);
+}
+
void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
- // Copy and fix up ArtFields in the class.
- ArtField* fields[2] = { orig->GetSFields(), orig->GetIFields() };
- size_t num_fields[2] = { orig->NumStaticFields(), orig->NumInstanceFields() };
// Update the field arrays.
- for (size_t i = 0; i < 2; ++i) {
- if (num_fields[i] == 0) {
- CHECK(fields[i] == nullptr);
- continue;
- }
- auto it = native_object_reloc_.find(fields[i]);
- CHECK(it != native_object_reloc_.end()) << PrettyClass(orig) << " : " << PrettyField(fields[i]);
- auto* image_fields = reinterpret_cast<ArtField*>(image_begin_ + it->second.offset);
- if (i == 0) {
- copy->SetSFieldsUnchecked(image_fields);
- } else {
- copy->SetIFieldsUnchecked(image_fields);
- }
- }
- // Update direct / virtual method arrays.
- auto* direct_methods = orig->GetDirectMethodsPtr();
- if (direct_methods != nullptr) {
- auto it = native_object_reloc_.find(direct_methods);
- CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
- copy->SetDirectMethodsPtrUnchecked(
- reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
- }
- auto* virtual_methods = orig->GetVirtualMethodsPtr();
- if (virtual_methods != nullptr) {
- auto it = native_object_reloc_.find(virtual_methods);
- CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
- copy->SetVirtualMethodsPtr(
- reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
- }
+ copy->SetSFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
+ NativeLocationInImage(orig->GetSFieldsPtr())));
+ copy->SetIFieldsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtField>*>(
+ NativeLocationInImage(orig->GetIFieldsPtr())));
+ // Update direct and virtual method arrays.
+ copy->SetDirectMethodsPtrUnchecked(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+ NativeLocationInImage(orig->GetDirectMethodsPtr())));
+ copy->SetVirtualMethodsPtr(reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(
+ NativeLocationInImage(orig->GetVirtualMethodsPtr())));
// Fix up embedded tables.
if (orig->ShouldHaveEmbeddedImtAndVTable()) {
for (int32_t i = 0; i < orig->GetEmbeddedVTableLength(); ++i) {
- auto it = native_object_reloc_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
- CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
+ auto it = native_object_relocations_.find(orig->GetEmbeddedVTableEntry(i, target_ptr_size_));
+ CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
copy->SetEmbeddedVTableEntryUnchecked(
i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
}
for (size_t i = 0; i < mirror::Class::kImtSize; ++i) {
- auto it = native_object_reloc_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
- CHECK(it != native_object_reloc_.end()) << PrettyClass(orig);
+ auto it = native_object_relocations_.find(orig->GetEmbeddedImTableEntry(i, target_ptr_size_));
+ CHECK(it != native_object_relocations_.end()) << PrettyClass(orig);
copy->SetEmbeddedImTableEntry(
i, reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset), target_ptr_size_);
}
}
FixupClassVisitor visitor(this, copy);
- static_cast<mirror::Object*>(orig)->VisitReferences<true /*visit class*/>(visitor, visitor);
+ static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
}
void ImageWriter::FixupObject(Object* orig, Object* copy) {
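
NativeLocationInImage replaces four hand-rolled find/CHECK blocks with one nullptr-tolerant lookup from a runtime-heap native pointer to its address in the image. A sketch of the helper over a plain map; the real code CHECKs that the relocation entry exists instead of returning null on a miss:

```cpp
#include <cstdint>
#include <unordered_map>

struct Relocations {
  std::unordered_map<void*, size_t> offsets;  // Runtime pointer -> image offset.
  uint8_t* image_begin = nullptr;

  // nullptr in, nullptr out: classes with no fields or methods keep null
  // array pointers in the image copy.
  void* NativeLocationInImage(void* obj) const {
    if (obj == nullptr) {
      return nullptr;
    }
    auto it = offsets.find(obj);
    return it != offsets.end() ? image_begin + it->second : nullptr;
  }
};
```
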
@@ -1311,14 +1385,19 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
auto* dest = down_cast<mirror::AbstractMethod*>(copy);
auto* src = down_cast<mirror::AbstractMethod*>(orig);
ArtMethod* src_method = src->GetArtMethod();
- auto it = native_object_reloc_.find(src_method);
- CHECK(it != native_object_reloc_.end()) << "Missing relocation for AbstractMethod.artMethod "
- << PrettyMethod(src_method);
+ auto it = native_object_relocations_.find(src_method);
+ CHECK(it != native_object_relocations_.end())
+ << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
dest->SetArtMethod(
reinterpret_cast<ArtMethod*>(image_begin_ + it->second.offset));
+ } else if (!klass->IsArrayClass() && klass->IsSubClass(down_cast<mirror::Class*>(
+ Thread::Current()->DecodeJObject(WellKnownClasses::java_lang_ClassLoader)))) {
+ // If src is a ClassLoader, set the class table to null so that it gets recreated by the
+ // ClassLoader.
+ down_cast<mirror::ClassLoader*>(copy)->SetClassTable(nullptr);
}
FixupVisitor visitor(this, copy);
- orig->VisitReferences<true /*visit class*/>(visitor, visitor);
+ orig->VisitReferences(visitor, visitor);
}
}
@@ -1381,7 +1460,7 @@ const uint8_t* ImageWriter::GetQuickEntryPoint(ArtMethod* method) {
}
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
- memcpy(copy, orig, ArtMethod::ObjectSize(target_ptr_size_));
+ memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
copy->SetDexCacheResolvedMethods(GetImageAddress(orig->GetDexCacheResolvedMethods()));
@@ -1417,9 +1496,6 @@ void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
if (UNLIKELY(orig->IsAbstract())) {
copy->SetEntryPointFromQuickCompiledCodePtrSize(
GetOatAddress(quick_to_interpreter_bridge_offset_), target_ptr_size_);
- copy->SetEntryPointFromInterpreterPtrSize(
- reinterpret_cast<EntryPointFromInterpreter*>(const_cast<uint8_t*>(
- GetOatAddress(interpreter_to_interpreter_bridge_offset_))), target_ptr_size_);
} else {
bool quick_is_interpreted;
const uint8_t* quick_code = GetQuickCode(orig, &quick_is_interpreted);
@@ -1432,16 +1508,6 @@ void ImageWriter::CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy) {
copy->SetEntryPointFromJniPtrSize(
GetOatAddress(jni_dlsym_lookup_offset_), target_ptr_size_);
}
-
- // Interpreter entrypoint:
- // Set the interpreter entrypoint depending on whether there is compiled code or not.
- uint32_t interpreter_code = (quick_is_interpreted)
- ? interpreter_to_interpreter_bridge_offset_
- : interpreter_to_compiled_code_bridge_offset_;
- EntryPointFromInterpreter* interpreter_entrypoint =
- reinterpret_cast<EntryPointFromInterpreter*>(
- const_cast<uint8_t*>(GetOatAddress(interpreter_code)));
- copy->SetEntryPointFromInterpreterPtrSize(interpreter_entrypoint, target_ptr_size_);
}
}
}
@@ -1506,4 +1572,19 @@ uint8_t* ImageWriter::GetOatFileBegin() const {
bin_slot_sizes_[kBinArtMethodClean] + intern_table_bytes_, kPageSize);
}
+ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
+ switch (type) {
+ case kNativeObjectRelocationTypeArtField:
+ case kNativeObjectRelocationTypeArtFieldArray:
+ return kBinArtField;
+ case kNativeObjectRelocationTypeArtMethodClean:
+ case kNativeObjectRelocationTypeArtMethodArrayClean:
+ return kBinArtMethodClean;
+ case kNativeObjectRelocationTypeArtMethodDirty:
+ case kNativeObjectRelocationTypeArtMethodArrayDirty:
+ return kBinArtMethodDirty;
+ }
+ UNREACHABLE();
+}
+
} // namespace art