-rw-r--r--  compiler/optimizing/builder.cc                |    1
-rw-r--r--  dex2oat/dex2oat.cc                            |    6
-rw-r--r--  dex2oat/dex2oat_test.cc                       |   36
-rw-r--r--  dex2oat/driver/compiler_driver.cc             |   12
-rw-r--r--  dex2oat/driver/compiler_driver.h              |    1
-rw-r--r--  dex2oat/linker/image_test.h                   |    3
-rw-r--r--  dex2oat/linker/image_writer.cc                |  397
-rw-r--r--  dex2oat/linker/image_writer.h                 |   40
-rw-r--r--  dex2oat/linker/oat_writer.cc                  |    1
-rw-r--r--  oatdump/oatdump.cc                            |  144
-rw-r--r--  oatdump/oatdump_test.h                        |    7
-rw-r--r--  openjdkjvmti/ti_redefine.cc                   |   11
-rw-r--r--  openjdkjvmti/transform.cc                     |    1
-rw-r--r--  runtime/class_linker.cc                       |  276
-rw-r--r--  runtime/class_linker.h                        |    4
-rw-r--r--  runtime/class_linker_test.cc                  |    7
-rw-r--r--  runtime/gc/space/image_space.cc               |  128
-rw-r--r--  runtime/gc/space/image_space.h                |    1
-rw-r--r--  runtime/image.cc                              |    2
-rw-r--r--  runtime/image.h                               |   74
-rw-r--r--  runtime/mirror/dex_cache-inl.h                |    8
-rw-r--r--  runtime/mirror/dex_cache.cc                   |  151
-rw-r--r--  runtime/mirror/dex_cache.h                    |   37
-rw-r--r--  runtime/oat.h                                 |    4
-rw-r--r--  runtime/runtime.cc                            |    8
-rw-r--r--  runtime/utils/dex_cache_arrays_layout-inl.h   |  184
-rw-r--r--  runtime/utils/dex_cache_arrays_layout.h       |  138
27 files changed, 170 insertions(+), 1512 deletions(-)
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 33dbf4e45e..decc4a815a 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -33,7 +33,6 @@
#include "optimizing_compiler_stats.h"
#include "ssa_builder.h"
#include "thread.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
namespace art {
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index 6e484b76b1..dcc9de45bc 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -2007,11 +2007,7 @@ class Dex2Oat final {
// We need to prepare method offsets in the image address space for resolving linker patches.
TimingLogger::ScopedTiming t2("dex2oat Prepare image address space", timings_);
- // Do not preload dex caches for "assume-verified". This filter is used for in-memory
- // compilation of boot image extension; in that scenario it is undesirable to use a lot
- // of time to look up things now in hope it will be somewhat useful later.
- bool preload_dex_caches = !compiler_options_->AssumeDexFilesAreVerified();
- if (!image_writer_->PrepareImageAddressSpace(preload_dex_caches, timings_)) {
+ if (!image_writer_->PrepareImageAddressSpace(timings_)) {
LOG(ERROR) << "Failed to prepare image address space.";
return false;
}
diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc
index f425fc9a2a..0d3d4a0275 100644
--- a/dex2oat/dex2oat_test.cc
+++ b/dex2oat/dex2oat_test.cc
@@ -2174,43 +2174,17 @@ TEST_F(Dex2oatTest, AppImageResolveStrings) {
seen.insert(str.Read()->ToModifiedUtf8());
}
});
- // Ensure that the dex cache has a preresolved string array.
- std::set<std::string> preresolved_seen;
- bool saw_dexcache = false;
- space->GetLiveBitmap()->VisitAllMarked(
- [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
- if (obj->IsDexCache<kVerifyNone>()) {
- ObjPtr<mirror::DexCache> dex_cache = obj->AsDexCache();
- GcRoot<mirror::String>* preresolved_strings = dex_cache->GetPreResolvedStrings();
- ASSERT_EQ(dex_file->NumStringIds(), dex_cache->NumPreResolvedStrings());
- for (size_t i = 0; i < dex_cache->NumPreResolvedStrings(); ++i) {
- ObjPtr<mirror::String> string = preresolved_strings[i].Read<kWithoutReadBarrier>();
- if (string != nullptr) {
- preresolved_seen.insert(string->ToModifiedUtf8());
- }
- }
- saw_dexcache = true;
- }
- });
- ASSERT_TRUE(saw_dexcache);
- // Everything in the preresolved array should also be in the intern table.
- for (const std::string& str : preresolved_seen) {
- EXPECT_TRUE(seen.find(str) != seen.end());
- }
// Normal methods
- EXPECT_TRUE(preresolved_seen.find("Loading ") != preresolved_seen.end());
- EXPECT_TRUE(preresolved_seen.find("Starting up") != preresolved_seen.end());
- EXPECT_TRUE(preresolved_seen.find("abcd.apk") != preresolved_seen.end());
+ EXPECT_TRUE(seen.find("Loading ") != seen.end());
+ EXPECT_TRUE(seen.find("Starting up") != seen.end());
+ EXPECT_TRUE(seen.find("abcd.apk") != seen.end());
EXPECT_TRUE(seen.find("Unexpected error") == seen.end());
EXPECT_TRUE(seen.find("Shutting down!") == seen.end());
- EXPECT_TRUE(preresolved_seen.find("Unexpected error") == preresolved_seen.end());
- EXPECT_TRUE(preresolved_seen.find("Shutting down!") == preresolved_seen.end());
// Classes initializers
- EXPECT_TRUE(preresolved_seen.find("Startup init") != preresolved_seen.end());
+ EXPECT_TRUE(seen.find("Startup init") != seen.end());
EXPECT_TRUE(seen.find("Other class init") == seen.end());
- EXPECT_TRUE(preresolved_seen.find("Other class init") == preresolved_seen.end());
-  // Expect the sets match.
-  EXPECT_GE(seen.size(), preresolved_seen.size());
// Verify what strings are marked as boot image.
std::set<std::string> boot_image_strings;
diff --git a/dex2oat/driver/compiler_driver.cc b/dex2oat/driver/compiler_driver.cc
index 6ea58b39e5..b487c4c6ab 100644
--- a/dex2oat/driver/compiler_driver.cc
+++ b/dex2oat/driver/compiler_driver.cc
@@ -84,7 +84,6 @@
#include "trampolines/trampoline_compiler.h"
#include "transaction.h"
#include "utils/atomic_dex_ref_map-inl.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
#include "utils/swap_space.h"
#include "vdex_file.h"
#include "verifier/class_verifier.h"
@@ -684,11 +683,6 @@ void CompilerDriver::ResolveConstStrings(const std::vector<const DexFile*>& dex_
for (const DexFile* dex_file : dex_files) {
dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file));
- bool added_preresolved_string_array = false;
- if (only_startup_strings) {
- // When resolving startup strings, create the preresolved strings array.
- added_preresolved_string_array = dex_cache->AddPreResolvedStringsArray();
- }
TimingLogger::ScopedTiming t("Resolve const-string Strings", timings);
// TODO: Implement a profile-based filter for the boot image. See b/76145463.
@@ -714,7 +708,7 @@ void CompilerDriver::ResolveConstStrings(const std::vector<const DexFile*>& dex_
if (profile_compilation_info != nullptr && !is_startup_clinit) {
ProfileCompilationInfo::MethodHotness hotness =
profile_compilation_info->GetMethodHotness(method.GetReference());
- if (added_preresolved_string_array ? !hotness.IsStartup() : !hotness.IsInProfile()) {
+ if (only_startup_strings ? !hotness.IsStartup() : !hotness.IsInProfile()) {
continue;
}
}
@@ -732,10 +726,6 @@ void CompilerDriver::ResolveConstStrings(const std::vector<const DexFile*>& dex_
: inst->VRegB_31c());
ObjPtr<mirror::String> string = class_linker->ResolveString(string_index, dex_cache);
CHECK(string != nullptr) << "Could not allocate a string when forcing determinism";
- if (added_preresolved_string_array) {
- dex_cache->GetPreResolvedStrings()[string_index.index_] =
- GcRoot<mirror::String>(string);
- }
++num_instructions;
break;
}
diff --git a/dex2oat/driver/compiler_driver.h b/dex2oat/driver/compiler_driver.h
index e869baec73..3008623ba2 100644
--- a/dex2oat/driver/compiler_driver.h
+++ b/dex2oat/driver/compiler_driver.h
@@ -40,7 +40,6 @@
#include "driver/compiled_method_storage.h"
#include "thread_pool.h"
#include "utils/atomic_dex_ref_map.h"
-#include "utils/dex_cache_arrays_layout.h"
namespace art {
diff --git a/dex2oat/linker/image_test.h b/dex2oat/linker/image_test.h
index ee897ed7c1..e7b0c03aed 100644
--- a/dex2oat/linker/image_test.h
+++ b/dex2oat/linker/image_test.h
@@ -276,8 +276,7 @@ inline void ImageTest::DoCompile(ImageHeader::StorageMode storage_mode,
ASSERT_TRUE(cur_opened_dex_files.empty());
}
}
- bool image_space_ok =
- writer->PrepareImageAddressSpace(/*preload_dex_caches=*/ true, &timings);
+ bool image_space_ok = writer->PrepareImageAddressSpace(&timings);
ASSERT_TRUE(image_space_ok);
DCHECK_EQ(out_helper.vdex_files.size(), out_helper.oat_files.size());
diff --git a/dex2oat/linker/image_writer.cc b/dex2oat/linker/image_writer.cc
index 42c570f0a0..f29b4cdc0c 100644
--- a/dex2oat/linker/image_writer.cc
+++ b/dex2oat/linker/image_writer.cc
@@ -80,7 +80,6 @@
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "subtype_check.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
#include "well_known_classes.h"
using ::art::mirror::Class;
@@ -237,7 +236,7 @@ static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
Runtime::Current()->GetHeap()->VisitObjects(visitor);
}
-bool ImageWriter::PrepareImageAddressSpace(bool preload_dex_caches, TimingLogger* timings) {
+bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
target_ptr_size_ = InstructionSetPointerSize(compiler_options_.GetInstructionSet());
Thread* const self = Thread::Current();
@@ -277,20 +276,6 @@ bool ImageWriter::PrepareImageAddressSpace(bool preload_dex_caches, TimingLogger
Runtime::Current()->GetInternTable()->PromoteWeakToStrong();
}
- if (preload_dex_caches) {
- TimingLogger::ScopedTiming t("PreloadDexCaches", timings);
- // Preload deterministic contents to the dex cache arrays we're going to write.
- ScopedObjectAccess soa(self);
- ObjPtr<mirror::ClassLoader> class_loader = GetAppClassLoader();
- std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
- for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
- if (!IsImageDexCache(dex_cache)) {
- continue; // Boot image DexCache is not written to the app image.
- }
- PreloadDexCache(dex_cache, class_loader);
- }
- }
-
{
TimingLogger::ScopedTiming t("CalculateNewObjectOffsets", timings);
ScopedObjectAccess soa(self);
@@ -654,92 +639,6 @@ void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
DCHECK(IsImageBinSlotAssigned(object));
}
-void ImageWriter::PrepareDexCacheArraySlots() {
- // Prepare dex cache array starts based on the ordering specified in the CompilerOptions.
- // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
- // when AssignImageBinSlot() assigns their indexes out or order.
- for (const DexFile* dex_file : compiler_options_.GetDexFilesForOatFile()) {
- auto it = dex_file_oat_index_map_.find(dex_file);
- DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
- ImageInfo& image_info = GetImageInfo(it->second);
- image_info.dex_cache_array_starts_.Put(
- dex_file, image_info.GetBinSlotSize(Bin::kDexCacheArray));
- DexCacheArraysLayout layout(target_ptr_size_, dex_file);
- image_info.IncrementBinSlotSize(Bin::kDexCacheArray, layout.Size());
- }
-
- ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
- Thread* const self = Thread::Current();
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- ObjPtr<mirror::DexCache> dex_cache =
- ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
- if (dex_cache == nullptr || !IsImageDexCache(dex_cache)) {
- continue;
- }
- const DexFile* dex_file = dex_cache->GetDexFile();
- CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
- << "Dex cache should have been pruned " << dex_file->GetLocation()
- << "; possibly in class path";
- DexCacheArraysLayout layout(target_ptr_size_, dex_file);
- // Empty dex files will not have a "valid" DexCacheArraysLayout.
- if (dex_file->NumTypeIds() + dex_file->NumStringIds() + dex_file->NumMethodIds() +
- dex_file->NumFieldIds() + dex_file->NumProtoIds() + dex_file->NumCallSiteIds() != 0) {
- DCHECK(layout.Valid());
- }
- size_t oat_index = GetOatIndexForDexFile(dex_file);
- ImageInfo& image_info = GetImageInfo(oat_index);
- uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
- DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
- start + layout.TypesOffset(),
- oat_index);
- DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
- start + layout.MethodsOffset(),
- oat_index);
- DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
- start + layout.FieldsOffset(),
- oat_index);
- DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
- AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), oat_index);
-
- AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
- start + layout.MethodTypesOffset(),
- oat_index);
- AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
- start + layout.CallSitesOffset(),
- oat_index);
-
- // Preresolved strings aren't part of the special layout.
- GcRoot<mirror::String>* preresolved_strings = dex_cache->GetPreResolvedStrings();
- if (preresolved_strings != nullptr) {
- DCHECK(!IsInBootImage(preresolved_strings));
- // Add the array to the metadata section.
- const size_t count = dex_cache->NumPreResolvedStrings();
- auto bin = BinTypeForNativeRelocationType(NativeObjectRelocationType::kGcRootPointer);
- for (size_t i = 0; i < count; ++i) {
- native_object_relocations_.emplace(&preresolved_strings[i],
- NativeObjectRelocation { oat_index,
- image_info.GetBinSlotSize(bin),
- NativeObjectRelocationType::kGcRootPointer });
- image_info.IncrementBinSlotSize(bin, sizeof(GcRoot<mirror::Object>));
- }
- }
- }
-}
-
-void ImageWriter::AddDexCacheArrayRelocation(void* array,
- size_t offset,
- size_t oat_index) {
- if (array != nullptr) {
- DCHECK(!IsInBootImage(array));
- native_object_relocations_.emplace(array,
- NativeObjectRelocation { oat_index, offset, NativeObjectRelocationType::kDexCacheArray });
- }
-}
-
void ImageWriter::AddMethodPointerArray(ObjPtr<mirror::PointerArray> arr) {
DCHECK(arr != nullptr);
if (kIsDebugBuild) {
@@ -780,12 +679,6 @@ ImageWriter::Bin ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t
// Memory analysis has determined that the following types of objects get dirtied
// the most:
//
- // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
- // a fixed layout which helps improve generated code (using PC-relative addressing),
- // so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
- // Since these arrays are huge, most pages do not overlap other objects and it's not
- // really important where they are for the clean/dirty separation. Due to their
- // special PC-relative addressing, we arbitrarily keep them at the end.
// * Class'es which are verified [their clinit runs only at runtime]
// - classes in general [because their static fields get overwritten]
// - initialized classes with all-final statics are unlikely to be ever dirty,
@@ -1282,113 +1175,6 @@ void ImageWriter::ClearDexCache(ObjPtr<mirror::DexCache> dex_cache) {
GcRoot<mirror::CallSite>(nullptr));
}
-void ImageWriter::PreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
- ObjPtr<mirror::ClassLoader> class_loader) {
- // To ensure deterministic contents of the hash-based arrays, each slot shall contain
- // the candidate with the lowest index. As we're processing entries in increasing index
- // order, this means trying to look up the entry for the current index if the slot is
- // empty or if it contains a higher index.
-
- Runtime* runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
- const DexFile& dex_file = *dex_cache->GetDexFile();
- // Preload the methods array and make the contents deterministic.
- mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
- dex::TypeIndex last_class_idx; // Initialized to invalid index.
- ObjPtr<mirror::Class> last_class = nullptr;
- for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
- uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
- auto pair =
- mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
- uint32_t stored_index = pair.index;
- ArtMethod* method = pair.object;
- if (method != nullptr && i > stored_index) {
- continue; // Already checked.
- }
- // Check if the referenced class is in the image. Note that we want to check the referenced
- // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
- // results in resolving the referenced class and that can for example throw OOME.
- const dex::MethodId& method_id = dex_file.GetMethodId(i);
- if (method_id.class_idx_ != last_class_idx) {
- last_class_idx = method_id.class_idx_;
- last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
- }
- if (method == nullptr || i < stored_index) {
- if (last_class != nullptr) {
- // Try to resolve the method with the class linker, which will insert
- // it into the dex cache if successful.
- method = class_linker->FindResolvedMethod(last_class, dex_cache, class_loader, i);
- DCHECK(method == nullptr || dex_cache->GetResolvedMethod(i, target_ptr_size_) == method);
- }
- } else {
- DCHECK_EQ(i, stored_index);
- DCHECK(last_class != nullptr);
- }
- }
- // Preload the fields array and make the contents deterministic.
- mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
- last_class_idx = dex::TypeIndex(); // Initialized to invalid index.
- last_class = nullptr;
- for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
- uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
- auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
- uint32_t stored_index = pair.index;
- ArtField* field = pair.object;
- if (field != nullptr && i > stored_index) {
- continue; // Already checked.
- }
- // Check if the referenced class is in the image. Note that we want to check the referenced
- // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
- // results in resolving the referenced class and that can for example throw OOME.
- const dex::FieldId& field_id = dex_file.GetFieldId(i);
- if (field_id.class_idx_ != last_class_idx) {
- last_class_idx = field_id.class_idx_;
- last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
- if (last_class != nullptr && !KeepClass(last_class)) {
- last_class = nullptr;
- }
- }
- if (field == nullptr || i < stored_index) {
- if (last_class != nullptr) {
- // Try to resolve the field with the class linker, which will insert
- // it into the dex cache if successful.
- field = class_linker->FindResolvedFieldJLS(last_class, dex_cache, class_loader, i);
- DCHECK(field == nullptr || dex_cache->GetResolvedField(i, target_ptr_size_) == field);
- }
- } else {
- DCHECK_EQ(i, stored_index);
- DCHECK(last_class != nullptr);
- }
- }
- // Preload the types array and make the contents deterministic.
- // This is done after fields and methods as their lookup can touch the types array.
- for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
- dex::TypeIndex type_idx(i);
- uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
- mirror::TypeDexCachePair pair =
- dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
- uint32_t stored_index = pair.index;
- ObjPtr<mirror::Class> klass = pair.object.Read();
- if (klass == nullptr || i < stored_index) {
- klass = class_linker->LookupResolvedType(type_idx, dex_cache, class_loader);
- DCHECK(klass == nullptr || dex_cache->GetResolvedType(type_idx) == klass);
- }
- }
- // Preload the strings array and make the contents deterministic.
- for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
- dex::StringIndex string_idx(i);
- uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
- mirror::StringDexCachePair pair =
- dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
- uint32_t stored_index = pair.index;
- ObjPtr<mirror::String> string = pair.object.Read();
- if (string == nullptr || i < stored_index) {
- string = class_linker->LookupString(string_idx, dex_cache);
- DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
- }
- }
-}
-
void ImageWriter::PruneNonImageClasses() {
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
@@ -1419,7 +1205,7 @@ void ImageWriter::PruneNonImageClasses() {
VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
}
- // Completely clear DexCaches. They shall be re-filled in PreloadDexCaches if requested.
+ // Completely clear DexCaches.
std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
ClearDexCache(dex_cache);
@@ -1795,7 +1581,7 @@ class ImageWriter::LayoutHelper {
* string. To speed up the visiting of references at load time we include
* a list of offsets to string references in the AppImage.
*/
- void CollectStringReferenceInfo(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
+ void CollectStringReferenceInfo() REQUIRES_SHARED(Locks::mutator_lock_);
private:
class CollectClassesVisitor;
@@ -2200,10 +1986,6 @@ void ImageWriter::LayoutHelper::FinalizeBinSlotOffsets() {
bin_offset = RoundUp(bin_offset, ArtMethod::Alignment(image_writer_->target_ptr_size_));
break;
}
- case Bin::kDexCacheArray:
- bin_offset =
- RoundUp(bin_offset, DexCacheArraysLayout::Alignment(image_writer_->target_ptr_size_));
- break;
case Bin::kImTable:
case Bin::kIMTConflictTable: {
bin_offset = RoundUp(bin_offset, static_cast<size_t>(image_writer_->target_ptr_size_));
@@ -2285,8 +2067,7 @@ void ImageWriter::LayoutHelper::FinalizeBinSlotOffsets() {
VLOG(image) << "Space wasted for region alignment " << image_writer_->region_alignment_wasted_;
}
-void ImageWriter::LayoutHelper::CollectStringReferenceInfo(Thread* self) {
- size_t managed_string_refs = 0u;
+void ImageWriter::LayoutHelper::CollectStringReferenceInfo() {
size_t total_string_refs = 0u;
const size_t num_image_infos = image_writer_->image_infos_.size();
@@ -2316,49 +2097,13 @@ void ImageWriter::LayoutHelper::CollectStringReferenceInfo(Thread* self) {
}
}
- managed_string_refs += image_info.string_reference_offsets_.size();
-
- // Collect dex cache string arrays.
- for (const DexFile* dex_file : image_writer_->compiler_options_.GetDexFilesForOatFile()) {
- if (image_writer_->GetOatIndexForDexFile(dex_file) == oat_index) {
- ObjPtr<mirror::DexCache> dex_cache =
- Runtime::Current()->GetClassLinker()->FindDexCache(self, *dex_file);
- DCHECK(dex_cache != nullptr);
- size_t base_offset = image_writer_->GetImageOffset(dex_cache.Ptr(), oat_index);
-
- // Visit all string cache entries.
- mirror::StringDexCacheType* strings = dex_cache->GetStrings();
- const size_t num_strings = dex_cache->NumStrings();
- for (uint32_t index = 0; index != num_strings; ++index) {
- ObjPtr<mirror::String> referred_string = strings[index].load().object.Read();
- if (image_writer_->IsInternedAppImageStringReference(referred_string)) {
- image_info.string_reference_offsets_.emplace_back(
- SetDexCacheStringNativeRefTag(base_offset), index);
- }
- }
-
- // Visit all pre-resolved string entries.
- GcRoot<mirror::String>* preresolved_strings = dex_cache->GetPreResolvedStrings();
- const size_t num_pre_resolved_strings = dex_cache->NumPreResolvedStrings();
- for (uint32_t index = 0; index != num_pre_resolved_strings; ++index) {
- ObjPtr<mirror::String> referred_string = preresolved_strings[index].Read();
- if (image_writer_->IsInternedAppImageStringReference(referred_string)) {
- image_info.string_reference_offsets_.emplace_back(
- SetDexCachePreResolvedStringNativeRefTag(base_offset), index);
- }
- }
- }
- }
-
total_string_refs += image_info.string_reference_offsets_.size();
// Check that we collected the same number of string references as we saw in the previous pass.
CHECK_EQ(image_info.string_reference_offsets_.size(), image_info.num_string_references_);
}
- VLOG(compiler) << "Dex2Oat:AppImage:stringReferences = " << total_string_refs
- << " (managed: " << managed_string_refs
- << ", native: " << (total_string_refs - managed_string_refs) << ")";
+ VLOG(compiler) << "Dex2Oat:AppImage:stringReferences = " << total_string_refs;
}
void ImageWriter::LayoutHelper::VisitReferences(ObjPtr<mirror::Object> obj, size_t oat_index) {
@@ -2463,9 +2208,6 @@ void ImageWriter::CalculateNewObjectOffsets() {
// Verify that all objects have assigned image bin slots.
layout_helper.VerifyImageBinSlotsAssigned();
- // Calculate size of the dex cache arrays slot and prepare offsets.
- PrepareDexCacheArraySlots();
-
// Calculate the sizes of the intern tables, class tables, and fixup tables.
for (ImageInfo& image_info : image_infos_) {
// Calculate how big the intern table will be after being serialized.
@@ -2488,7 +2230,7 @@ void ImageWriter::CalculateNewObjectOffsets() {
// Collect string reference info for app images.
if (ClassLinker::kAppImageMayContainStrings && compiler_options_.IsAppImage()) {
- layout_helper.CollectStringReferenceInfo(self);
+ layout_helper.CollectStringReferenceInfo();
}
// Calculate image offsets.
@@ -2564,19 +2306,11 @@ std::pair<size_t, std::vector<ImageSection>> ImageWriter::ImageInfo::CreateImage
ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod), GetBinSlotSize(Bin::kRuntimeMethod));
/*
- * DexCache Arrays section.
- */
- const ImageSection& dex_cache_arrays_section =
- sections[ImageHeader::kSectionDexCacheArrays] =
- ImageSection(GetBinSlotOffset(Bin::kDexCacheArray),
- GetBinSlotSize(Bin::kDexCacheArray));
-
- /*
* Interned Strings section
*/
// Round up to the alignment the string table expects. See HashSet::WriteToMemory.
- size_t cur_pos = RoundUp(dex_cache_arrays_section.End(), sizeof(uint64_t));
+ size_t cur_pos = RoundUp(sections[ImageHeader::kSectionRuntimeMethods].End(), sizeof(uint64_t));
const ImageSection& interned_strings_section =
sections[ImageHeader::kSectionInternedStrings] =
@@ -2613,8 +2347,7 @@ std::pair<size_t, std::vector<ImageSection>> ImageWriter::ImageInfo::CreateImage
*/
// Round up to the alignment of the offsets we are going to store.
- cur_pos = RoundUp(string_reference_offsets.End(),
- mirror::DexCache::PreResolvedStringsAlignment());
+ cur_pos = RoundUp(string_reference_offsets.End(), sizeof(uint32_t));
const ImageSection& metadata_section =
sections[ImageHeader::kSectionMetadata] =
@@ -2841,9 +2574,6 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
break;
}
- case NativeObjectRelocationType::kDexCacheArray:
- // Nothing to copy here, everything is done in FixupDexCache().
- break;
case NativeObjectRelocationType::kIMTable: {
ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
@@ -3211,7 +2941,7 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
ArtMethod* src_method = src->GetArtMethod();
CopyAndFixupPointer(dest, mirror::Executable::ArtMethodOffset(), src_method);
} else if (klass == GetClassRoot<mirror::DexCache>(class_roots)) {
- FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
+ down_cast<mirror::DexCache*>(copy)->ResetNativeFields();
} else if (klass->IsClassLoaderClass()) {
mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
// If src is a ClassLoader, set the class table to null so that it gets recreated by the
@@ -3227,113 +2957,6 @@ void ImageWriter::FixupObject(Object* orig, Object* copy) {
}
}
-template <typename T>
-void ImageWriter::FixupDexCacheArrayEntry(std::atomic<mirror::DexCachePair<T>>* orig_array,
- std::atomic<mirror::DexCachePair<T>>* new_array,
- uint32_t array_index) {
- static_assert(sizeof(std::atomic<mirror::DexCachePair<T>>) == sizeof(mirror::DexCachePair<T>),
- "Size check for removing std::atomic<>.");
- mirror::DexCachePair<T>* orig_pair =
- reinterpret_cast<mirror::DexCachePair<T>*>(&orig_array[array_index]);
- mirror::DexCachePair<T>* new_pair =
- reinterpret_cast<mirror::DexCachePair<T>*>(&new_array[array_index]);
- CopyAndFixupReference(
- new_pair->object.AddressWithoutBarrier(), orig_pair->object.Read());
- new_pair->index = orig_pair->index;
-}
-
-template <typename T>
-void ImageWriter::FixupDexCacheArrayEntry(std::atomic<mirror::NativeDexCachePair<T>>* orig_array,
- std::atomic<mirror::NativeDexCachePair<T>>* new_array,
- uint32_t array_index) {
- static_assert(
- sizeof(std::atomic<mirror::NativeDexCachePair<T>>) == sizeof(mirror::NativeDexCachePair<T>),
- "Size check for removing std::atomic<>.");
- if (target_ptr_size_ == PointerSize::k64) {
- DexCache::ConversionPair64* orig_pair =
- reinterpret_cast<DexCache::ConversionPair64*>(orig_array) + array_index;
- DexCache::ConversionPair64* new_pair =
- reinterpret_cast<DexCache::ConversionPair64*>(new_array) + array_index;
- *new_pair = *orig_pair; // Copy original value and index.
- if (orig_pair->first != 0u) {
- CopyAndFixupPointer(
- reinterpret_cast<void**>(&new_pair->first), reinterpret_cast64<void*>(orig_pair->first));
- }
- } else {
- DexCache::ConversionPair32* orig_pair =
- reinterpret_cast<DexCache::ConversionPair32*>(orig_array) + array_index;
- DexCache::ConversionPair32* new_pair =
- reinterpret_cast<DexCache::ConversionPair32*>(new_array) + array_index;
- *new_pair = *orig_pair; // Copy original value and index.
- if (orig_pair->first != 0u) {
- CopyAndFixupPointer(
- reinterpret_cast<void**>(&new_pair->first), reinterpret_cast32<void*>(orig_pair->first));
- }
- }
-}
-
-void ImageWriter::FixupDexCacheArrayEntry(GcRoot<mirror::CallSite>* orig_array,
- GcRoot<mirror::CallSite>* new_array,
- uint32_t array_index) {
- CopyAndFixupReference(
- new_array[array_index].AddressWithoutBarrier(), orig_array[array_index].Read());
-}
-
-template <typename EntryType>
-void ImageWriter::FixupDexCacheArray(DexCache* orig_dex_cache,
- DexCache* copy_dex_cache,
- MemberOffset array_offset,
- uint32_t size) {
- EntryType* orig_array = orig_dex_cache->GetFieldPtr64<EntryType*>(array_offset);
- DCHECK_EQ(orig_array != nullptr, size != 0u);
- if (orig_array != nullptr) {
- // Though the DexCache array fields are usually treated as native pointers, we clear
- // the top 32 bits for 32-bit targets.
- CopyAndFixupPointer(copy_dex_cache, array_offset, orig_array, PointerSize::k64);
- EntryType* new_array = NativeCopyLocation(orig_array);
- for (uint32_t i = 0; i != size; ++i) {
- FixupDexCacheArrayEntry(orig_array, new_array, i);
- }
- }
-}
-
-void ImageWriter::FixupDexCache(DexCache* orig_dex_cache, DexCache* copy_dex_cache) {
- FixupDexCacheArray<mirror::StringDexCacheType>(orig_dex_cache,
- copy_dex_cache,
- DexCache::StringsOffset(),
- orig_dex_cache->NumStrings());
- FixupDexCacheArray<mirror::TypeDexCacheType>(orig_dex_cache,
- copy_dex_cache,
- DexCache::ResolvedTypesOffset(),
- orig_dex_cache->NumResolvedTypes());
- FixupDexCacheArray<mirror::MethodDexCacheType>(orig_dex_cache,
- copy_dex_cache,
- DexCache::ResolvedMethodsOffset(),
- orig_dex_cache->NumResolvedMethods());
- FixupDexCacheArray<mirror::FieldDexCacheType>(orig_dex_cache,
- copy_dex_cache,
- DexCache::ResolvedFieldsOffset(),
- orig_dex_cache->NumResolvedFields());
- FixupDexCacheArray<mirror::MethodTypeDexCacheType>(orig_dex_cache,
- copy_dex_cache,
- DexCache::ResolvedMethodTypesOffset(),
- orig_dex_cache->NumResolvedMethodTypes());
- FixupDexCacheArray<GcRoot<mirror::CallSite>>(orig_dex_cache,
- copy_dex_cache,
- DexCache::ResolvedCallSitesOffset(),
- orig_dex_cache->NumResolvedCallSites());
- if (orig_dex_cache->GetPreResolvedStrings() != nullptr) {
- CopyAndFixupPointer(copy_dex_cache,
- DexCache::PreResolvedStringsOffset(),
- orig_dex_cache->GetPreResolvedStrings(),
- PointerSize::k64);
- }
-
- // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
- // compiler pointers in here will make the output non-deterministic.
- copy_dex_cache->SetDexFile(nullptr);
-}
-
const uint8_t* ImageWriter::GetOatAddress(StubType type) const {
DCHECK_LE(type, StubType::kLast);
// If we are compiling a boot image extension or app image,
@@ -3526,8 +3149,6 @@ ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocat
case NativeObjectRelocationType::kArtMethodDirty:
case NativeObjectRelocationType::kArtMethodArrayDirty:
return Bin::kArtMethodDirty;
- case NativeObjectRelocationType::kDexCacheArray:
- return Bin::kDexCacheArray;
case NativeObjectRelocationType::kRuntimeMethod:
return Bin::kRuntimeMethod;
case NativeObjectRelocationType::kIMTable:
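
The PreloadDexCache() code removed above encoded a simple determinism rule for the hash-based dex cache arrays: walk the ids in increasing order and let a slot keep only the lowest id that hashes to it. The following standalone C++ sketch illustrates just that rule; the modulo slot function, the Entry type, and the fake resolution values are assumptions for illustration, not ART types.

#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

struct Entry {
  uint32_t index;  // Which id this slot currently caches.
  int value;       // Stand-in for the resolved object.
};

int main() {
  constexpr size_t kCapacity = 4;
  std::vector<std::optional<Entry>> slots(kCapacity);
  // Pretend ids 0..9 all resolve; resolving id i yields the value 100 + i.
  for (uint32_t i = 0; i < 10; ++i) {
    size_t slot = i % kCapacity;  // Hash-based slot, standing in for e.g. StringSlotIndex().
    // Refill only if the slot is empty or holds a higher id, so the final
    // contents do not depend on which lookups already happened at runtime.
    if (!slots[slot] || slots[slot]->index > i) {
      slots[slot] = Entry{i, static_cast<int>(100 + i)};
    }
  }
  for (size_t s = 0; s < kCapacity; ++s) {
    std::cout << "slot " << s << " -> id " << slots[s]->index << "\n";
  }
  return 0;
}
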
diff --git a/dex2oat/linker/image_writer.h b/dex2oat/linker/image_writer.h
index 84ca88e334..3cb5098526 100644
--- a/dex2oat/linker/image_writer.h
+++ b/dex2oat/linker/image_writer.h
@@ -102,7 +102,7 @@ class ImageWriter final {
* image have been initialized and all native methods have been generated. In
* addition, no other thread should be modifying the heap.
*/
- bool PrepareImageAddressSpace(bool preload_dex_caches, TimingLogger* timings);
+ bool PrepareImageAddressSpace(TimingLogger* timings);
bool IsImageAddressSpaceReady() const {
DCHECK(!image_infos_.empty());
@@ -208,11 +208,7 @@ class ImageWriter final {
kRuntimeMethod,
// Metadata bin for data that is temporary during image lifetime.
kMetadata,
- // Dex cache arrays have a special slot for PC-relative addressing. Since they are
- // huge, and as such their dirtiness is not important for the clean/dirty separation,
- // we arbitrarily keep them at the end of the native data.
- kDexCacheArray, // Arrays belonging to dex cache.
- kLast = kDexCacheArray,
+ kLast = kMetadata,
// Number of bins which are for mirror objects.
kMirrorCount = kArtField,
};
@@ -229,7 +225,6 @@ class ImageWriter final {
kRuntimeMethod,
kIMTable,
kIMTConflictTable,
- kDexCacheArray,
};
friend std::ostream& operator<<(std::ostream& stream, NativeObjectRelocationType type);
@@ -409,7 +404,6 @@ class ImageWriter final {
size_t GetImageOffset(mirror::Object* object, size_t oat_index) const
REQUIRES_SHARED(Locks::mutator_lock_);
- void PrepareDexCacheArraySlots() REQUIRES_SHARED(Locks::mutator_lock_);
Bin AssignImageBinSlot(mirror::Object* object, size_t oat_index)
REQUIRES_SHARED(Locks::mutator_lock_);
void RecordNativeRelocations(ObjPtr<mirror::Object> obj, size_t oat_index)
@@ -423,8 +417,6 @@ class ImageWriter final {
void UpdateImageBinSlotOffset(mirror::Object* object, size_t oat_index, size_t new_offset)
REQUIRES_SHARED(Locks::mutator_lock_);
- void AddDexCacheArrayRelocation(void* array, size_t offset, size_t oat_index)
- REQUIRES_SHARED(Locks::mutator_lock_);
void AddMethodPointerArray(ObjPtr<mirror::PointerArray> arr)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -455,11 +447,6 @@ class ImageWriter final {
void ClearDexCache(ObjPtr<mirror::DexCache> dex_cache)
REQUIRES_SHARED(Locks::mutator_lock_);
- // Preload deterministic DexCache contents.
- void PreloadDexCache(ObjPtr<mirror::DexCache> dex_cache, ObjPtr<mirror::ClassLoader> class_loader)
- REQUIRES_SHARED(Locks::mutator_lock_)
- REQUIRES(!Locks::classlinker_classes_lock_);
-
// Find dex caches for pruning or preloading.
std::vector<ObjPtr<mirror::DexCache>> FindDexCaches(Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_)
@@ -511,29 +498,6 @@ class ImageWriter final {
REQUIRES_SHARED(Locks::mutator_lock_);
void FixupObject(mirror::Object* orig, mirror::Object* copy)
REQUIRES_SHARED(Locks::mutator_lock_);
- template <typename T>
- void FixupDexCacheArrayEntry(std::atomic<mirror::DexCachePair<T>>* orig_array,
- std::atomic<mirror::DexCachePair<T>>* new_array,
- uint32_t array_index)
- REQUIRES_SHARED(Locks::mutator_lock_);
- template <typename T>
- void FixupDexCacheArrayEntry(std::atomic<mirror::NativeDexCachePair<T>>* orig_array,
- std::atomic<mirror::NativeDexCachePair<T>>* new_array,
- uint32_t array_index)
- REQUIRES_SHARED(Locks::mutator_lock_);
- void FixupDexCacheArrayEntry(GcRoot<mirror::CallSite>* orig_array,
- GcRoot<mirror::CallSite>* new_array,
- uint32_t array_index)
- REQUIRES_SHARED(Locks::mutator_lock_);
- template <typename EntryType>
- void FixupDexCacheArray(mirror::DexCache* orig_dex_cache,
- mirror::DexCache* copy_dex_cache,
- MemberOffset array_offset,
- uint32_t size)
- REQUIRES_SHARED(Locks::mutator_lock_);
- void FixupDexCache(mirror::DexCache* orig_dex_cache,
- mirror::DexCache* copy_dex_cache)
- REQUIRES_SHARED(Locks::mutator_lock_);
void FixupPointerArray(mirror::Object* dst,
mirror::PointerArray* arr,
Bin array_type)
diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc
index 88aac561de..aa1ed936c8 100644
--- a/dex2oat/linker/oat_writer.cc
+++ b/dex2oat/linker/oat_writer.cc
@@ -70,7 +70,6 @@
#include "stream/buffered_output_stream.h"
#include "stream/file_output_stream.h"
#include "stream/output_stream.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
#include "vdex_file.h"
#include "verifier/verifier_deps.h"
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 02636e24ea..2d95a8aa04 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1894,7 +1894,6 @@ class ImageDumper {
os << "\n";
Runtime* const runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
std::string image_filename = image_space_.GetImageFilename();
std::string oat_location = ImageHeader::GetOatLocationFromImageLocation(image_filename);
os << "OAT LOCATION: " << oat_location;
@@ -1946,18 +1945,6 @@ class ImageDumper {
heap->RevokeAllThreadLocalAllocationStacks(self);
}
{
- // Mark dex caches.
- dex_caches_.clear();
- {
- ReaderMutexLock mu(self, *Locks::dex_lock_);
- for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
- ObjPtr<mirror::DexCache> dex_cache =
- ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
- if (dex_cache != nullptr) {
- dex_caches_.insert(dex_cache.Ptr());
- }
- }
- }
auto dump_visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
DumpObject(obj);
};
@@ -1996,7 +1983,7 @@ class ImageDumper {
const auto& object_section = image_header_.GetObjectsSection();
const auto& field_section = image_header_.GetFieldsSection();
const auto& method_section = image_header_.GetMethodsSection();
- const auto& dex_cache_arrays_section = image_header_.GetDexCacheArraysSection();
+ const auto& runtime_method_section = image_header_.GetRuntimeMethodsSection();
const auto& intern_section = image_header_.GetInternedStringsSection();
const auto& class_table_section = image_header_.GetClassTableSection();
const auto& sro_section = image_header_.GetImageStringReferenceOffsetsSection();
@@ -2022,15 +2009,10 @@ class ImageDumper {
CHECK_ALIGNED(method_section.Offset(), 4);
stats_.alignment_bytes += method_section.Offset() - end_fields;
- // Dex cache arrays section is aligned depending on the target. Just check for 4-byte alignment.
- uint32_t end_methods = method_section.Offset() + method_section.Size();
- CHECK_ALIGNED(dex_cache_arrays_section.Offset(), 4);
- stats_.alignment_bytes += dex_cache_arrays_section.Offset() - end_methods;
-
// Intern table is 8-byte aligned.
- uint32_t end_caches = dex_cache_arrays_section.Offset() + dex_cache_arrays_section.Size();
- CHECK_EQ(RoundUp(end_caches, 8U), intern_section.Offset());
- stats_.alignment_bytes += intern_section.Offset() - end_caches;
+ uint32_t end_methods = runtime_method_section.Offset() + runtime_method_section.Size();
+ CHECK_EQ(RoundUp(end_methods, 8U), intern_section.Offset());
+ stats_.alignment_bytes += intern_section.Offset() - end_methods;
// Add space between intern table and class table.
uint32_t end_intern = intern_section.Offset() + intern_section.Size();
@@ -2044,7 +2026,6 @@ class ImageDumper {
stats_.bitmap_bytes += bitmap_section.Size();
stats_.art_field_bytes += field_section.Size();
stats_.art_method_bytes += method_section.Size();
- stats_.dex_cache_arrays_bytes += dex_cache_arrays_section.Size();
stats_.interned_strings_bytes += intern_section.Size();
stats_.class_table_bytes += class_table_section.Size();
stats_.sro_offset_bytes += sro_section.Size();
@@ -2219,7 +2200,6 @@ class ImageDumper {
}
ScopedIndentation indent1(&vios_);
DumpFields(os, obj, obj_class);
- const PointerSize image_pointer_size = image_header_.GetPointerSize();
if (obj->IsObjectArray()) {
ObjPtr<mirror::ObjectArray<mirror::Object>> obj_array = obj->AsObjectArray<mirror::Object>();
for (int32_t i = 0, length = obj_array->GetLength(); i < length; i++) {
@@ -2258,113 +2238,6 @@ class ImageDumper {
PrintField(os, &field, field.GetDeclaringClass());
}
}
- } else {
- auto it = dex_caches_.find(obj);
- if (it != dex_caches_.end()) {
- auto* dex_cache = down_cast<mirror::DexCache*>(obj);
- const auto& field_section = image_header_.GetFieldsSection();
- const auto& method_section = image_header_.GetMethodsSection();
- size_t num_methods = dex_cache->NumResolvedMethods();
- if (num_methods != 0u) {
- os << "Methods (size=" << num_methods << "):\n";
- ScopedIndentation indent2(&vios_);
- mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
- for (size_t i = 0, length = dex_cache->NumResolvedMethods(); i < length; ++i) {
- ArtMethod* elem = mirror::DexCache::GetNativePairPtrSize(
- resolved_methods, i, image_pointer_size).object;
- size_t run = 0;
- for (size_t j = i + 1;
- j != length &&
- elem == mirror::DexCache::GetNativePairPtrSize(
- resolved_methods, j, image_pointer_size).object;
- ++j) {
- ++run;
- }
- if (run == 0) {
- os << StringPrintf("%zd: ", i);
- } else {
- os << StringPrintf("%zd to %zd: ", i, i + run);
- i = i + run;
- }
- std::string msg;
- if (elem == nullptr) {
- msg = "null";
- } else if (method_section.Contains(
- reinterpret_cast<uint8_t*>(elem) - image_space_.Begin())) {
- msg = reinterpret_cast<ArtMethod*>(elem)->PrettyMethod();
- } else {
- msg = "<not in method section>";
- }
- os << StringPrintf("%p %s\n", elem, msg.c_str());
- }
- }
- size_t num_fields = dex_cache->NumResolvedFields();
- if (num_fields != 0u) {
- os << "Fields (size=" << num_fields << "):\n";
- ScopedIndentation indent2(&vios_);
- auto* resolved_fields = dex_cache->GetResolvedFields();
- for (size_t i = 0, length = dex_cache->NumResolvedFields(); i < length; ++i) {
- ArtField* elem = mirror::DexCache::GetNativePairPtrSize(
- resolved_fields, i, image_pointer_size).object;
- size_t run = 0;
- for (size_t j = i + 1;
- j != length &&
- elem == mirror::DexCache::GetNativePairPtrSize(
- resolved_fields, j, image_pointer_size).object;
- ++j) {
- ++run;
- }
- if (run == 0) {
- os << StringPrintf("%zd: ", i);
- } else {
- os << StringPrintf("%zd to %zd: ", i, i + run);
- i = i + run;
- }
- std::string msg;
- if (elem == nullptr) {
- msg = "null";
- } else if (field_section.Contains(
- reinterpret_cast<uint8_t*>(elem) - image_space_.Begin())) {
- msg = reinterpret_cast<ArtField*>(elem)->PrettyField();
- } else {
- msg = "<not in field section>";
- }
- os << StringPrintf("%p %s\n", elem, msg.c_str());
- }
- }
- size_t num_types = dex_cache->NumResolvedTypes();
- if (num_types != 0u) {
- os << "Types (size=" << num_types << "):\n";
- ScopedIndentation indent2(&vios_);
- auto* resolved_types = dex_cache->GetResolvedTypes();
- for (size_t i = 0; i < num_types; ++i) {
- auto pair = resolved_types[i].load(std::memory_order_relaxed);
- size_t run = 0;
- for (size_t j = i + 1; j != num_types; ++j) {
- auto other_pair = resolved_types[j].load(std::memory_order_relaxed);
- if (pair.index != other_pair.index ||
- pair.object.Read() != other_pair.object.Read()) {
- break;
- }
- ++run;
- }
- if (run == 0) {
- os << StringPrintf("%zd: ", i);
- } else {
- os << StringPrintf("%zd to %zd: ", i, i + run);
- i = i + run;
- }
- std::string msg;
- auto* elem = pair.object.Read();
- if (elem == nullptr) {
- msg = "null";
- } else {
- msg = elem->PrettyClass();
- }
- os << StringPrintf("%p %u %s\n", elem, pair.index, msg.c_str());
- }
- }
- }
}
std::string temp;
stats_.Update(obj_class->GetDescriptor(&temp), object_bytes);
@@ -2482,7 +2355,6 @@ class ImageDumper {
size_t object_bytes = 0u;
size_t art_field_bytes = 0u;
size_t art_method_bytes = 0u;
- size_t dex_cache_arrays_bytes = 0u;
size_t interned_strings_bytes = 0u;
size_t class_table_bytes = 0u;
size_t sro_offset_bytes = 0u;
@@ -2657,7 +2529,6 @@ class ImageDumper {
"object_bytes = %8zd (%2.0f%% of art file bytes)\n"
"art_field_bytes = %8zd (%2.0f%% of art file bytes)\n"
"art_method_bytes = %8zd (%2.0f%% of art file bytes)\n"
- "dex_cache_arrays_bytes = %8zd (%2.0f%% of art file bytes)\n"
"interned_string_bytes = %8zd (%2.0f%% of art file bytes)\n"
"class_table_bytes = %8zd (%2.0f%% of art file bytes)\n"
"sro_bytes = %8zd (%2.0f%% of art file bytes)\n"
@@ -2668,8 +2539,6 @@ class ImageDumper {
object_bytes, PercentOfFileBytes(object_bytes),
art_field_bytes, PercentOfFileBytes(art_field_bytes),
art_method_bytes, PercentOfFileBytes(art_method_bytes),
- dex_cache_arrays_bytes,
- PercentOfFileBytes(dex_cache_arrays_bytes),
interned_strings_bytes,
PercentOfFileBytes(interned_strings_bytes),
class_table_bytes, PercentOfFileBytes(class_table_bytes),
@@ -2678,10 +2547,6 @@ class ImageDumper {
bitmap_bytes, PercentOfFileBytes(bitmap_bytes),
alignment_bytes, PercentOfFileBytes(alignment_bytes))
<< std::flush;
- CHECK_EQ(file_bytes,
- header_bytes + object_bytes + art_field_bytes + art_method_bytes +
- dex_cache_arrays_bytes + interned_strings_bytes + class_table_bytes +
- sro_offset_bytes + metadata_bytes + bitmap_bytes + alignment_bytes);
}
os << "object_bytes breakdown:\n";
@@ -2760,7 +2625,6 @@ class ImageDumper {
const ImageHeader& image_header_;
std::unique_ptr<OatDumper> oat_dumper_;
OatDumperOptions* oat_dumper_options_;
- std::set<mirror::Object*> dex_caches_;
DISALLOW_COPY_AND_ASSIGN(ImageDumper);
};
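
The dex-cache dumping removed from oatdump above grouped runs of identical consecutive entries so that long null stretches print as a single "<i> to <j>: null" line. A minimal standalone sketch of that grouping follows, with made-up entries in place of the resolved-method/field arrays:

#include <cstdio>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> entries = {"null", "null", "null", "Foo.bar()", "null", "null"};
  for (size_t i = 0; i < entries.size(); ++i) {
    size_t run = 0;
    // Extend the run while the following entries compare equal to entries[i].
    for (size_t j = i + 1; j < entries.size() && entries[j] == entries[i]; ++j) {
      ++run;
    }
    if (run == 0) {
      std::printf("%zu: %s\n", i, entries[i].c_str());
    } else {
      std::printf("%zu to %zu: %s\n", i, i + run, entries[i].c_str());
      i += run;
    }
  }
  return 0;
}
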
diff --git a/oatdump/oatdump_test.h b/oatdump/oatdump_test.h
index 6f4616e8ba..ece7128240 100644
--- a/oatdump/oatdump_test.h
+++ b/oatdump/oatdump_test.h
@@ -342,8 +342,11 @@ class OatDumpTest : public CommonRuntimeTest {
// Avoid crash as valid exit.
return ::testing::AssertionSuccess();
}
- return ::testing::AssertionFailure() << "Did not terminate successfully: " << res.status_code
- << " " << error_buf.data();
+ std::ostringstream cmd;
+ std::copy(exec_argv.begin(), exec_argv.end(), std::ostream_iterator<std::string>(cmd, " "));
+ LOG(ERROR) << "Output: " << error_buf.data(); // Output first as it might be extremely long.
+ LOG(ERROR) << "Failed command: " << cmd.str(); // Useful to reproduce the failure separately.
+ return ::testing::AssertionFailure() << "Did not terminate successfully: " << res.status_code;
} else if (expect_failure) {
return ::testing::AssertionFailure() << "Expected failure";
}
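
The new failure path above joins exec_argv into a single reproducible command line with std::ostream_iterator before logging it. A small standalone version of the same idiom, with invented arguments:

#include <algorithm>
#include <iostream>
#include <iterator>
#include <sstream>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> exec_argv = {"oatdump", "--oat-file=core.oat", "--output=/dev/null"};
  std::ostringstream cmd;
  // Concatenate all arguments, separated by spaces, into one printable string.
  std::copy(exec_argv.begin(), exec_argv.end(), std::ostream_iterator<std::string>(cmd, " "));
  std::cout << "Failed command: " << cmd.str() << "\n";
  return 0;
}
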
diff --git a/openjdkjvmti/ti_redefine.cc b/openjdkjvmti/ti_redefine.cc
index afaea6279d..58c1c4e926 100644
--- a/openjdkjvmti/ti_redefine.cc
+++ b/openjdkjvmti/ti_redefine.cc
@@ -758,13 +758,10 @@ art::mirror::DexCache* Redefiner::ClassRedefinition::CreateNewDexCache(
return nullptr;
}
art::WriterMutexLock mu(driver_->self_, *art::Locks::dex_lock_);
- art::mirror::DexCache::InitializeDexCache(driver_->self_,
- cache.Get(),
- location.Get(),
- dex_file_.get(),
- loader.IsNull() ? driver_->runtime_->GetLinearAlloc()
- : loader->GetAllocator(),
- art::kRuntimePointerSize);
+ cache->SetLocation(location.Get());
+ cache->InitializeNativeFields(dex_file_.get(),
+ loader.IsNull() ? driver_->runtime_->GetLinearAlloc()
+ : loader->GetAllocator());
return cache.Get();
}
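
For context on the call-site change above: instead of the monolithic mirror::DexCache::InitializeDexCache() helper, the cache now has its location set and its native arrays allocated via two member calls, SetLocation() and InitializeNativeFields(dex_file, allocator). The sketch below is a standalone analogue of that two-step pattern, not ART's real classes; DexFileStub, Allocator, and the array layout are invented for illustration.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct DexFileStub { size_t num_strings = 3; };

class Allocator {
 public:
  // Hands out zero-initialized buffers owned by the allocator, loosely mirroring LinearAlloc.
  void* AllocArray(size_t bytes) {
    buffers_.emplace_back(new char[bytes]());
    return buffers_.back().get();
  }
 private:
  std::vector<std::unique_ptr<char[]>> buffers_;
};

class DexCacheStub {
 public:
  void SetLocation(std::string location) { location_ = std::move(location); }
  void InitializeNativeFields(const DexFileStub* dex_file, Allocator* alloc) {
    dex_file_ = dex_file;
    strings_ = static_cast<void**>(alloc->AllocArray(dex_file->num_strings * sizeof(void*)));
  }
  const std::string& Location() const { return location_; }
  size_t NumStrings() const { return dex_file_ != nullptr ? dex_file_->num_strings : 0; }
  bool HasNativeArrays() const { return strings_ != nullptr; }
 private:
  std::string location_;
  const DexFileStub* dex_file_ = nullptr;
  void** strings_ = nullptr;
};

int main() {
  Allocator linear_alloc;
  DexFileStub dex_file;
  DexCacheStub cache;
  cache.SetLocation("/data/app/demo/base.apk");
  cache.InitializeNativeFields(&dex_file, &linear_alloc);
  std::cout << "Initialized " << cache.Location() << ", strings=" << cache.NumStrings()
            << ", native arrays ready=" << cache.HasNativeArrays() << "\n";
  return 0;
}
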
diff --git a/openjdkjvmti/transform.cc b/openjdkjvmti/transform.cc
index 715a98c932..011bd717b4 100644
--- a/openjdkjvmti/transform.cc
+++ b/openjdkjvmti/transform.cc
@@ -68,7 +68,6 @@
#include "ti_redefine.h"
#include "ti_logging.h"
#include "transform.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
namespace openjdkjvmti {
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b03755c810..ee64eda7bc 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -143,7 +143,6 @@
#include "thread_list.h"
#include "trace.h"
#include "transaction.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
#include "verifier/class_verifier.h"
#include "well_known_classes.h"
@@ -1518,7 +1517,6 @@ size_t CountInternedStringReferences(gc::space::ImageSpace& space,
template <typename Visitor>
static void VisitInternedStringReferences(
gc::space::ImageSpace* space,
- bool use_preresolved_strings,
const Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
const uint8_t* target_base = space->Begin();
const ImageSection& sro_section =
@@ -1535,75 +1533,26 @@ static void VisitInternedStringReferences(
for (size_t offset_index = 0; offset_index < num_string_offsets; ++offset_index) {
uint32_t base_offset = sro_base[offset_index].first;
- if (HasDexCacheStringNativeRefTag(base_offset)) {
- base_offset = ClearDexCacheNativeRefTags(base_offset);
- DCHECK_ALIGNED(base_offset, 2);
-
- ObjPtr<mirror::DexCache> dex_cache =
- reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
- uint32_t string_slot_index = sro_base[offset_index].second;
-
- mirror::StringDexCachePair source =
- dex_cache->GetStrings()[string_slot_index].load(std::memory_order_relaxed);
- ObjPtr<mirror::String> referred_string = source.object.Read();
- DCHECK(referred_string != nullptr);
-
- ObjPtr<mirror::String> visited = visitor(referred_string);
- if (visited != referred_string) {
- // Because we are not using a helper function we need to mark the GC card manually.
- WriteBarrier::ForEveryFieldWrite(dex_cache);
- dex_cache->GetStrings()[string_slot_index].store(
- mirror::StringDexCachePair(visited, source.index), std::memory_order_relaxed);
- }
- } else if (HasDexCachePreResolvedStringNativeRefTag(base_offset)) {
- if (use_preresolved_strings) {
- base_offset = ClearDexCacheNativeRefTags(base_offset);
- DCHECK_ALIGNED(base_offset, 2);
-
- ObjPtr<mirror::DexCache> dex_cache =
- reinterpret_cast<mirror::DexCache*>(space->Begin() + base_offset);
- uint32_t string_index = sro_base[offset_index].second;
-
- GcRoot<mirror::String>* preresolved_strings =
- dex_cache->GetPreResolvedStrings();
- // Handle calls to ClearPreResolvedStrings that might occur concurrently by the profile
- // saver that runs shortly after startup. In case the strings are cleared, there is nothing
- // to fix up.
- if (preresolved_strings != nullptr) {
- ObjPtr<mirror::String> referred_string =
- preresolved_strings[string_index].Read();
- if (referred_string != nullptr) {
- ObjPtr<mirror::String> visited = visitor(referred_string);
- if (visited != referred_string) {
- // Because we are not using a helper function we need to mark the GC card manually.
- WriteBarrier::ForEveryFieldWrite(dex_cache);
- preresolved_strings[string_index] = GcRoot<mirror::String>(visited);
- }
- }
- }
- }
- } else {
- uint32_t raw_member_offset = sro_base[offset_index].second;
- DCHECK_ALIGNED(base_offset, 2);
- DCHECK_ALIGNED(raw_member_offset, 2);
-
- ObjPtr<mirror::Object> obj_ptr =
- reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
- MemberOffset member_offset(raw_member_offset);
- ObjPtr<mirror::String> referred_string =
- obj_ptr->GetFieldObject<mirror::String,
- kVerifyNone,
- kWithoutReadBarrier,
- /* kIsVolatile= */ false>(member_offset);
- DCHECK(referred_string != nullptr);
-
- ObjPtr<mirror::String> visited = visitor(referred_string);
- if (visited != referred_string) {
- obj_ptr->SetFieldObject</* kTransactionActive= */ false,
- /* kCheckTransaction= */ false,
+ uint32_t raw_member_offset = sro_base[offset_index].second;
+ DCHECK_ALIGNED(base_offset, 2);
+ DCHECK_ALIGNED(raw_member_offset, 2);
+
+ ObjPtr<mirror::Object> obj_ptr =
+ reinterpret_cast<mirror::Object*>(space->Begin() + base_offset);
+ MemberOffset member_offset(raw_member_offset);
+ ObjPtr<mirror::String> referred_string =
+ obj_ptr->GetFieldObject<mirror::String,
kVerifyNone,
- /* kIsVolatile= */ false>(member_offset, visited);
- }
+ kWithoutReadBarrier,
+ /* kIsVolatile= */ false>(member_offset);
+ DCHECK(referred_string != nullptr);
+
+ ObjPtr<mirror::String> visited = visitor(referred_string);
+ if (visited != referred_string) {
+ obj_ptr->SetFieldObject</* kTransactionActive= */ false,
+ /* kCheckTransaction= */ false,
+ kVerifyNone,
+ /* kIsVolatile= */ false>(member_offset, visited);
}
}
}
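
With the dex-cache tag bits gone, every recorded string reference is now a plain (object base offset, member offset) pair into the image, visited exactly as in the simplified loop above. A standalone sketch of that fixup shape follows; the flat uint32_t "image" buffer and the remap table contents are assumptions for illustration, not the real object layout.

#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <utility>
#include <vector>

int main() {
  // Pretend image memory: three 32-bit "fields" that hold string ids.
  std::vector<uint32_t> image = {7, 42, 7};
  // Each entry: (base offset of the object, offset of the reference within it), in bytes.
  std::vector<std::pair<uint32_t, uint32_t>> string_reference_offsets = {{0, 0}, {4, 0}, {8, 0}};
  // Remap table for conflicting interns, like intern_remap in HandleAppImageStrings().
  std::unordered_map<uint32_t, uint32_t> intern_remap = {{7, 100}};

  uint8_t* base = reinterpret_cast<uint8_t*>(image.data());
  for (const auto& [base_offset, member_offset] : string_reference_offsets) {
    uint32_t* field = reinterpret_cast<uint32_t*>(base + base_offset + member_offset);
    auto it = intern_remap.find(*field);
    if (it != intern_remap.end()) {
      *field = it->second;  // Point the reference at the winning interned string.
    }
  }
  for (uint32_t v : image) {
    std::cout << v << " ";
  }
  std::cout << "\n";
  return 0;
}
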
@@ -1621,7 +1570,6 @@ static void VerifyInternedStringReferences(gc::space::ImageSpace* space)
size_t num_recorded_refs = 0u;
VisitInternedStringReferences(
space,
- /*use_preresolved_strings=*/ true,
[&image_interns, &num_recorded_refs](ObjPtr<mirror::String> str)
REQUIRES_SHARED(Locks::mutator_lock_) {
auto it = image_interns.find(GcRoot<mirror::String>(str));
@@ -1643,8 +1591,7 @@ class AppImageLoadingHelper {
ClassLinker* class_linker,
gc::space::ImageSpace* space,
Handle<mirror::ClassLoader> class_loader,
- Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
- ClassTable::ClassSet* new_class_set)
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1656,8 +1603,7 @@ void AppImageLoadingHelper::Update(
ClassLinker* class_linker,
gc::space::ImageSpace* space,
Handle<mirror::ClassLoader> class_loader,
- Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches,
- ClassTable::ClassSet* new_class_set)
+ Handle<mirror::ObjectArray<mirror::DexCache>> dex_caches)
REQUIRES(!Locks::dex_lock_)
REQUIRES_SHARED(Locks::mutator_lock_) {
ScopedTrace app_image_timing("AppImage:Updating");
@@ -1672,7 +1618,6 @@ void AppImageLoadingHelper::Update(
Runtime* const runtime = Runtime::Current();
gc::Heap* const heap = runtime->GetHeap();
const ImageHeader& header = space->GetImageHeader();
- bool load_app_image_startup_cache = runtime->LoadAppImageStartupCache();
{
// Register dex caches with the class loader.
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
@@ -1683,56 +1628,6 @@ void AppImageLoadingHelper::Update(
CHECK(class_linker->FindDexCacheDataLocked(*dex_file) == nullptr);
class_linker->RegisterDexFileLocked(*dex_file, dex_cache, class_loader.Get());
}
-
- if (!load_app_image_startup_cache) {
- dex_cache->ClearPreResolvedStrings();
- }
-
- if (kIsDebugBuild) {
- CHECK(new_class_set != nullptr);
- mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
- const size_t num_types = dex_cache->NumResolvedTypes();
- for (size_t j = 0; j != num_types; ++j) {
- // The image space is not yet added to the heap, avoid read barriers.
- ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
-
- if (space->HasAddress(klass.Ptr())) {
- DCHECK(!klass->IsErroneous()) << klass->GetStatus();
- auto it = new_class_set->find(ClassTable::TableSlot(klass));
- DCHECK(it != new_class_set->end());
- DCHECK_EQ(it->Read(), klass);
- ObjPtr<mirror::Class> super_class = klass->GetSuperClass();
-
- if (super_class != nullptr && !heap->ObjectIsInBootImageSpace(super_class)) {
- auto it2 = new_class_set->find(ClassTable::TableSlot(super_class));
- DCHECK(it2 != new_class_set->end());
- DCHECK_EQ(it2->Read(), super_class);
- }
-
- for (ArtMethod& m : klass->GetDirectMethods(kRuntimePointerSize)) {
- const void* code = m.GetEntryPointFromQuickCompiledCode();
- const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
- if (!class_linker->IsQuickResolutionStub(code) &&
- !class_linker->IsQuickGenericJniStub(code) &&
- !class_linker->IsQuickToInterpreterBridge(code) &&
- !m.IsNative()) {
- DCHECK_EQ(code, oat_code) << m.PrettyMethod();
- }
- }
-
- for (ArtMethod& m : klass->GetVirtualMethods(kRuntimePointerSize)) {
- const void* code = m.GetEntryPointFromQuickCompiledCode();
- const void* oat_code = m.IsInvokable() ? class_linker->GetQuickOatCodeFor(&m) : code;
- if (!class_linker->IsQuickResolutionStub(code) &&
- !class_linker->IsQuickGenericJniStub(code) &&
- !class_linker->IsQuickToInterpreterBridge(code) &&
- !m.IsNative()) {
- DCHECK_EQ(code, oat_code) << m.PrettyMethod();
- }
- }
- }
- }
- }
}
}
@@ -1762,8 +1657,6 @@ void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space)
Runtime* const runtime = Runtime::Current();
InternTable* const intern_table = runtime->GetInternTable();
- const bool load_startup_cache = runtime->LoadAppImageStartupCache();
-
// Add the intern table, removing any conflicts. For conflicts, store the new address in a map
// for faster lookup.
// TODO: Optimize with a bitmap or bloom filter
@@ -1817,7 +1710,6 @@ void AppImageLoadingHelper::HandleAppImageStrings(gc::space::ImageSpace* space)
VLOG(image) << "AppImage:conflictingInternStrings = " << intern_remap.size();
VisitInternedStringReferences(
space,
- load_startup_cache,
[&intern_remap](ObjPtr<mirror::String> str) REQUIRES_SHARED(Locks::mutator_lock_) {
auto it = intern_remap.find(str.Ptr());
if (it != intern_remap.end()) {
@@ -1931,15 +1823,6 @@ class ImageChecker final {
heap->VisitObjects(visitor);
}
- static void CheckArtMethodDexCacheArray(gc::Heap* heap,
- ClassLinker* class_linker,
- mirror::MethodDexCacheType* arr,
- size_t size)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- ImageChecker ic(heap, class_linker);
- ic.CheckArtMethodDexCacheArray(arr, size);
- }
-
private:
ImageChecker(gc::Heap* heap, ClassLinker* class_linker)
: spaces_(heap->GetBootImageSpaces()),
@@ -1992,30 +1875,6 @@ class ImageChecker final {
}
}
- void CheckArtMethodDexCacheArray(mirror::MethodDexCacheType* arr, size_t size)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- CHECK_EQ(arr != nullptr, size != 0u);
- if (arr != nullptr) {
- bool contains = false;
- for (auto space : spaces_) {
- auto offset = reinterpret_cast<uint8_t*>(arr) - space->Begin();
- if (space->GetImageHeader().GetDexCacheArraysSection().Contains(offset)) {
- contains = true;
- break;
- }
- }
- CHECK(contains);
- }
- for (size_t j = 0; j < size; ++j) {
- auto pair = mirror::DexCache::GetNativePairPtrSize(arr, j, pointer_size_);
- ArtMethod* method = pair.object;
- // expected_class == null means we are a dex cache.
- if (method != nullptr) {
- CheckArtMethod(method, nullptr);
- }
- }
- }
-
const std::vector<gc::space::ImageSpace*>& spaces_;
const PointerSize pointer_size_;
@@ -2027,8 +1886,8 @@ class ImageChecker final {
static void VerifyAppImage(const ImageHeader& header,
const Handle<mirror::ClassLoader>& class_loader,
- const Handle<mirror::ObjectArray<mirror::DexCache> >& dex_caches,
- ClassTable* class_table, gc::space::ImageSpace* space)
+ ClassTable* class_table,
+ gc::space::ImageSpace* space)
REQUIRES_SHARED(Locks::mutator_lock_) {
header.VisitPackedArtMethods([&](ArtMethod& method) REQUIRES_SHARED(Locks::mutator_lock_) {
ObjPtr<mirror::Class> klass = method.GetDeclaringClass();
@@ -2056,17 +1915,6 @@ static void VerifyAppImage(const ImageHeader& header,
}
}
}
- // Check that all non-primitive classes in dex caches are also in the class table.
- for (auto dex_cache : dex_caches.ConstIterate<mirror::DexCache>()) {
- mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
- for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
- ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
- if (klass != nullptr && !klass->IsPrimitive()) {
- CHECK(class_table->Contains(klass))
- << klass->PrettyDescriptor() << " " << dex_cache->GetDexFile()->GetLocation();
- }
- }
- }
}
bool ClassLinker::AddImageSpace(
@@ -2138,24 +1986,15 @@ bool ClassLinker::AddImageSpace(
return false;
}
- if (app_image) {
- // The current dex file field is bogus, overwrite it so that we can get the dex file in the
- // loop below.
- dex_cache->SetDexFile(dex_file.get());
- mirror::TypeDexCacheType* const types = dex_cache->GetResolvedTypes();
- for (int32_t j = 0, num_types = dex_cache->NumResolvedTypes(); j < num_types; j++) {
- ObjPtr<mirror::Class> klass = types[j].load(std::memory_order_relaxed).object.Read();
- if (klass != nullptr) {
- DCHECK(!klass->IsErroneous()) << klass->GetStatus();
- }
- }
- } else {
- if (kCheckImageObjects) {
- ImageChecker::CheckArtMethodDexCacheArray(heap,
- this,
- dex_cache->GetResolvedMethods(),
- dex_cache->NumResolvedMethods());
- }
+ LinearAlloc* linear_alloc = GetOrCreateAllocatorForClassLoader(class_loader.Get());
+ DCHECK(linear_alloc != nullptr);
+ DCHECK_EQ(linear_alloc == Runtime::Current()->GetLinearAlloc(), !app_image);
+ {
+ // Native fields are all null. Initialize them and allocate native memory.
+ WriterMutexLock mu(self, *Locks::dex_lock_);
+ dex_cache->InitializeNativeFields(dex_file.get(), linear_alloc);
+ }
+ if (!app_image) {
// Register dex files, keep track of existing ones that are conflicts.
AppendToBootClassPath(dex_file.get(), dex_cache);
}
@@ -2172,14 +2011,6 @@ bool ClassLinker::AddImageSpace(
}
if (kCheckImageObjects) {
- for (auto dex_cache : dex_caches.Iterate<mirror::DexCache>()) {
- for (size_t j = 0; j < dex_cache->NumResolvedFields(); ++j) {
- auto* field = dex_cache->GetResolvedField(j, image_pointer_size_);
- if (field != nullptr) {
- CHECK(field->GetDeclaringClass()->GetClass() != nullptr);
- }
- }
- }
if (!app_image) {
ImageChecker::CheckObjects(heap, this);
}
@@ -2244,7 +2075,7 @@ bool ClassLinker::AddImageSpace(
VLOG(image) << "Adding class table classes took " << PrettyDuration(NanoTime() - start_time2);
}
if (app_image) {
- AppImageLoadingHelper::Update(this, space, class_loader, dex_caches, &temp_set);
+ AppImageLoadingHelper::Update(this, space, class_loader, dex_caches);
{
ScopedTrace trace("AppImage:UpdateClassLoaders");
@@ -2297,7 +2128,7 @@ bool ClassLinker::AddImageSpace(
// This verification needs to happen after the classes have been added to the class loader.
// Since it ensures classes are in the class table.
ScopedTrace trace("AppImage:Verify");
- VerifyAppImage(header, class_loader, dex_caches, class_table, space);
+ VerifyAppImage(header, class_loader, class_table, space);
}
VLOG(class_linker) << "Adding image space took " << PrettyDuration(NanoTime() - start_time);
@@ -2596,11 +2427,8 @@ ObjPtr<mirror::PointerArray> ClassLinker::AllocPointerArray(Thread* self, size_t
: ObjPtr<mirror::Array>(mirror::IntArray::Alloc(self, length)));
}
-ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
- Thread* self,
- const DexFile& dex_file) {
+ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(Thread* self, const DexFile& dex_file) {
StackHandleScope<1> hs(self);
- DCHECK(out_location != nullptr);
auto dex_cache(hs.NewHandle(ObjPtr<mirror::DexCache>::DownCast(
GetClassRoot<mirror::DexCache>(this)->AllocObject(self))));
if (dex_cache == nullptr) {
@@ -2614,24 +2442,17 @@ ObjPtr<mirror::DexCache> ClassLinker::AllocDexCache(/*out*/ ObjPtr<mirror::Strin
self->AssertPendingOOMException();
return nullptr;
}
- *out_location = location;
+ dex_cache->SetLocation(location);
return dex_cache.Get();
}
ObjPtr<mirror::DexCache> ClassLinker::AllocAndInitializeDexCache(Thread* self,
const DexFile& dex_file,
LinearAlloc* linear_alloc) {
- ObjPtr<mirror::String> location = nullptr;
- ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(&location, self, dex_file);
+ ObjPtr<mirror::DexCache> dex_cache = AllocDexCache(self, dex_file);
if (dex_cache != nullptr) {
WriterMutexLock mu(self, *Locks::dex_lock_);
- DCHECK(location != nullptr);
- mirror::DexCache::InitializeDexCache(self,
- dex_cache,
- location,
- &dex_file,
- linear_alloc,
- image_pointer_size_);
+ dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
}
return dex_cache;
}
@@ -4073,6 +3894,7 @@ void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
Thread* const self = Thread::Current();
Locks::dex_lock_->AssertExclusiveHeld(self);
CHECK(dex_cache != nullptr) << dex_file.GetLocation();
+ CHECK_EQ(dex_cache->GetDexFile(), &dex_file) << dex_file.GetLocation();
// For app images, the dex cache location may be a suffix of the dex file location since the
// dex file location is an absolute path.
const std::string dex_cache_location = dex_cache->GetLocation()->ToModifiedUtf8();
@@ -4119,7 +3941,6 @@ void ClassLinker::RegisterDexFileLocked(const DexFile& dex_file,
hiddenapi::InitializeDexFileDomain(dex_file, class_loader);
jweak dex_cache_jweak = vm->AddWeakGlobalRef(self, dex_cache);
- dex_cache->SetDexFile(&dex_file);
DexCacheData data;
data.weak_root = dex_cache_jweak;
data.dex_file = dex_cache->GetDexFile();
@@ -4233,11 +4054,7 @@ ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
// get to a suspend point.
StackHandleScope<3> hs(self);
Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(class_loader));
- ObjPtr<mirror::String> location;
- Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(/*out*/&location,
- self,
- dex_file)));
- Handle<mirror::String> h_location(hs.NewHandle(location));
+ Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(AllocDexCache(self, dex_file)));
{
// Avoid a deadlock between a garbage collecting thread running a checkpoint,
// a thread holding the dex lock and blocking on a condition variable regarding
@@ -4247,15 +4064,10 @@ ObjPtr<mirror::DexCache> ClassLinker::RegisterDexFile(const DexFile& dex_file,
const DexCacheData* old_data = FindDexCacheDataLocked(dex_file);
old_dex_cache = DecodeDexCacheLocked(self, old_data);
if (old_dex_cache == nullptr && h_dex_cache != nullptr) {
- // Do InitializeDexCache while holding dex lock to make sure two threads don't call it at the
- // same time with the same dex cache. Since the .bss is shared this can cause failing DCHECK
- // that the arrays are null.
- mirror::DexCache::InitializeDexCache(self,
- h_dex_cache.Get(),
- h_location.Get(),
- &dex_file,
- linear_alloc,
- image_pointer_size_);
+ // Do InitializeNativeFields while holding dex lock to make sure two threads don't call it
+ // at the same time with the same dex cache. Since the .bss is shared this can cause failing
+ // DCHECK that the arrays are null.
+ h_dex_cache->InitializeNativeFields(&dex_file, linear_alloc);
RegisterDexFileLocked(dex_file, h_dex_cache.Get(), h_class_loader.Get());
}
if (old_dex_cache != nullptr) {
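The class_linker.cc hunks above move dex cache setup into DexCache::InitializeNativeFields(), called while holding the dex lock after the managed object has been allocated. A minimal sketch of that allocate-then-lock-then-initialize pattern, using hypothetical stand-in types (DexFile, DexCache, ClassLinkerSketch) rather than ART's:

#include <map>
#include <memory>
#include <mutex>

struct DexFile {};                 // stand-in for art::DexFile
struct DexCache {                  // stand-in for mirror::DexCache
  const DexFile* dex_file = nullptr;
  void InitializeNativeFields(const DexFile* df) { dex_file = df; }  // hypothetical
};

class ClassLinkerSketch {
 public:
  DexCache* RegisterDexFile(const DexFile& dex_file) {
    // Allocate the cache object before taking the lock (mirrors AllocDexCache()).
    auto new_cache = std::make_unique<DexCache>();
    std::lock_guard<std::mutex> lock(dex_lock_);
    auto it = dex_caches_.find(&dex_file);
    if (it != dex_caches_.end()) {
      return it->second.get();  // Another thread registered this dex file first.
    }
    // Initialize native fields while holding the lock so two racing threads
    // never initialize the same cache concurrently.
    new_cache->InitializeNativeFields(&dex_file);
    DexCache* result = new_cache.get();
    dex_caches_[&dex_file] = std::move(new_cache);
    return result;
  }

 private:
  std::mutex dex_lock_;
  std::map<const DexFile*, std::unique_ptr<DexCache>> dex_caches_;
};

Initializing inside the lock is what keeps two racing RegisterDexFile() calls from both filling in the same cache's native arrays, as the comment in the hunk above notes.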
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 33cd2f9746..df9c20936f 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -926,9 +926,7 @@ class ClassLinker {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
- ObjPtr<mirror::DexCache> AllocDexCache(/*out*/ ObjPtr<mirror::String>* out_location,
- Thread* self,
- const DexFile& dex_file)
+ ObjPtr<mirror::DexCache> AllocDexCache(Thread* self, const DexFile& dex_file)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Roles::uninterruptible_);
diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc
index ef851910bf..c677601c0d 100644
--- a/runtime/class_linker_test.cc
+++ b/runtime/class_linker_test.cc
@@ -1528,13 +1528,10 @@ TEST_F(ClassLinkerTest, RegisterDexFileName) {
}
ASSERT_TRUE(dex_cache != nullptr);
}
- // Make a copy of the dex cache and change the name.
- dex_cache.Assign(mirror::Object::Clone(dex_cache, soa.Self())->AsDexCache());
const uint16_t data[] = { 0x20AC, 0x20A1 };
Handle<mirror::String> location(hs.NewHandle(mirror::String::AllocFromUtf16(soa.Self(),
arraysize(data),
data)));
- dex_cache->SetLocation(location.Get());
const DexFile* old_dex_file = dex_cache->GetDexFile();
std::unique_ptr<DexFile> dex_file(new StandardDexFile(old_dex_file->Begin(),
@@ -1543,6 +1540,10 @@ TEST_F(ClassLinkerTest, RegisterDexFileName) {
0u,
nullptr,
nullptr));
+ // Create a dex cache for the copied dex file with the changed name.
+ LinearAlloc* alloc = Runtime::Current()->GetLinearAlloc();
+ dex_cache.Assign(class_linker->AllocAndInitializeDexCache(Thread::Current(), *dex_file, alloc));
+ DCHECK_EQ(dex_cache->GetLocation()->CompareTo(location.Get()), 0);
{
WriterMutexLock mu(soa.Self(), *Locks::dex_lock_);
// Check that inserting with a UTF16 name works.
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index a76e3667b5..99aee0b10b 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -411,35 +411,6 @@ class ImageSpace::PatchObjectVisitor final {
const {}
void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
- void VisitDexCacheArrays(ObjPtr<mirror::DexCache> dex_cache)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- ScopedTrace st("VisitDexCacheArrays");
- FixupDexCacheArray<mirror::StringDexCacheType>(dex_cache,
- mirror::DexCache::StringsOffset(),
- dex_cache->NumStrings<kVerifyNone>());
- FixupDexCacheArray<mirror::TypeDexCacheType>(dex_cache,
- mirror::DexCache::ResolvedTypesOffset(),
- dex_cache->NumResolvedTypes<kVerifyNone>());
- FixupDexCacheArray<mirror::MethodDexCacheType>(dex_cache,
- mirror::DexCache::ResolvedMethodsOffset(),
- dex_cache->NumResolvedMethods<kVerifyNone>());
- FixupDexCacheArray<mirror::FieldDexCacheType>(dex_cache,
- mirror::DexCache::ResolvedFieldsOffset(),
- dex_cache->NumResolvedFields<kVerifyNone>());
- FixupDexCacheArray<mirror::MethodTypeDexCacheType>(
- dex_cache,
- mirror::DexCache::ResolvedMethodTypesOffset(),
- dex_cache->NumResolvedMethodTypes<kVerifyNone>());
- FixupDexCacheArray<GcRoot<mirror::CallSite>>(
- dex_cache,
- mirror::DexCache::ResolvedCallSitesOffset(),
- dex_cache->NumResolvedCallSites<kVerifyNone>());
- FixupDexCacheArray<GcRoot<mirror::String>>(
- dex_cache,
- mirror::DexCache::PreResolvedStringsOffset(),
- dex_cache->NumPreResolvedStrings<kVerifyNone>());
- }
-
template <bool kMayBeNull = true, typename T>
ALWAYS_INLINE void PatchGcRoot(/*inout*/GcRoot<T>* root) const
REQUIRES_SHARED(Locks::mutator_lock_) {
@@ -486,54 +457,6 @@ class ImageSpace::PatchObjectVisitor final {
}
}
- template <typename T>
- void FixupDexCacheArrayEntry(std::atomic<mirror::DexCachePair<T>>* array, uint32_t index)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- static_assert(sizeof(std::atomic<mirror::DexCachePair<T>>) == sizeof(mirror::DexCachePair<T>),
- "Size check for removing std::atomic<>.");
- PatchGcRoot(&(reinterpret_cast<mirror::DexCachePair<T>*>(array)[index].object));
- }
-
- template <typename T>
- void FixupDexCacheArrayEntry(std::atomic<mirror::NativeDexCachePair<T>>* array, uint32_t index)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- static_assert(sizeof(std::atomic<mirror::NativeDexCachePair<T>>) ==
- sizeof(mirror::NativeDexCachePair<T>),
- "Size check for removing std::atomic<>.");
- mirror::NativeDexCachePair<T> pair =
- mirror::DexCache::GetNativePairPtrSize(array, index, kPointerSize);
- if (pair.object != nullptr) {
- pair.object = native_visitor_(pair.object);
- mirror::DexCache::SetNativePairPtrSize(array, index, pair, kPointerSize);
- }
- }
-
- void FixupDexCacheArrayEntry(GcRoot<mirror::CallSite>* array, uint32_t index)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- PatchGcRoot(&array[index]);
- }
-
- void FixupDexCacheArrayEntry(GcRoot<mirror::String>* array, uint32_t index)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- PatchGcRoot(&array[index]);
- }
-
- template <typename EntryType>
- void FixupDexCacheArray(ObjPtr<mirror::DexCache> dex_cache,
- MemberOffset array_offset,
- uint32_t size) REQUIRES_SHARED(Locks::mutator_lock_) {
- EntryType* old_array =
- reinterpret_cast64<EntryType*>(dex_cache->GetField64<kVerifyNone>(array_offset));
- DCHECK_EQ(old_array != nullptr, size != 0u);
- if (old_array != nullptr) {
- EntryType* new_array = native_visitor_(old_array);
- dex_cache->SetField64<kVerifyNone>(array_offset, reinterpret_cast64<uint64_t>(new_array));
- for (uint32_t i = 0; i != size; ++i) {
- FixupDexCacheArrayEntry(new_array, i);
- }
- }
- }
-
private:
// Heap objects visitor.
HeapVisitor heap_visitor_;
@@ -1399,15 +1322,6 @@ class ImageSpace::Loader {
image_header->RelocateImageReferences(app_image_objects.Delta());
image_header->RelocateBootImageReferences(boot_image.Delta());
CHECK_EQ(image_header->GetImageBegin(), target_base);
- // Fix up dex cache DexFile pointers.
- ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
- image_header->GetImageRoot<kWithoutReadBarrier>(ImageHeader::kDexCaches)
- ->AsObjectArray<mirror::DexCache, kVerifyNone>();
- for (int32_t i = 0, count = dex_caches->GetLength(); i < count; ++i) {
- ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get<kVerifyNone, kWithoutReadBarrier>(i);
- CHECK(dex_cache != nullptr);
- patch_object_visitor.VisitDexCacheArrays(dex_cache);
- }
}
{
// Only touches objects in the app image, no need for mutator lock.
@@ -2835,12 +2749,7 @@ class ImageSpace::BootImageLoader {
// This is the last pass over objects, so we do not need to Set().
main_patch_object_visitor.VisitObject(object);
ObjPtr<mirror::Class> klass = object->GetClass<kVerifyNone, kWithoutReadBarrier>();
- if (klass->IsDexCacheClass<kVerifyNone>()) {
- // Patch dex cache array pointers and elements.
- ObjPtr<mirror::DexCache> dex_cache =
- object->AsDexCache<kVerifyNone, kWithoutReadBarrier>();
- main_patch_object_visitor.VisitDexCacheArrays(dex_cache);
- } else if (klass == method_class || klass == constructor_class) {
+ if (klass == method_class || klass == constructor_class) {
// Patch the ArtMethod* in the mirror::Executable subobject.
ObjPtr<mirror::Executable> as_executable =
ObjPtr<mirror::Executable>::DownCast(object);
@@ -3919,39 +3828,14 @@ void ImageSpace::DumpSections(std::ostream& os) const {
}
}
-void ImageSpace::DisablePreResolvedStrings() {
- // Clear dex cache pointers.
- ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
- GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)->AsObjectArray<mirror::DexCache>();
- for (size_t len = dex_caches->GetLength(), i = 0; i < len; ++i) {
- ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
- dex_cache->ClearPreResolvedStrings();
- }
-}
-
void ImageSpace::ReleaseMetadata() {
const ImageSection& metadata = GetImageHeader().GetMetadataSection();
VLOG(image) << "Releasing " << metadata.Size() << " image metadata bytes";
- // In the case where new app images may have been added around the checkpoint, ensure that we
- // don't madvise the cache for these.
- ObjPtr<mirror::ObjectArray<mirror::DexCache>> dex_caches =
- GetImageHeader().GetImageRoot(ImageHeader::kDexCaches)->AsObjectArray<mirror::DexCache>();
- bool have_startup_cache = false;
- for (size_t len = dex_caches->GetLength(), i = 0; i < len; ++i) {
- ObjPtr<mirror::DexCache> dex_cache = dex_caches->Get(i);
- if (dex_cache->NumPreResolvedStrings() != 0u) {
- have_startup_cache = true;
- }
- }
- // Only safe to do for images that have their preresolved strings caches disabled. This is because
- // uncompressed images madvise to the original unrelocated image contents.
- if (!have_startup_cache) {
- // Avoid using ZeroAndReleasePages since the zero fill might not be word atomic.
- uint8_t* const page_begin = AlignUp(Begin() + metadata.Offset(), kPageSize);
- uint8_t* const page_end = AlignDown(Begin() + metadata.End(), kPageSize);
- if (page_begin < page_end) {
- CHECK_NE(madvise(page_begin, page_end - page_begin, MADV_DONTNEED), -1) << "madvise failed";
- }
+ // Avoid using ZeroAndReleasePages since the zero fill might not be word atomic.
+ uint8_t* const page_begin = AlignUp(Begin() + metadata.Offset(), kPageSize);
+ uint8_t* const page_end = AlignDown(Begin() + metadata.End(), kPageSize);
+ if (page_begin < page_end) {
+ CHECK_NE(madvise(page_begin, page_end - page_begin, MADV_DONTNEED), -1) << "madvise failed";
}
}
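With the preresolved-string check gone, ReleaseMetadata() above always trims the metadata pages. A minimal sketch of the same page-trimming arithmetic under POSIX assumptions; ReleaseRange and the alignment helpers are illustrative names, not ART's:

#include <sys/mman.h>
#include <unistd.h>
#include <cassert>
#include <cstddef>
#include <cstdint>

// Alignment must be a power of two for these helpers.
static uint8_t* AlignUpPtr(uint8_t* p, size_t alignment) {
  uintptr_t v = reinterpret_cast<uintptr_t>(p);
  return reinterpret_cast<uint8_t*>((v + alignment - 1) & ~(alignment - 1));
}
static uint8_t* AlignDownPtr(uint8_t* p, size_t alignment) {
  uintptr_t v = reinterpret_cast<uintptr_t>(p);
  return reinterpret_cast<uint8_t*>(v & ~(alignment - 1));
}

// Release only the pages fully contained in [begin + offset, begin + end);
// partially covered pages at either end are left untouched.
void ReleaseRange(uint8_t* begin, size_t offset, size_t end) {
  const size_t page_size = static_cast<size_t>(sysconf(_SC_PAGESIZE));
  uint8_t* const page_begin = AlignUpPtr(begin + offset, page_size);
  uint8_t* const page_end = AlignDownPtr(begin + end, page_size);
  if (page_begin < page_end) {
    int res = madvise(page_begin, page_end - page_begin, MADV_DONTNEED);
    assert(res != -1);
    (void)res;
  }
}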
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index 81ae724ec4..36889fe183 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -273,7 +273,6 @@ class ImageSpace : public MemMapSpace {
// De-initialize the image-space by undoing the effects in Init().
virtual ~ImageSpace();
- void DisablePreResolvedStrings() REQUIRES_SHARED(Locks::mutator_lock_);
void ReleaseMetadata() REQUIRES_SHARED(Locks::mutator_lock_);
protected:
diff --git a/runtime/image.cc b/runtime/image.cc
index d91106a8ac..6f88481f89 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -29,7 +29,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '8', '7', '\0' }; // Long.divideUnsigned
+const uint8_t ImageHeader::kImageVersion[] = { '0', '8', '8', '\0' }; // Remove DexCache arrays.
ImageHeader::ImageHeader(uint32_t image_reservation_size,
uint32_t component_count,
diff --git a/runtime/image.h b/runtime/image.h
index cdeb79b87f..61db627052 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -258,7 +258,6 @@ class PACKED(8) ImageHeader {
kSectionRuntimeMethods,
kSectionImTables,
kSectionIMTConflictTables,
- kSectionDexCacheArrays,
kSectionInternedStrings,
kSectionClassTable,
kSectionStringReferenceOffsets,
@@ -309,10 +308,6 @@ class PACKED(8) ImageHeader {
return GetImageSection(kSectionIMTConflictTables);
}
- const ImageSection& GetDexCacheArraysSection() const {
- return GetImageSection(kSectionDexCacheArrays);
- }
-
const ImageSection& GetInternedStringsSection() const {
return GetImageSection(kSectionInternedStrings);
}
@@ -509,76 +504,11 @@ class PACKED(8) ImageHeader {
* This type holds the information necessary to fix up AppImage string
* references.
*
- * The first element of the pair is an offset into the image space. If the
- * offset is tagged (testable using HasDexCacheNativeRefTag) it indicates the location
- * of a DexCache object that has one or more native references to managed
- * strings that need to be fixed up. In this case the second element has no
- * meaningful value.
- *
- * If the first element isn't tagged then it indicates the location of a
- * managed object with a field that needs fixing up. In this case the second
- * element of the pair is an object-relative offset to the field in question.
+ * The first element indicates the location of a managed object with a field that needs fixing up.
+ * The second element of the pair is an object-relative offset to the field in question.
*/
typedef std::pair<uint32_t, uint32_t> AppImageReferenceOffsetInfo;
-/*
- * Tags the last bit. Used by AppImage logic to differentiate between pointers
- * to managed objects and pointers to native reference arrays.
- */
-template<typename T>
-T SetDexCacheStringNativeRefTag(T val) {
- static_assert(std::is_integral<T>::value, "Expected integral type.");
-
- return val | 1u;
-}
-
-/*
- * Tags the second last bit. Used by AppImage logic to differentiate between pointers
- * to managed objects and pointers to native reference arrays.
- */
-template<typename T>
-T SetDexCachePreResolvedStringNativeRefTag(T val) {
- static_assert(std::is_integral<T>::value, "Expected integral type.");
-
- return val | 2u;
-}
-
-/*
- * Retrieves the value of the last bit. Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-bool HasDexCacheStringNativeRefTag(T val) {
- static_assert(std::is_integral<T>::value, "Expected integral type.");
-
- return (val & 1u) != 0u;
-}
-
-/*
- * Retrieves the value of the second last bit. Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-bool HasDexCachePreResolvedStringNativeRefTag(T val) {
- static_assert(std::is_integral<T>::value, "Expected integral type.");
-
- return (val & 2u) != 0u;
-}
-
-/*
- * Sets the last bit of the value to 0. Used by AppImage logic to
- * differentiate between pointers to managed objects and pointers to native
- * reference arrays.
- */
-template<typename T>
-T ClearDexCacheNativeRefTags(T val) {
- static_assert(std::is_integral<T>::value, "Expected integral type.");
-
- return val & ~3u;
-}
-
std::ostream& operator<<(std::ostream& os, ImageHeader::ImageMethod method);
std::ostream& operator<<(std::ostream& os, ImageHeader::ImageRoot root);
std::ostream& operator<<(std::ostream& os, ImageHeader::ImageSections section);
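With the tag helpers removed, every AppImageReferenceOffsetInfo entry above is a plain (object offset, field offset) pair. A minimal sketch, assuming a 32-bit reference slot, of how such an entry resolves to the address that needs patching (ResolveStringReferenceSlot is an illustrative name, not ART's):

#include <cstdint>
#include <utility>

using AppImageReferenceOffsetInfo = std::pair<uint32_t, uint32_t>;

// first  = offset of the managed object within the image,
// second = offset of the reference field within that object.
inline uint32_t* ResolveStringReferenceSlot(uint8_t* image_begin,
                                            const AppImageReferenceOffsetInfo& info) {
  uint8_t* object = image_begin + info.first;
  return reinterpret_cast<uint32_t*>(object + info.second);
}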
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 010c5a52e4..7736f47066 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -282,8 +282,8 @@ inline void DexCache::SetResolvedMethod(uint32_t method_idx,
template <typename T>
NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
size_t idx,
- PointerSize ptr_size) {
- if (ptr_size == PointerSize::k64) {
+ PointerSize ptr_size ATTRIBUTE_UNUSED) {
+ if (kRuntimePointerSize == PointerSize::k64) {
auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
@@ -299,8 +299,8 @@ template <typename T>
void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
size_t idx,
NativeDexCachePair<T> pair,
- PointerSize ptr_size) {
- if (ptr_size == PointerSize::k64) {
+ PointerSize ptr_size ATTRIBUTE_UNUSED) {
+ if (kRuntimePointerSize == PointerSize::k64) {
auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
AtomicStoreRelease16B(&array[idx], v);
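The dex_cache-inl.h change keeps ptr_size in the signature but decides the 32-bit/64-bit path from the compile-time constant kRuntimePointerSize, so the other arm becomes dead code. A small illustration of that compile-time selection with simplified types (NativePairSize is not an ART function):

#include <cstddef>
#include <cstdint>

enum class PointerSize : size_t { k32 = 4, k64 = 8 };
constexpr PointerSize kRuntimePointerSize =
    sizeof(void*) == 8 ? PointerSize::k64 : PointerSize::k32;

// Size of one {pointer, index} native pair entry for the current build;
// the branch is resolved at compile time, so only one arm is ever emitted.
constexpr size_t NativePairSize() {
  if constexpr (kRuntimePointerSize == PointerSize::k64) {
    return 2 * sizeof(uint64_t);  // 64-bit pointer word + 64-bit index word
  } else {
    return 2 * sizeof(uint32_t);  // 32-bit pointer word + 32-bit index word
  }
}

static_assert(NativePairSize() == 2 * sizeof(void*), "pair width follows pointer width");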
diff --git a/runtime/mirror/dex_cache.cc b/runtime/mirror/dex_cache.cc
index b7adcc2d78..20f4a40353 100644
--- a/runtime/mirror/dex_cache.cc
+++ b/runtime/mirror/dex_cache.cc
@@ -30,88 +30,54 @@
#include "runtime_globals.h"
#include "string.h"
#include "thread.h"
-#include "utils/dex_cache_arrays_layout-inl.h"
#include "write_barrier.h"
namespace art {
namespace mirror {
-void DexCache::InitializeDexCache(Thread* self,
- ObjPtr<mirror::DexCache> dex_cache,
- ObjPtr<mirror::String> location,
- const DexFile* dex_file,
- LinearAlloc* linear_alloc,
- PointerSize image_pointer_size) {
- DCHECK(dex_file != nullptr);
- ScopedAssertNoThreadSuspension sants(__FUNCTION__);
- DexCacheArraysLayout layout(image_pointer_size, dex_file);
- uint8_t* raw_arrays = nullptr;
-
- if (dex_file->NumStringIds() != 0u ||
- dex_file->NumTypeIds() != 0u ||
- dex_file->NumMethodIds() != 0u ||
- dex_file->NumFieldIds() != 0u) {
- static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
- DCHECK(layout.Alignment() == 8u || layout.Alignment() == 16u);
- // Zero-initialized.
- raw_arrays = (layout.Alignment() == 16u)
- ? reinterpret_cast<uint8_t*>(linear_alloc->AllocAlign16(self, layout.Size()))
- : reinterpret_cast<uint8_t*>(linear_alloc->Alloc(self, layout.Size()));
- }
-
- StringDexCacheType* strings = (dex_file->NumStringIds() == 0u) ? nullptr :
- reinterpret_cast<StringDexCacheType*>(raw_arrays + layout.StringsOffset());
- TypeDexCacheType* types = (dex_file->NumTypeIds() == 0u) ? nullptr :
- reinterpret_cast<TypeDexCacheType*>(raw_arrays + layout.TypesOffset());
- MethodDexCacheType* methods = (dex_file->NumMethodIds() == 0u) ? nullptr :
- reinterpret_cast<MethodDexCacheType*>(raw_arrays + layout.MethodsOffset());
- FieldDexCacheType* fields = (dex_file->NumFieldIds() == 0u) ? nullptr :
- reinterpret_cast<FieldDexCacheType*>(raw_arrays + layout.FieldsOffset());
-
- size_t num_strings = kDexCacheStringCacheSize;
- if (dex_file->NumStringIds() < num_strings) {
- num_strings = dex_file->NumStringIds();
- }
- size_t num_types = kDexCacheTypeCacheSize;
- if (dex_file->NumTypeIds() < num_types) {
- num_types = dex_file->NumTypeIds();
- }
- size_t num_fields = kDexCacheFieldCacheSize;
- if (dex_file->NumFieldIds() < num_fields) {
- num_fields = dex_file->NumFieldIds();
- }
- size_t num_methods = kDexCacheMethodCacheSize;
- if (dex_file->NumMethodIds() < num_methods) {
- num_methods = dex_file->NumMethodIds();
+template<typename T>
+static T* AllocArray(Thread* self, LinearAlloc* alloc, size_t num) {
+ if (num == 0) {
+ return nullptr;
}
+ return reinterpret_cast<T*>(alloc->AllocAlign16(self, RoundUp(num * sizeof(T), 16)));
+}
- // Note that we allocate the method type dex caches regardless of this flag,
- // and we make sure here that they're not used by the runtime. This is in the
- // interest of simplicity and to avoid extensive compiler and layout class changes.
- //
- // If this needs to be mitigated in a production system running this code,
- // DexCache::kDexCacheMethodTypeCacheSize can be set to zero.
- MethodTypeDexCacheType* method_types = nullptr;
- size_t num_method_types = 0;
+void DexCache::InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc) {
+ DCHECK(GetDexFile() == nullptr);
+ DCHECK(GetStrings() == nullptr);
+ DCHECK(GetResolvedTypes() == nullptr);
+ DCHECK(GetResolvedMethods() == nullptr);
+ DCHECK(GetResolvedFields() == nullptr);
+ DCHECK(GetResolvedMethodTypes() == nullptr);
+ DCHECK(GetResolvedCallSites() == nullptr);
- if (dex_file->NumProtoIds() < kDexCacheMethodTypeCacheSize) {
- num_method_types = dex_file->NumProtoIds();
- } else {
- num_method_types = kDexCacheMethodTypeCacheSize;
- }
+ ScopedAssertNoThreadSuspension sants(__FUNCTION__);
+ Thread* self = Thread::Current();
+ const PointerSize image_pointer_size = kRuntimePointerSize;
- if (num_method_types > 0) {
- method_types = reinterpret_cast<MethodTypeDexCacheType*>(
- raw_arrays + layout.MethodTypesOffset());
- }
+ size_t num_strings = std::min<size_t>(kDexCacheStringCacheSize, dex_file->NumStringIds());
+ size_t num_types = std::min<size_t>(kDexCacheTypeCacheSize, dex_file->NumTypeIds());
+ size_t num_fields = std::min<size_t>(kDexCacheFieldCacheSize, dex_file->NumFieldIds());
+ size_t num_methods = std::min<size_t>(kDexCacheMethodCacheSize, dex_file->NumMethodIds());
+ size_t num_method_types = std::min<size_t>(kDexCacheMethodTypeCacheSize, dex_file->NumProtoIds());
+ size_t num_call_sites = dex_file->NumCallSiteIds(); // Full size.
- GcRoot<mirror::CallSite>* call_sites = (dex_file->NumCallSiteIds() == 0)
- ? nullptr
- : reinterpret_cast<GcRoot<CallSite>*>(raw_arrays + layout.CallSitesOffset());
+ static_assert(ArenaAllocator::kAlignment == 8, "Expecting arena alignment of 8.");
+ StringDexCacheType* strings =
+ AllocArray<StringDexCacheType>(self, linear_alloc, num_strings);
+ TypeDexCacheType* types =
+ AllocArray<TypeDexCacheType>(self, linear_alloc, num_types);
+ MethodDexCacheType* methods =
+ AllocArray<MethodDexCacheType>(self, linear_alloc, num_methods);
+ FieldDexCacheType* fields =
+ AllocArray<FieldDexCacheType>(self, linear_alloc, num_fields);
+ MethodTypeDexCacheType* method_types =
+ AllocArray<MethodTypeDexCacheType>(self, linear_alloc, num_method_types);
+ GcRoot<mirror::CallSite>* call_sites =
+ AllocArray<GcRoot<CallSite>>(self, linear_alloc, num_call_sites);
- DCHECK_ALIGNED(raw_arrays, alignof(StringDexCacheType)) <<
- "Expected raw_arrays to align to StringDexCacheType.";
- DCHECK_ALIGNED(layout.StringsOffset(), alignof(StringDexCacheType)) <<
+ DCHECK_ALIGNED(types, alignof(StringDexCacheType)) <<
"Expected StringsOffset() to align to StringDexCacheType.";
DCHECK_ALIGNED(strings, alignof(StringDexCacheType)) <<
"Expected strings to align to StringDexCacheType.";
@@ -158,9 +124,8 @@ void DexCache::InitializeDexCache(Thread* self,
if (method_types != nullptr) {
mirror::MethodTypeDexCachePair::Initialize(method_types);
}
- dex_cache->Init(dex_file,
- location,
- strings,
+ SetDexFile(dex_file);
+ SetNativeArrays(strings,
num_strings,
types,
num_types,
@@ -171,7 +136,12 @@ void DexCache::InitializeDexCache(Thread* self,
method_types,
num_method_types,
call_sites,
- dex_file->NumCallSiteIds());
+ num_call_sites);
+}
+
+void DexCache::ResetNativeFields() {
+ SetDexFile(nullptr);
+ SetNativeArrays(nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0, nullptr, 0);
}
void DexCache::VisitReflectiveTargets(ReflectiveValueVisitor* visitor) {
@@ -238,31 +208,24 @@ bool DexCache::AddPreResolvedStringsArray() {
return true;
}
-void DexCache::Init(const DexFile* dex_file,
- ObjPtr<String> location,
- StringDexCacheType* strings,
- uint32_t num_strings,
- TypeDexCacheType* resolved_types,
- uint32_t num_resolved_types,
- MethodDexCacheType* resolved_methods,
- uint32_t num_resolved_methods,
- FieldDexCacheType* resolved_fields,
- uint32_t num_resolved_fields,
- MethodTypeDexCacheType* resolved_method_types,
- uint32_t num_resolved_method_types,
- GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites) {
- CHECK(dex_file != nullptr);
- CHECK(location != nullptr);
+void DexCache::SetNativeArrays(StringDexCacheType* strings,
+ uint32_t num_strings,
+ TypeDexCacheType* resolved_types,
+ uint32_t num_resolved_types,
+ MethodDexCacheType* resolved_methods,
+ uint32_t num_resolved_methods,
+ FieldDexCacheType* resolved_fields,
+ uint32_t num_resolved_fields,
+ MethodTypeDexCacheType* resolved_method_types,
+ uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites) {
CHECK_EQ(num_strings != 0u, strings != nullptr);
CHECK_EQ(num_resolved_types != 0u, resolved_types != nullptr);
CHECK_EQ(num_resolved_methods != 0u, resolved_methods != nullptr);
CHECK_EQ(num_resolved_fields != 0u, resolved_fields != nullptr);
CHECK_EQ(num_resolved_method_types != 0u, resolved_method_types != nullptr);
CHECK_EQ(num_resolved_call_sites != 0u, resolved_call_sites != nullptr);
-
- SetDexFile(dex_file);
- SetLocation(location);
SetStrings(strings);
SetResolvedTypes(resolved_types);
SetResolvedMethods(resolved_methods);
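The new InitializeNativeFields() sizes each cache as the smaller of a fixed cap and the dex file's id count, and allocates it via AllocArray, which rounds the byte size up to a multiple of 16. A minimal sketch of that sizing and the hashed-slot lookup it enables, with hypothetical stand-ins (AllocArray16, StringSlot, kStringCacheCap):

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <cstring>

// Zero-initialized array of |num| elements, rounded up to a multiple of 16
// bytes and aligned to 16 bytes (a simplified stand-in for the
// LinearAlloc::AllocAlign16 call in AllocArray above).
template <typename T>
T* AllocArray16(size_t num) {
  if (num == 0) {
    return nullptr;  // Matches the "empty array => null pointer" convention.
  }
  size_t bytes = (num * sizeof(T) + 15u) & ~size_t{15u};
  void* raw = std::aligned_alloc(16, bytes);
  if (raw != nullptr) {
    std::memset(raw, 0, bytes);
  }
  return static_cast<T*>(raw);
}

// Stand-ins for StringDexCachePair and kDexCacheStringCacheSize.
struct StringSlot { uint32_t string_idx; void* string; };
constexpr size_t kStringCacheCap = 1024;

struct StringCacheSketch {
  StringSlot* slots = nullptr;
  size_t size = 0;

  // Cache size is the smaller of the fixed cap and the dex file's id count.
  void Initialize(size_t num_string_ids) {
    size = std::min(kStringCacheCap, num_string_ids);
    slots = AllocArray16<StringSlot>(size);
  }

  // Lookup hashes the id into the fixed-size cache (valid only when size != 0).
  StringSlot& SlotFor(uint32_t string_idx) { return slots[string_idx % size]; }
};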
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index 80cca4e737..2a16879efa 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -186,15 +186,14 @@ class MANAGED DexCache final : public Object {
return sizeof(DexCache);
}
- static void InitializeDexCache(Thread* self,
- ObjPtr<mirror::DexCache> dex_cache,
- ObjPtr<mirror::String> location,
- const DexFile* dex_file,
- LinearAlloc* linear_alloc,
- PointerSize image_pointer_size)
+ // Initialize native fields and allocate memory.
+ void InitializeNativeFields(const DexFile* dex_file, LinearAlloc* linear_alloc)
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::dex_lock_);
+ // Clear all native fields.
+ void ResetNativeFields() REQUIRES_SHARED(Locks::mutator_lock_);
+
template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -479,20 +478,18 @@ class MANAGED DexCache final : public Object {
void SetClassLoader(ObjPtr<ClassLoader> class_loader) REQUIRES_SHARED(Locks::mutator_lock_);
private:
- void Init(const DexFile* dex_file,
- ObjPtr<String> location,
- StringDexCacheType* strings,
- uint32_t num_strings,
- TypeDexCacheType* resolved_types,
- uint32_t num_resolved_types,
- MethodDexCacheType* resolved_methods,
- uint32_t num_resolved_methods,
- FieldDexCacheType* resolved_fields,
- uint32_t num_resolved_fields,
- MethodTypeDexCacheType* resolved_method_types,
- uint32_t num_resolved_method_types,
- GcRoot<CallSite>* resolved_call_sites,
- uint32_t num_resolved_call_sites)
+ void SetNativeArrays(StringDexCacheType* strings,
+ uint32_t num_strings,
+ TypeDexCacheType* resolved_types,
+ uint32_t num_resolved_types,
+ MethodDexCacheType* resolved_methods,
+ uint32_t num_resolved_methods,
+ FieldDexCacheType* resolved_fields,
+ uint32_t num_resolved_fields,
+ MethodTypeDexCacheType* resolved_method_types,
+ uint32_t num_resolved_method_types,
+ GcRoot<CallSite>* resolved_call_sites,
+ uint32_t num_resolved_call_sites)
REQUIRES_SHARED(Locks::mutator_lock_);
// std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
diff --git a/runtime/oat.h b/runtime/oat.h
index f43aa11c23..17d3838850 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@ class InstructionSetFeatures;
class PACKED(4) OatHeader {
public:
static constexpr std::array<uint8_t, 4> kOatMagic { { 'o', 'a', 't', '\n' } };
- // Last oat version changed reason: Deprecation of 'quicken'.
- static constexpr std::array<uint8_t, 4> kOatVersion { { '1', '8', '8', '\0' } };
+ // Last oat version changed reason: Remove DexCache arrays.
+ static constexpr std::array<uint8_t, 4> kOatVersion { { '1', '8', '9', '\0' } };
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
static constexpr const char* kDebuggableKey = "debuggable";
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index f42318be98..ac3c39219e 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -2975,14 +2975,6 @@ class Runtime::NotifyStartupCompletedTask : public gc::HeapTask {
{
ScopedTrace trace("Releasing app image spaces metadata");
ScopedObjectAccess soa(Thread::Current());
- for (gc::space::ContinuousSpace* space : runtime->GetHeap()->GetContinuousSpaces()) {
- if (space->IsImageSpace()) {
- gc::space::ImageSpace* image_space = space->AsImageSpace();
- if (image_space->GetImageHeader().IsAppImage()) {
- image_space->DisablePreResolvedStrings();
- }
- }
- }
// Request empty checkpoints to make sure no threads are accessing the image space metadata
// section when we madvise it. Use GC exclusion to prevent deadlocks that may happen if
// multiple threads are attempting to run empty checkpoints at the same time.
diff --git a/runtime/utils/dex_cache_arrays_layout-inl.h b/runtime/utils/dex_cache_arrays_layout-inl.h
deleted file mode 100644
index 3512efe080..0000000000
--- a/runtime/utils/dex_cache_arrays_layout-inl.h
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
-#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
-
-#include "dex_cache_arrays_layout.h"
-
-#include <android-base/logging.h>
-
-#include "base/bit_utils.h"
-#include "dex/primitive.h"
-#include "gc_root.h"
-#include "mirror/dex_cache.h"
-#include "runtime_globals.h"
-
-namespace art {
-
-inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size,
- const DexFile::Header& header,
- uint32_t num_call_sites)
- : pointer_size_(pointer_size),
- /* types_offset_ is always 0u, so it's constexpr */
- methods_offset_(
- RoundUp(types_offset_ + TypesSize(header.type_ids_size_), MethodsAlignment())),
- strings_offset_(
- RoundUp(methods_offset_ + MethodsSize(header.method_ids_size_), StringsAlignment())),
- fields_offset_(
- RoundUp(strings_offset_ + StringsSize(header.string_ids_size_), FieldsAlignment())),
- method_types_offset_(
- RoundUp(fields_offset_ + FieldsSize(header.field_ids_size_), MethodTypesAlignment())),
- call_sites_offset_(
- RoundUp(method_types_offset_ + MethodTypesSize(header.proto_ids_size_),
- MethodTypesAlignment())),
- size_(RoundUp(call_sites_offset_ + CallSitesSize(num_call_sites), Alignment())) {
-}
-
-inline DexCacheArraysLayout::DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file)
- : DexCacheArraysLayout(pointer_size, dex_file->GetHeader(), dex_file->NumCallSiteIds()) {
-}
-
-inline size_t DexCacheArraysLayout::Alignment() const {
- return Alignment(pointer_size_);
-}
-
-inline constexpr size_t DexCacheArraysLayout::Alignment(PointerSize pointer_size) {
- // mirror::Type/String/MethodTypeDexCacheType alignment is 8,
- // i.e. higher than or equal to the pointer alignment.
- static_assert(alignof(mirror::TypeDexCacheType) == 8,
- "Expecting alignof(ClassDexCacheType) == 8");
- static_assert(alignof(mirror::StringDexCacheType) == 8,
- "Expecting alignof(StringDexCacheType) == 8");
- static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
- "Expecting alignof(MethodTypeDexCacheType) == 8");
- // This is the same as alignof({Field,Method}DexCacheType) for the given pointer size.
- return 2u * static_cast<size_t>(pointer_size);
-}
-
-template <typename T>
-constexpr PointerSize GcRootAsPointerSize() {
- static_assert(sizeof(GcRoot<T>) == 4U, "Unexpected GcRoot size");
- return PointerSize::k32;
-}
-
-inline size_t DexCacheArraysLayout::TypeOffset(dex::TypeIndex type_idx) const {
- return types_offset_ + ElementOffset(PointerSize::k64,
- type_idx.index_ % mirror::DexCache::kDexCacheTypeCacheSize);
-}
-
-inline size_t DexCacheArraysLayout::TypesSize(size_t num_elements) const {
- size_t cache_size = mirror::DexCache::kDexCacheTypeCacheSize;
- if (num_elements < cache_size) {
- cache_size = num_elements;
- }
- return PairArraySize(GcRootAsPointerSize<mirror::Class>(), cache_size);
-}
-
-inline size_t DexCacheArraysLayout::TypesAlignment() const {
- return alignof(GcRoot<mirror::Class>);
-}
-
-inline size_t DexCacheArraysLayout::MethodOffset(uint32_t method_idx) const {
- return methods_offset_ + ElementOffset(pointer_size_, method_idx);
-}
-
-inline size_t DexCacheArraysLayout::MethodsSize(size_t num_elements) const {
- size_t cache_size = mirror::DexCache::kDexCacheMethodCacheSize;
- if (num_elements < cache_size) {
- cache_size = num_elements;
- }
- return PairArraySize(pointer_size_, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::MethodsAlignment() const {
- return 2u * static_cast<size_t>(pointer_size_);
-}
-
-inline size_t DexCacheArraysLayout::StringOffset(uint32_t string_idx) const {
- uint32_t string_hash = string_idx % mirror::DexCache::kDexCacheStringCacheSize;
- return strings_offset_ + ElementOffset(PointerSize::k64, string_hash);
-}
-
-inline size_t DexCacheArraysLayout::StringsSize(size_t num_elements) const {
- size_t cache_size = mirror::DexCache::kDexCacheStringCacheSize;
- if (num_elements < cache_size) {
- cache_size = num_elements;
- }
- return PairArraySize(GcRootAsPointerSize<mirror::String>(), cache_size);
-}
-
-inline size_t DexCacheArraysLayout::StringsAlignment() const {
- static_assert(alignof(mirror::StringDexCacheType) == 8,
- "Expecting alignof(StringDexCacheType) == 8");
- return alignof(mirror::StringDexCacheType);
-}
-
-inline size_t DexCacheArraysLayout::FieldOffset(uint32_t field_idx) const {
- uint32_t field_hash = field_idx % mirror::DexCache::kDexCacheFieldCacheSize;
- return fields_offset_ + 2u * static_cast<size_t>(pointer_size_) * field_hash;
-}
-
-inline size_t DexCacheArraysLayout::FieldsSize(size_t num_elements) const {
- size_t cache_size = mirror::DexCache::kDexCacheFieldCacheSize;
- if (num_elements < cache_size) {
- cache_size = num_elements;
- }
- return PairArraySize(pointer_size_, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::FieldsAlignment() const {
- return 2u * static_cast<size_t>(pointer_size_);
-}
-
-inline size_t DexCacheArraysLayout::MethodTypesSize(size_t num_elements) const {
- size_t cache_size = mirror::DexCache::kDexCacheMethodTypeCacheSize;
- if (num_elements < cache_size) {
- cache_size = num_elements;
- }
-
- return ArraySize(PointerSize::k64, cache_size);
-}
-
-inline size_t DexCacheArraysLayout::MethodTypesAlignment() const {
- static_assert(alignof(mirror::MethodTypeDexCacheType) == 8,
- "Expecting alignof(MethodTypeDexCacheType) == 8");
- return alignof(mirror::MethodTypeDexCacheType);
-}
-
-inline size_t DexCacheArraysLayout::CallSitesSize(size_t num_elements) const {
- return ArraySize(GcRootAsPointerSize<mirror::CallSite>(), num_elements);
-}
-
-inline size_t DexCacheArraysLayout::CallSitesAlignment() const {
- return alignof(GcRoot<mirror::CallSite>);
-}
-
-inline size_t DexCacheArraysLayout::ElementOffset(PointerSize element_size, uint32_t idx) {
- return static_cast<size_t>(element_size) * idx;
-}
-
-inline size_t DexCacheArraysLayout::ArraySize(PointerSize element_size, uint32_t num_elements) {
- return static_cast<size_t>(element_size) * num_elements;
-}
-
-inline size_t DexCacheArraysLayout::PairArraySize(PointerSize element_size, uint32_t num_elements) {
- return 2u * static_cast<size_t>(element_size) * num_elements;
-}
-
-} // namespace art
-
-#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_INL_H_
diff --git a/runtime/utils/dex_cache_arrays_layout.h b/runtime/utils/dex_cache_arrays_layout.h
deleted file mode 100644
index 6f689f334a..0000000000
--- a/runtime/utils/dex_cache_arrays_layout.h
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (C) 2015 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_
-#define ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_
-
-#include "dex/dex_file.h"
-#include "dex/dex_file_types.h"
-
-namespace art {
-
-/**
- * @class DexCacheArraysLayout
- * @details This class provides the layout information for the type, method, field and
- * string arrays for a DexCache with a fixed arrays' layout (such as in the boot image),
- */
-class DexCacheArraysLayout {
- public:
- // Construct an invalid layout.
- DexCacheArraysLayout()
- : /* types_offset_ is always 0u */
- pointer_size_(kRuntimePointerSize),
- methods_offset_(0u),
- strings_offset_(0u),
- fields_offset_(0u),
- method_types_offset_(0u),
- call_sites_offset_(0u),
- size_(0u) {
- }
-
- // Construct a layout for a particular dex file header.
- DexCacheArraysLayout(PointerSize pointer_size,
- const DexFile::Header& header,
- uint32_t num_call_sites);
-
- // Construct a layout for a particular dex file.
- DexCacheArraysLayout(PointerSize pointer_size, const DexFile* dex_file);
-
- bool Valid() const {
- return Size() != 0u;
- }
-
- size_t Size() const {
- return size_;
- }
-
- size_t Alignment() const;
-
- static constexpr size_t Alignment(PointerSize pointer_size);
-
- size_t TypesOffset() const {
- return types_offset_;
- }
-
- size_t TypeOffset(dex::TypeIndex type_idx) const;
-
- size_t TypesSize(size_t num_elements) const;
-
- size_t TypesAlignment() const;
-
- size_t MethodsOffset() const {
- return methods_offset_;
- }
-
- size_t MethodOffset(uint32_t method_idx) const;
-
- size_t MethodsSize(size_t num_elements) const;
-
- size_t MethodsAlignment() const;
-
- size_t StringsOffset() const {
- return strings_offset_;
- }
-
- size_t StringOffset(uint32_t string_idx) const;
-
- size_t StringsSize(size_t num_elements) const;
-
- size_t StringsAlignment() const;
-
- size_t FieldsOffset() const {
- return fields_offset_;
- }
-
- size_t FieldOffset(uint32_t field_idx) const;
-
- size_t FieldsSize(size_t num_elements) const;
-
- size_t FieldsAlignment() const;
-
- size_t MethodTypesOffset() const {
- return method_types_offset_;
- }
-
- size_t MethodTypesSize(size_t num_elements) const;
-
- size_t MethodTypesAlignment() const;
-
- size_t CallSitesOffset() const {
- return call_sites_offset_;
- }
-
- size_t CallSitesSize(size_t num_elements) const;
-
- size_t CallSitesAlignment() const;
-
- private:
- static constexpr size_t types_offset_ = 0u;
- const PointerSize pointer_size_; // Must be first for construction initialization order.
- const size_t methods_offset_;
- const size_t strings_offset_;
- const size_t fields_offset_;
- const size_t method_types_offset_;
- const size_t call_sites_offset_;
- const size_t size_;
-
- static size_t ElementOffset(PointerSize element_size, uint32_t idx);
-
- static size_t ArraySize(PointerSize element_size, uint32_t num_elements);
- static size_t PairArraySize(PointerSize element_size, uint32_t num_elements);
-};
-
-} // namespace art
-
-#endif // ART_RUNTIME_UTILS_DEX_CACHE_ARRAYS_LAYOUT_H_
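For reference, the deleted DexCacheArraysLayout packed every per-dex-file cache array into one block and derived each array's offset by rounding the previous array's end up to the next alignment. A much-simplified sketch of that offset chaining (PackedLayoutSketch and its fields are illustrative), kept only to show what the per-array allocations in InitializeNativeFields() replace:

#include <cstddef>

// Rounds |value| up to |alignment| (alignment must be a power of two).
constexpr size_t RoundUpTo(size_t value, size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

struct PackedLayoutSketch {
  size_t types_offset = 0;  // Types always came first, so this was constexpr 0.
  size_t methods_offset;
  size_t strings_offset;
  size_t size;

  PackedLayoutSketch(size_t types_bytes,
                     size_t methods_bytes,
                     size_t strings_bytes,
                     size_t alignment)
      : methods_offset(RoundUpTo(types_offset + types_bytes, alignment)),
        strings_offset(RoundUpTo(methods_offset + methods_bytes, alignment)),
        size(RoundUpTo(strings_offset + strings_bytes, alignment)) {}
};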