/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_IMAGE_WRITER_H_
#define ART_COMPILER_IMAGE_WRITER_H_

#include <stdint.h>
#include "base/memory_tool.h"

#include <cstddef>
#include <memory>
#include <set>
#include <stack>
#include <string>
#include <ostream>

#include "art_method.h"
#include "base/bit_utils.h"
#include "base/dchecked_vector.h"
#include "base/enums.h"
#include "base/length_prefixed_array.h"
#include "base/macros.h"
#include "driver/compiler_driver.h"
#include "gc/space/space.h"
#include "image.h"
#include "lock_word.h"
#include "mem_map.h"
#include "mirror/dex_cache.h"
#include "obj_ptr.h"
#include "oat_file.h"
#include "os.h"
#include "safe_map.h"
#include "utils.h"

namespace art {
namespace gc {
namespace space {
class ImageSpace;
}  // namespace space
}  // namespace gc

namespace mirror {
class ClassLoader;
}  // namespace mirror

class ClassLoaderVisitor;
class ClassTable;
class ImtConflictTable;

static constexpr int kInvalidFd = -1;

// Write a Space built during compilation for use during execution.
class ImageWriter FINAL {
 public:
  ImageWriter(const CompilerDriver& compiler_driver,
              uintptr_t image_begin,
              bool compile_pic,
              bool compile_app_image,
              ImageHeader::StorageMode image_storage_mode,
              const std::vector<const char*>& oat_filenames,
              const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map);

  bool PrepareImageAddressSpace();

  bool IsImageAddressSpaceReady() const {
    DCHECK(!image_infos_.empty());
    for (const ImageInfo& image_info : image_infos_) {
      if (image_info.image_roots_address_ == 0u) {
        return false;
      }
    }
    return true;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() {
    CHECK_EQ(class_loaders_.size(), compile_app_image_ ? 1u : 0u);
    return compile_app_image_ ? *class_loaders_.begin() : nullptr;
  }

  template <typename T>
  T* GetImageAddress(T* object) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (object == nullptr || IsInBootImage(object)) {
      return object;
    } else {
      size_t oat_index = GetOatIndex(object);
      const ImageInfo& image_info = GetImageInfo(oat_index);
      return reinterpret_cast<T*>(image_info.image_begin_ + GetImageOffset(object));
    }
  }

  ArtMethod* GetImageMethodAddress(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename PtrType>
  PtrType GetDexCacheArrayElementImageAddress(const DexFile* dex_file, uint32_t offset)
      const REQUIRES_SHARED(Locks::mutator_lock_) {
    auto oat_it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(oat_it != dex_file_oat_index_map_.end());
    const ImageInfo& image_info = GetImageInfo(oat_it->second);
    auto it = image_info.dex_cache_array_starts_.find(dex_file);
    DCHECK(it != image_info.dex_cache_array_starts_.end());
    return reinterpret_cast<PtrType>(
        image_info.image_begin_ + image_info.bin_slot_offsets_[kBinDexCacheArray] +
        it->second + offset);
  }

  size_t GetOatFileOffset(size_t oat_index) const {
    return GetImageInfo(oat_index).oat_offset_;
  }

  const uint8_t* GetOatFileBegin(size_t oat_index) const {
    return GetImageInfo(oat_index).oat_file_begin_;
  }

  // If image_fd is not kInvalidFd, then we use that for the image file. Otherwise we open
  // the names in image_filenames.
  // If oat_fd is not kInvalidFd, then we use that for the oat file. Otherwise we open
  // the names in oat_filenames.
  bool Write(int image_fd,
             const std::vector<const char*>& image_filenames,
             const std::vector<const char*>& oat_filenames)
      REQUIRES(!Locks::mutator_lock_);

  uintptr_t GetOatDataBegin(size_t oat_index) {
    return reinterpret_cast<uintptr_t>(GetImageInfo(oat_index).oat_data_begin_);
  }

  // Get the index of the oat file containing the dex file.
  //
142 // This "oat_index" is used to retrieve information about the the memory layout
  // of the oat file and its associated image file, needed for link-time patching
  // of references to the image or across oat files.
  size_t GetOatIndexForDexFile(const DexFile* dex_file) const;

  // Get the index of the oat file containing the dex file served by the dex cache.
  size_t GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update the oat layout for the given oat file.
  // This will make the oat_offset for the next oat file valid.
  void UpdateOatFileLayout(size_t oat_index,
                           size_t oat_loaded_size,
                           size_t oat_data_offset,
                           size_t oat_data_size);
  // Update information about the oat header, i.e. checksum and trampoline offsets.
  void UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header);

 private:
  using WorkStack = std::stack<std::pair<mirror::Object*, size_t>>;

  bool AllocMemory();

  // Mark the objects defined in this space in the given live bitmap.
  void RecordImageAllocations() REQUIRES_SHARED(Locks::mutator_lock_);

  // Classify different kinds of bins that objects end up getting packed into during image writing.
  // Ordered from dirtiest to cleanest (until ArtMethods).
  enum Bin {
    kBinMiscDirty,  // Dex caches, object locks, etc...
    kBinClassVerified,  // Class verified, but initializers haven't been run
    // Unknown mix of clean/dirty:
    kBinRegular,
    kBinClassInitialized,  // Class initializers have been run
    // All classes get their own bins since their fields are often dirty
    kBinClassInitializedFinalStatics,  // Class initializers have been run, no non-final statics
    // Likely-clean:
    kBinString,  // [String] Almost always immutable (except for obj header).
    // Add more bins here if we add more segregation code.
    // Non mirror fields must be below.
    // ArtFields should always be clean.
    kBinArtField,
    // If the class is initialized, then the ArtMethods are probably clean.
    kBinArtMethodClean,
    // ArtMethods may be dirty if the class has native methods or a declaring class that isn't
    // initialized.
    kBinArtMethodDirty,
    // IMT (clean)
    kBinImTable,
    // Conflict tables (clean).
    kBinIMTConflictTable,
    // Runtime methods (always clean, do not have a length prefix array).
    kBinRuntimeMethod,
    // Dex cache arrays have a special slot for PC-relative addressing. Since they are huge,
    // and their dirtiness is therefore not important for the clean/dirty separation,
    // we arbitrarily keep them at the end of the native data.
    kBinDexCacheArray,  // Arrays belonging to dex cache.
    kBinSize,
    // Number of bins which are for mirror objects.
    kBinMirrorCount = kBinArtField,
  };
  friend std::ostream& operator<<(std::ostream& stream, const Bin& bin);

  enum NativeObjectRelocationType {
    kNativeObjectRelocationTypeArtField,
    kNativeObjectRelocationTypeArtFieldArray,
    kNativeObjectRelocationTypeArtMethodClean,
    kNativeObjectRelocationTypeArtMethodArrayClean,
    kNativeObjectRelocationTypeArtMethodDirty,
    kNativeObjectRelocationTypeArtMethodArrayDirty,
    kNativeObjectRelocationTypeRuntimeMethod,
    kNativeObjectRelocationTypeIMTable,
    kNativeObjectRelocationTypeIMTConflictTable,
    kNativeObjectRelocationTypeDexCacheArray,
  };
  friend std::ostream& operator<<(std::ostream& stream, const NativeObjectRelocationType& type);

  enum OatAddress {
    kOatAddressInterpreterToInterpreterBridge,
    kOatAddressInterpreterToCompiledCodeBridge,
    kOatAddressJNIDlsymLookup,
    kOatAddressQuickGenericJNITrampoline,
    kOatAddressQuickIMTConflictTrampoline,
    kOatAddressQuickResolutionTrampoline,
    kOatAddressQuickToInterpreterBridge,
    // Number of elements in the enum.
    kOatAddressCount,
  };
  friend std::ostream& operator<<(std::ostream& stream, const OatAddress& oat_address);

  static constexpr size_t kBinBits = MinimumBitsToStore<uint32_t>(kBinMirrorCount - 1);
  // uint32 = typeof(lockword_)
  // Subtract read barrier bits since we want these to remain 0, or else it may result in DCHECK
  // failures due to invalid read barrier bits during object field reads.
  static const size_t kBinShift = BitSizeOf<uint32_t>() - kBinBits - LockWord::kGCStateSize;
  // 111000.....0
  static const size_t kBinMask = ((static_cast<size_t>(1) << kBinBits) - 1) << kBinShift;
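  // Illustrative worked example (not part of the original header): kBinMirrorCount is 6 here, so
  // kBinBits = MinimumBitsToStore(5) = 3; assuming LockWord::kGCStateSize is 2, this gives
  // kBinShift = 32 - 3 - 2 = 27 and kBinMask = 0b111 << 27 = 0x38000000, i.e. the bin number is
  // stored just below the (kept zero) GC state bits.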

  // We use the lock word to store the bin # and bin index of the object in the image.
  //
  // The struct size must be exactly sizeof(LockWord), currently 32-bits, since this will end up
  // stored in the lock word bit-for-bit when object forwarding addresses are being calculated.
  struct BinSlot {
    explicit BinSlot(uint32_t lockword);
    BinSlot(Bin bin, uint32_t index);

    // The bin an object belongs to, i.e. regular, class/verified, class/initialized, etc.
    Bin GetBin() const;
    // The offset in bytes from the beginning of the bin. Aligned to object size.
    uint32_t GetIndex() const;
    // Pack into a single uint32_t, for storing into a lock word.
    uint32_t Uint32Value() const { return lockword_; }
    // Comparison operator for map support
    bool operator<(const BinSlot& other) const { return lockword_ < other.lockword_; }

   private:
    // Must be the same size as LockWord, any larger and we would truncate the data.
    const uint32_t lockword_;
  };

  struct ImageInfo {
    ImageInfo();
    ImageInfo(ImageInfo&&) = default;

    // Create the image sections into the out sections variable, returns the size of the image
    // excluding the bitmap.
    size_t CreateImageSections(ImageSection* out_sections) const;

    std::unique_ptr<MemMap> image_;  // Memory mapped for generating the image.

    // Target begin of this image. Note: it is not valid to write here; this is the address
    // of the target image, not necessarily where image_ is mapped. The address is only valid
    // after layout (otherwise null).
    uint8_t* image_begin_ = nullptr;

    // Offset to the free space in image_, initially size of image header.
    size_t image_end_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);
    uint32_t image_roots_address_ = 0;  // The image roots address in the image.
    size_t image_offset_ = 0;  // Offset of this image from the start of the first image.

    // Image size is the *address space* covered by this image. As the live bitmap is aligned
    // to the page size, the live bitmap will cover more address space than necessary. But live
    // bitmaps may not overlap, so an image has a "shadow," which is accounted for in the size.
    // The next image may only start at image_begin_ + image_size_ (which is guaranteed to be
    // page-aligned).
    size_t image_size_ = 0;

    // Oat data.
    // Offset of the oat file for this image from start of oat files. This is
    // valid when the previous oat file has been written.
    size_t oat_offset_ = 0;
    // Layout of the loaded ELF file containing the oat file, valid after UpdateOatFileLayout().
    const uint8_t* oat_file_begin_ = nullptr;
    size_t oat_loaded_size_ = 0;
    const uint8_t* oat_data_begin_ = nullptr;
    size_t oat_size_ = 0;  // Size of the corresponding oat data.
    // The oat header checksum, valid after UpdateOatFileHeader().
    uint32_t oat_checksum_ = 0u;

    // Image bitmap which lets us know where the objects inside of the image reside.
    std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> image_bitmap_;

    // The start offsets of the dex cache arrays.
    SafeMap<const DexFile*, size_t> dex_cache_array_starts_;

    // Offset from oat_data_begin_ to the stubs.
    uint32_t oat_address_offsets_[kOatAddressCount] = {};

    // Bin slot tracking for dirty object packing.
    size_t bin_slot_sizes_[kBinSize] = {};  // Number of bytes in a bin.
    size_t bin_slot_offsets_[kBinSize] = {};  // Number of bytes in previous bins.
    size_t bin_slot_count_[kBinSize] = {};  // Number of objects in a bin.

    // Cached size of the intern table for when we allocate memory.
    size_t intern_table_bytes_ = 0;

    // Number of image class table bytes.
    size_t class_table_bytes_ = 0;

    // Number of object fixup bytes.
    size_t object_fixup_bytes_ = 0;

    // Number of pointer fixup bytes.
    size_t pointer_fixup_bytes_ = 0;

    // Intern table associated with this image for serialization.
    std::unique_ptr<InternTable> intern_table_;

    // Class table associated with this image for serialization.
    std::unique_ptr<ClassTable> class_table_;
  };
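
  // Illustrative note (not part of the original header): bin_slot_offsets_ hold the byte offset
  // at which each bin starts (the sum of the preceding bins' sizes), which is how
  // GetDexCacheArrayElementImageAddress() above composes a target address:
  // image_begin_ + bin_slot_offsets_[kBinDexCacheArray] + per-dex-file start + element offset.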

  // We use the lock word to store the offset of the object in the image.
  void AssignImageOffset(mirror::Object* object, BinSlot bin_slot)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SetImageOffset(mirror::Object* object, size_t offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsImageOffsetAssigned(mirror::Object* object) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  size_t GetImageOffset(mirror::Object* object) const REQUIRES_SHARED(Locks::mutator_lock_);
  void UpdateImageOffset(mirror::Object* obj, uintptr_t offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PrepareDexCacheArraySlots() REQUIRES_SHARED(Locks::mutator_lock_);
  void AssignImageBinSlot(mirror::Object* object, size_t oat_index)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* TryAssignBinSlot(WorkStack& work_stack, mirror::Object* obj, size_t oat_index)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SetImageBinSlot(mirror::Object* object, BinSlot bin_slot)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsImageBinSlotAssigned(mirror::Object* object) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  BinSlot GetImageBinSlot(mirror::Object* object) const REQUIRES_SHARED(Locks::mutator_lock_);

  void AddDexCacheArrayRelocation(void* array, size_t offset, ObjPtr<mirror::DexCache> dex_cache)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void AddMethodPointerArray(mirror::PointerArray* arr) REQUIRES_SHARED(Locks::mutator_lock_);

  static void* GetImageAddressCallback(void* writer, mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return reinterpret_cast<ImageWriter*>(writer)->GetImageAddress(obj);
  }

  mirror::Object* GetLocalAddress(mirror::Object* object) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t offset = GetImageOffset(object);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    uint8_t* dst = image_info.image_->Begin() + offset;
    return reinterpret_cast<mirror::Object*>(dst);
  }

  // Returns the address in the boot image if we are compiling the app image.
  const uint8_t* GetOatAddress(OatAddress type) const;

  const uint8_t* GetOatAddressForOffset(uint32_t offset, const ImageInfo& image_info) const {
    // With Quick, code is within the OatFile, as they are all in one
    // .o ELF object. But interpret the offset as signed.
    DCHECK_LE(static_cast<int32_t>(offset), static_cast<int32_t>(image_info.oat_size_));
    DCHECK(image_info.oat_data_begin_ != nullptr);
    return offset == 0u ? nullptr : image_info.oat_data_begin_ + static_cast<int32_t>(offset);
  }

  // Returns true if the class was in the original requested image classes list.
  bool KeepClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);

  // Debug aid that lists the requested image classes.
  void DumpImageClasses();

  // Preinitializes some otherwise lazy fields (such as Class name) to avoid runtime image dirtying.
  void ComputeLazyFieldsForImageClasses()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit all class loaders.
  void VisitClassLoaders(ClassLoaderVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Remove unwanted classes from various roots.
  void PruneNonImageClasses() REQUIRES_SHARED(Locks::mutator_lock_);

  // Remove unwanted classes from the DexCache roots and preload deterministic DexCache contents.
  void PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                               ObjPtr<mirror::ClassLoader> class_loader)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::classlinker_classes_lock_);

  // Verify unwanted classes removed.
  void CheckNonImageClassesRemoved() REQUIRES_SHARED(Locks::mutator_lock_);
  static void CheckNonImageClassesRemovedCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lays out where the image objects will be at runtime.
  void CalculateNewObjectOffsets()
      REQUIRES_SHARED(Locks::mutator_lock_);
  void ProcessWorkStack(WorkStack* work_stack)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CreateHeader(size_t oat_index)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::ObjectArray<mirror::Object>* CreateImageRoots(size_t oat_index) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CalculateObjectBinSlots(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void UnbinObjectsIntoOffset(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void EnsureBinSlotAssignedCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void DeflateMonitorCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::mutator_lock_);
  static void UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Creates the contiguous image in memory and adjusts pointers.
  void CopyAndFixupNativeData(size_t oat_index) REQUIRES_SHARED(Locks::mutator_lock_);
  void CopyAndFixupObjects() REQUIRES_SHARED(Locks::mutator_lock_);
  static void CopyAndFixupObjectsCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CopyAndFixupObject(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);
  void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy, const ImageInfo& image_info)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CopyAndFixupImTable(ImTable* orig, ImTable* copy) REQUIRES_SHARED(Locks::mutator_lock_);
  void CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FixupClass(mirror::Class* orig, mirror::Class* copy)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FixupObject(mirror::Object* orig, mirror::Object* copy)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FixupDexCache(mirror::DexCache* orig_dex_cache, mirror::DexCache* copy_dex_cache)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FixupPointerArray(mirror::Object* dst,
                         mirror::PointerArray* arr,
                         mirror::Class* klass,
                         Bin array_type)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get quick code for non-resolution/imt_conflict/abstract method.
  const uint8_t* GetQuickCode(ArtMethod* method,
                              const ImageInfo& image_info,
                              bool* quick_is_interpreted)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Calculate the sum total of the bin slot sizes in [0, up_to). Defaults to all bins.
  size_t GetBinSizeSum(ImageInfo& image_info, Bin up_to = kBinSize) const;

  // Return true if a method is likely to be dirtied at runtime.
  bool WillMethodBeDirty(ArtMethod* m) const REQUIRES_SHARED(Locks::mutator_lock_);

  // Assign the offset for an ArtMethod.
  void AssignMethodOffset(ArtMethod* method,
                          NativeObjectRelocationType type,
                          size_t oat_index)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if imt was newly inserted.
  bool TryAssignImTableOffset(ImTable* imt, size_t oat_index) REQUIRES_SHARED(Locks::mutator_lock_);

  // Assign the offset for an IMT conflict table. Does nothing if the table already has a native
  // relocation.
  void TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if klass is loaded by the boot class loader but not in the boot image.
  bool IsBootClassLoaderNonImageClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if klass depends on a boot class loader non image class. We want to prune these
  // classes since we do not want any boot class loader classes in the image. This means that
  // we also cannot have any classes which refer to these boot class loader non image classes.
  // PruneAppImageClass also prunes if klass depends on a non-image class according to the compiler
  // driver.
  bool PruneAppImageClass(ObjPtr<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // early_exit is true if we had a cyclic dependency anywhere down the chain.
  bool PruneAppImageClassInternal(ObjPtr<mirror::Class> klass,
                                  bool* early_exit,
                                  std::unordered_set<mirror::Class*>* visited)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsMultiImage() const {
    return image_infos_.size() > 1;
  }

  static Bin BinTypeForNativeRelocationType(NativeObjectRelocationType type);

  uintptr_t NativeOffsetInImage(void* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  // Location of where the object will be when the image is loaded at runtime.
  template <typename T>
  T* NativeLocationInImage(T* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  // Location of where the temporary copy of the object currently is.
  template <typename T>
  T* NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if obj is inside of the boot image space. This may only return true if we are
  // compiling an app image.
  bool IsInBootImage(const void* obj) const;

  // Return true if ptr is within the boot oat file.
  bool IsInBootOatFile(const void* ptr) const;

  // Get the index of the oat file associated with the object.
  size_t GetOatIndex(mirror::Object* object) const REQUIRES_SHARED(Locks::mutator_lock_);

  // The oat index for shared data in multi-image and all data in single-image compilation.
  size_t GetDefaultOatIndex() const {
    return 0u;
  }

  ImageInfo& GetImageInfo(size_t oat_index) {
    return image_infos_[oat_index];
  }

  const ImageInfo& GetImageInfo(size_t oat_index) const {
    return image_infos_[oat_index];
  }

  // Find an already strong interned string in the other images or in the boot image. Used to
  // remove duplicates in the multi image and app image case.
  mirror::String* FindInternedString(mirror::String* string) REQUIRES_SHARED(Locks::mutator_lock_);

  // Return true if there already exists a native allocation for an object.
  bool NativeRelocationAssigned(void* ptr) const;

  void CopyReference(mirror::HeapReference<mirror::Object>* dest, ObjPtr<mirror::Object> src)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyReference(mirror::CompressedReference<mirror::Object>* dest, ObjPtr<mirror::Object> src)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyAndFixupPointer(void** target, void* value);

  const CompilerDriver& compiler_driver_;

  // Beginning target image address for the first image.
  uint8_t* global_image_begin_;

  // Offset from image_begin_ to where the first object is in image_.
  size_t image_objects_offset_begin_;

  // Pointer arrays that need to be updated. Since these are only some int and long arrays, we need
  // to keep track. These include vtable arrays, iftable arrays, and dex caches.
  std::unordered_map<mirror::PointerArray*, Bin> pointer_arrays_;

  // Saved hash codes. We use these to restore lock words which were temporarily used to hold
  // forwarding addresses, as well as to copy over hash codes.
  std::unordered_map<mirror::Object*, uint32_t> saved_hashcode_map_;

  // Oat index map for objects.
  std::unordered_map<mirror::Object*, uint32_t> oat_index_map_;

  // Boolean flags.
  const bool compile_pic_;
  const bool compile_app_image_;

  // Size of pointers on the target architecture.
  PointerSize target_ptr_size_;

  // Image data indexed by the oat file index.
  dchecked_vector<ImageInfo> image_infos_;

  // ArtField, ArtMethod relocating map. These are allocated as arrays of structs but we want to
  // have one entry per ArtField for convenience. ArtFields are placed right after the end of the
  // image objects (aka sum of bin_slot_sizes_). ArtMethods are placed right after the ArtFields.
  struct NativeObjectRelocation {
    size_t oat_index;
    uintptr_t offset;
    NativeObjectRelocationType type;

    bool IsArtMethodRelocation() const {
      return type == kNativeObjectRelocationTypeArtMethodClean ||
          type == kNativeObjectRelocationTypeArtMethodDirty ||
          type == kNativeObjectRelocationTypeRuntimeMethod;
    }
  };
  std::unordered_map<void*, NativeObjectRelocation> native_object_relocations_;

  // Runtime ArtMethods which aren't reachable from any Class but need to be copied into the image.
  ArtMethod* image_methods_[ImageHeader::kImageMethodsCount];

  // Counters for measurements, used for logging only.
  uint64_t dirty_methods_;
  uint64_t clean_methods_;

  // Prune class memoization table to speed up ContainsBootClassLoaderNonImageClass.
  std::unordered_map<mirror::Class*, bool> prune_class_memo_;

  // Class loaders with a class table to write out. There should only be one class loader because
  // dex2oat loads the dex files to be compiled into a single class loader. For the boot image,
  // null is a valid entry.
  std::unordered_set<mirror::ClassLoader*> class_loaders_;

  // Which mode the image is stored as, see image.h
  const ImageHeader::StorageMode image_storage_mode_;

  // The file names of oat files.
  const std::vector<const char*>& oat_filenames_;

  // Map of dex files to the indexes of oat files that they were compiled into.
  const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map_;

  class ComputeLazyFieldsForClassesVisitor;
  class FixupClassVisitor;
  class FixupRootVisitor;
  class FixupVisitor;
  class GetRootsVisitor;
  class ImageAddressVisitorForDexCacheArray;
  class NativeLocationVisitor;
  class PruneClassesVisitor;
  class PruneClassLoaderClassesVisitor;
  class RegisterBootClassPathClassesVisitor;
  class VisitReferencesVisitor;

  DISALLOW_COPY_AND_ASSIGN(ImageWriter);
};

}  // namespace art

#endif  // ART_COMPILER_IMAGE_WRITER_H_