/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#ifndef ART_SRC_HEAP_H_
#define ART_SRC_HEAP_H_

#include <vector>

#include "globals.h"
#include "object_bitmap.h"
#include "offsets.h"

#define VERIFY_OBJECT_ENABLED 0

namespace art {

class Class;
class Mutex;
class Object;
class Space;
class HeapBitmap;

class Heap {
 public:
  static const size_t kInitialSize = 4 * MB;
  static const size_t kMaximumSize = 16 * MB;

  typedef void (RootVisitor)(const Object* root, void* arg);

  // Creates a heap with the requested sizes. The possibly empty
  // image_file_names specify Spaces to load based on ImageWriter output.
  static void Init(size_t starting_size, size_t maximum_size,
                   const std::vector<std::string>& image_file_names);

  static void Destroy();
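
  // Illustrative sketch, not part of the original header: a minimal
  // start-up/shutdown sequence a caller might use, here seeding Init with the
  // class's own default sizes and an (possibly empty) image list.
  //
  //   std::vector<std::string> image_file_names;  // empty: no image Spaces
  //   Heap::Init(Heap::kInitialSize, Heap::kMaximumSize, image_file_names);
  //   ...
  //   Heap::Destroy();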

  // Allocates and initializes storage for an object instance.
  static Object* AllocObject(Class* klass, size_t num_bytes);

  // Check sanity of given reference. Requires the heap lock.
#if VERIFY_OBJECT_ENABLED
  static void VerifyObject(const Object* obj);
#else
  static void VerifyObject(const Object* obj) {}
#endif

  // Check sanity of all live references. Requires the heap lock.
  static void VerifyHeap();

  // A weaker test than VerifyObject that doesn't require the heap lock,
  // and doesn't abort on error, allowing the caller to report more
  // meaningful diagnostics.
  static bool IsHeapAddress(const Object* obj);

  // Initiates an explicit garbage collection.
  static void CollectGarbage();

  // Implements java.lang.Runtime.maxMemory.
  static int64_t GetMaxMemory();
  // Implements java.lang.Runtime.totalMemory.
  static int64_t GetTotalMemory();
  // Implements java.lang.Runtime.freeMemory.
  static int64_t GetFreeMemory();

  // Implements VMDebug.countInstancesOfClass.
  static int64_t CountInstances(Class* c, bool count_assignable);

  // Implements dalvik.system.VMRuntime.clearGrowthLimit.
  static void ClearGrowthLimit() {
    UNIMPLEMENTED(WARNING);
  }

  // Implements dalvik.system.VMRuntime.getTargetHeapUtilization.
  static float GetTargetHeapUtilization() {
    return target_utilization_;
  }

  // Implements dalvik.system.VMRuntime.setTargetHeapUtilization.
  static void SetTargetHeapUtilization(float target) {
    target_utilization_ = target;
  }
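
  // Illustrative note, not from the original header: the utilization ratio is
  // conventionally read as live-bytes / footprint, so after a collection the
  // heap might size its next footprint roughly as below (a sketch only; the
  // actual policy presumably lives in GrowForUtilization()):
  //
  //   size_t target = static_cast<size_t>(num_bytes_allocated_ /
  //                                       target_utilization_);
  //   Heap::SetIdealFootprint(target);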

  // Sets the maximum number of bytes that the heap is allowed to allocate
  // from the system. Clamps to the appropriate maximum value.
  static void SetIdealFootprint(size_t max_allowed_footprint);

  // Blocks the caller until the garbage collector becomes idle.
  static void WaitForConcurrentGcToComplete();

  static void Lock();
  static void Unlock();
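
  // Illustrative sketch, not part of the original header: operations that are
  // documented above as requiring the heap lock would typically be bracketed
  // like this (obj is a hypothetical reference under inspection):
  //
  //   Heap::Lock();
  //   Heap::VerifyObject(obj);
  //   Heap::Unlock();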

  static const std::vector<Space*>& GetSpaces() {
    return spaces_;
  }

  static HeapBitmap* GetLiveBits() {
    return live_bitmap_;
  }

  static HeapBitmap* GetMarkBits() {
    return mark_bitmap_;
  }

  static void SetWellKnownClasses(Class* java_lang_ref_FinalizerReference,
                                  Class* java_lang_ref_ReferenceQueue);

  static void SetReferenceOffsets(MemberOffset reference_referent_offset,
                                  MemberOffset reference_queue_offset,
                                  MemberOffset reference_queueNext_offset,
                                  MemberOffset reference_pendingNext_offset,
                                  MemberOffset finalizer_reference_zombie_offset);

  static Object* GetReferenceReferent(Object* reference);
  static void ClearReferenceReferent(Object* reference);

  // Returns true if the reference object has not yet been enqueued.
  static bool IsEnqueuable(const Object* ref);

  static void EnqueueReference(Object* ref, Object** list);
  static void EnqueuePendingReference(Object* ref, Object** list);
  static Object* DequeuePendingReference(Object** list);
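
  // Illustrative sketch, not part of the original header: the pending-
  // reference helpers above operate on an intrusive list threaded through
  // Reference.pendingNext, with the list head owned by the caller. Typical
  // use might look like this (cleared_list is a hypothetical local):
  //
  //   Object* cleared_list = NULL;
  //   Heap::EnqueuePendingReference(ref, &cleared_list);
  //   ...
  //   Object* head = Heap::DequeuePendingReference(&cleared_list);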

  static MemberOffset GetReferencePendingNextOffset() {
    DCHECK_NE(reference_pendingNext_offset_.Uint32Value(), 0U);
    return reference_pendingNext_offset_;
  }

  static MemberOffset GetFinalizerReferenceZombieOffset() {
    DCHECK_NE(finalizer_reference_zombie_offset_.Uint32Value(), 0U);
    return finalizer_reference_zombie_offset_;
  }

  static void EnableObjectValidation() {
    verify_objects_ = true;
  }

  static void DisableObjectValidation() {
    verify_objects_ = false;
  }

  // Callers must hold the heap lock.
  static void RecordFreeLocked(Space* space, const Object* object);

  // Must be called if a field of an Object in the heap changes, and before
  // any GC safe-point. The call is not needed if NULL is stored in the field.
  static void WriteBarrier(const Object* object) {
#ifdef CONCURRENT_GARBAGE_COLLECTOR
    // TODO: we need card marking for a concurrent collector.
    UNIMPLEMENTED(FATAL);
#endif
  }
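
  // Illustrative sketch, not part of the original header: a caller storing a
  // reference into a heap object's field would notify the heap afterwards
  // (SetFieldObject and field_offset are hypothetical stand-ins here):
  //
  //   obj->SetFieldObject(field_offset, new_value);
  //   if (new_value != NULL) {
  //     Heap::WriteBarrier(obj);
  //   }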

  static void AddFinalizerReference(Object* object);

 private:
  // Allocates uninitialized storage.
  static Object* AllocateLocked(size_t num_bytes);
  static Object* AllocateLocked(Space* space, size_t num_bytes);

  // Pushes a list of cleared references out to the managed heap.
  static void EnqueueClearedReferences(Object** cleared_references);

  static void RecordAllocationLocked(Space* space, const Object* object);
  static void RecordImageAllocations(Space* space);

  static void CollectGarbageInternal();

  static void GrowForUtilization();

  static void VerifyObjectLocked(const Object* obj);

  static void VerificationCallback(Object* obj, void* arg);

  static Mutex* lock_;

  static std::vector<Space*> spaces_;

  // default Space for allocations
  static Space* alloc_space_;

  static HeapBitmap* mark_bitmap_;
  static HeapBitmap* live_bitmap_;

  // The maximum size of the heap in bytes.
  static size_t maximum_size_;

  // True while the garbage collector is running.
  static bool is_gc_running_;

  // Number of bytes allocated. Adjusted after each allocation and free.
  static size_t num_bytes_allocated_;

  // Number of objects allocated. Adjusted after each allocation and free.
  static size_t num_objects_allocated_;

  static Class* java_lang_ref_FinalizerReference_;
  static Class* java_lang_ref_ReferenceQueue_;

  // offset of java.lang.ref.Reference.referent
  static MemberOffset reference_referent_offset_;
  // offset of java.lang.ref.Reference.queue
  static MemberOffset reference_queue_offset_;
  // offset of java.lang.ref.Reference.queueNext
  static MemberOffset reference_queueNext_offset_;
  // offset of java.lang.ref.Reference.pendingNext
  static MemberOffset reference_pendingNext_offset_;
  // offset of java.lang.ref.FinalizerReference.zombie
  static MemberOffset finalizer_reference_zombie_offset_;

  // Target ideal heap utilization ratio
  static float target_utilization_;

  static bool verify_objects_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(Heap);
};

}  // namespace art

#endif  // ART_SRC_HEAP_H_