/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "callee_save_frame.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"

namespace art {

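// When true, the non-instrumented TLAB entrypoints below try a fast path first: bump-allocate
// directly from the calling thread's TLAB, and fall back to the generic allocation helpers when
// the class is not trivially allocatable or the TLAB is too small.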
static constexpr bool kUseTlabFastPath = true;

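// Generates the extern "C" allocation entrypoints for one (allocator type, instrumented) pair.
// For the non-instrumented TLAB allocator, object allocation first tries the inline fast path:
// the class must be resolved, initialized and non-finalizable, the rounded-up object size must
// fit in the current TLAB, and the new object is published after a constructor fence. All other
// cases fall through to the generic helpers from entrypoint_utils-inl.h.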
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerOrBrooksReadBarrier) { \
        if (kUseBrooksReadBarrier) { \
          obj->SetReadBarrierPointer(obj); \
        } \
        obj->AssertReadBarrierPointer(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<false, instrumented_bool>(type_idx, method, component_count, self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, int32_t component_count, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, method, component_count, self, \
                                                              allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<true, instrumented_bool>(type_idx, method, component_count, self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, false, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, method, component_count, self, false, allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, true, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, method, component_count, self, true, allocator_type); \
  } \
}

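// Produces both the instrumented and the non-instrumented flavor of the entrypoints for a given
// allocator.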
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

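// Instantiate the entrypoints for each supported allocator type.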
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)

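// Forward-declares the art_quick_alloc_* entry stubs (implemented elsewhere, typically in
// architecture-specific assembly) and defines SetQuickAllocEntryPoints##suffix, which points the
// allocation slots of a QuickEntryPoints table at either the instrumented or the plain stubs.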
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array_resolved##suffix(void* klass, void*, int32_t); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, void* method); \
extern "C" void* art_quick_alloc_object_resolved##suffix(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_initialized##suffix(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, void* method); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(void* klass, void*, int32_t); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, void* method); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, void* method); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, void*, int32_t); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
  } \
}

// Generate the entrypoint functions.
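// Skipped on the 64-bit Apple host build (hence the guard below), where the quick stubs are not
// available.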
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
#endif

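// The most recently requested allocator and instrumentation state; they take effect the next time
// ResetQuickAllocEntryPoints() repopulates a QuickEntryPoints table.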
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}

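// Rewrites the allocation slots of the given QuickEntryPoints table to match the currently
// selected allocator and instrumentation state (e.g. after the heap switches allocators or method
// instrumentation is toggled).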
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) {
  switch (entry_points_allocator) {
#if !defined(__APPLE__) || !defined(__LP64__)
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      break;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      break;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      break;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      break;
    }
#endif
    default: {
      LOG(FATAL) << "Unimplemented";
    }
  }
}

}  // namespace art