Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "intrinsics.h" |
| 18 | |
Andreas Gampe | a1d2f95 | 2017-04-20 22:53:58 -0700 | [diff] [blame] | 19 | #include "art_field-inl.h" |
Andreas Gampe | c6ea7d0 | 2017-02-01 16:46:28 -0800 | [diff] [blame] | 20 | #include "art_method-inl.h" |
David Sehr | c431b9d | 2018-03-02 12:01:51 -0800 | [diff] [blame] | 21 | #include "base/utils.h" |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 22 | #include "class_linker.h" |
Vladimir Marko | 5868ada | 2020-05-12 11:50:34 +0100 | [diff] [blame] | 23 | #include "class_root-inl.h" |
Vladimir Marko | de91ca9 | 2020-10-27 13:41:40 +0000 | [diff] [blame] | 24 | #include "code_generator.h" |
David Sehr | 8c0961f | 2018-01-23 16:11:38 -0800 | [diff] [blame] | 25 | #include "dex/invoke_type.h" |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 26 | #include "driver/compiler_options.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 27 | #include "gc/space/image_space.h" |
| 28 | #include "image-inl.h" |
| 29 | #include "intrinsic_objects.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 30 | #include "nodes.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 31 | #include "obj_ptr-inl.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 32 | #include "scoped_thread_state_change-inl.h" |
Andreas Gampe | b486a98 | 2017-06-01 13:45:54 -0700 | [diff] [blame] | 33 | #include "thread-current-inl.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 34 | |
Vladimir Marko | 434d968 | 2022-11-04 14:04:17 +0000 | [diff] [blame] | 35 | namespace art HIDDEN { |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 36 | |
// Pretty-printer for the Intrinsics enumeration, used by logging/debug output.
// kNone is handled explicitly; every other enumerator gets a case generated by
// expanding OPTIMIZING_INTRINSICS over the full list in "intrinsics_list.h",
// printing the bare enumerator name without the leading 'k'.
std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
  switch (intrinsic) {
    case Intrinsics::kNone:
      os << "None";
      break;
// Generates `case Intrinsics::kName: os << "Name"; break;` for each intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      os << # Name; \
      break;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
// Clean up the list macros so later includes of the list header start fresh.
#undef STATIC_INTRINSICS_LIST
#undef VIRTUAL_INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return os;
}
| 54 | |
// Descriptors and field names used to locate java.lang.Integer and its nested
// java.lang.Integer$IntegerCache class (and their fields) when optimizing
// Integer.valueOf(). `constexpr` (instead of `static const`) makes these true
// compile-time constants while keeping internal linkage.
constexpr char kIntegerCacheDescriptor[] = "Ljava/lang/Integer$IntegerCache;";
constexpr char kIntegerDescriptor[] = "Ljava/lang/Integer;";
constexpr char kIntegerArrayDescriptor[] = "[Ljava/lang/Integer;";
constexpr char kLowFieldName[] = "low";
constexpr char kHighFieldName[] = "high";
constexpr char kValueFieldName[] = "value";
| 61 | |
// Returns the BootImageLiveObjects array stored as an image root of the primary
// (first) boot image space. Requires a loaded boot image (DCHECKed below).
// The root is read without a read barrier; the array is asserted to live in the
// boot image space, so it is non-moveable.
static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  // The live-objects root is recorded in the header of the first boot image space.
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}
| 75 | |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 76 | static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self, |
| 77 | ClassLinker* class_linker, |
| 78 | const char* descriptor) |
| 79 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 80 | ObjPtr<mirror::Class> klass = |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 81 | class_linker->LookupClass(self, descriptor, /* class_loader= */ nullptr); |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 82 | DCHECK(klass != nullptr); |
| 83 | DCHECK(klass->IsInitialized()); |
| 84 | return klass; |
| 85 | } |
| 86 | |
| 87 | static ObjPtr<mirror::ObjectArray<mirror::Object>> GetIntegerCacheArray( |
| 88 | ObjPtr<mirror::Class> cache_class) REQUIRES_SHARED(Locks::mutator_lock_) { |
| 89 | ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", kIntegerArrayDescriptor); |
| 90 | DCHECK(cache_field != nullptr); |
| 91 | return ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class)); |
| 92 | } |
| 93 | |
| 94 | static int32_t GetIntegerCacheField(ObjPtr<mirror::Class> cache_class, const char* field_name) |
| 95 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 96 | ArtField* field = cache_class->FindDeclaredStaticField(field_name, "I"); |
| 97 | DCHECK(field != nullptr); |
| 98 | return field->GetInt(cache_class); |
| 99 | } |
| 100 | |
// Verifies that the runtime's current IntegerCache state still matches the
// cache captured in the boot image (`boot_image_cache`). Returns false if the
// cache array reference, the low/high range, the cached objects, or their
// `value` fields have been altered (e.g. through reflection) since the image
// was built; in that case the Integer.valueOf() intrinsic fast path is unsafe.
static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  ObjPtr<mirror::Class> integer_class =
      LookupInitializedClass(self, class_linker, kIntegerDescriptor);

  // Check that the current cache is the same as the `boot_image_cache`.
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
  int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i]
    }
    // Element i must box exactly the value `low + i`.
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}
| 148 | |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 149 | static bool CanReferenceBootImageObjects(HInvoke* invoke, const CompilerOptions& compiler_options) { |
| 150 | // Piggyback on the method load kind to determine whether we can use PC-relative addressing |
| 151 | // for AOT. This should cover both the testing config (non-PIC boot image) and codegens that |
| 152 | // reject PC-relative load kinds and fall back to the runtime call. |
| 153 | if (compiler_options.IsAotCompiler() && |
Vladimir Marko | cde6497 | 2023-04-25 16:40:06 +0000 | [diff] [blame] | 154 | !invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) { |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 155 | return false; |
| 156 | } |
| 157 | if (!compiler_options.IsBootImage() && |
| 158 | Runtime::Current()->GetHeap()->GetBootImageSpaces().empty()) { |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 159 | return false; // Running without boot image, cannot use required boot image objects. |
| 160 | } |
| 161 | return true; |
| 162 | } |
| 163 | |
// Sets up the LocationSummary for the Integer.valueOf() intrinsic, or bails
// out (creating no locations) when the intrinsic cannot be safely emitted.
// The call kind is downgraded from kCallOnMainOnly to kNoCall when the input
// is a constant that is provably inside the IntegerCache range, so codegen can
// use a direct pointer to the cached boot image Integer object.
void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  // The intrinsic will call if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
  if (!CanReferenceBootImageObjects(invoke, compiler_options)) {
    return;
  }
  HInstruction* const input = invoke->InputAt(0);
  if (compiler_options.IsBootImage()) {
    // Compiling the boot image itself: both classes must be image classes and
    // already initialized, otherwise skip the intrinsic.
    if (!compiler_options.IsImageClass(kIntegerCacheDescriptor) ||
        !compiler_options.IsImageClass(kIntegerDescriptor)) {
      return;
    }
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
        self, kIntegerCacheDescriptor, /* class_loader= */ nullptr);
    DCHECK(cache_class != nullptr);
    if (UNLIKELY(!cache_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << cache_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    ObjPtr<mirror::Class> integer_class =
        class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader= */ nullptr);
    DCHECK(integer_class != nullptr);
    if (UNLIKELY(!integer_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << integer_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    // Debug-only sanity check: cache length matches [low, high] and each
    // element boxes the expected consecutive value.
    if (kIsDebugBuild) {
      ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
      CHECK(current_cache != nullptr);
      CHECK_EQ(current_cache->GetLength(), high - low + 1);
      ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
      CHECK(value_field != nullptr);
      for (int32_t i = 0, len = current_cache->GetLength(); i != len; ++i) {
        ObjPtr<mirror::Object> current_object = current_cache->GetWithoutChecks(i);
        CHECK(current_object != nullptr);
        CHECK_EQ(value_field->GetInt(current_object), low + i);
      }
    }
    if (input->IsIntConstant()) {
      int32_t value = input->AsIntConstant()->GetValue();
      // Unsigned trick: `value - low < length` checks low <= value <= high.
      if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
          static_cast<uint32_t>(high - low + 1)) {
        // No call, we shall use direct pointer to the Integer object.
        call_kind = LocationSummary::kNoCall;
      }
    }
  } else {
    // AOT app compilation or JIT: use the cache recorded in the boot image.
    Runtime* runtime = Runtime::Current();
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (compiler_options.IsJitCompiler()) {
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      DCHECK(compiler_options.IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (input->IsIntConstant()) {
        int32_t value = input->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    // Slow path may call the runtime: keep the argument flexible and reserve
    // the first argument register as a temp for the call.
    locations->SetInAt(0, Location::RegisterOrConstant(input));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    // Constant in cache range: no call, just materialize the object pointer.
    locations->SetInAt(0, Location::ConstantLocation(input));
    locations->SetOut(Location::RequiresRegister());
  }
}
| 267 | |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 268 | static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self, ClassLinker* class_linker) |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 269 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 270 | ObjPtr<mirror::Class> cache_class = |
| 271 | LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); |
| 272 | return GetIntegerCacheField(cache_class, kLowFieldName); |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 273 | } |
| 274 | |
// Default-initializes the info to a "no data" state: zero offset/range and an
// invalid boot image reference, so uninitialized use is detectable by codegen.
inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : value_offset(0),
      low(0),
      length(0u),
      value_boot_image_reference(kInvalidReference) {}
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 280 | |
// Collects the data codegen needs to emit the Integer.valueOf() intrinsic:
// the Integer.value field offset, the cache range [low, low + length), and
// either a boot image reference/patch for the exact cached object (constant
// input in range) or for the cache array data (non-constant input).
IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(
    HInvoke* invoke, const CompilerOptions& compiler_options) {
  // Note that we could cache all of the data looked up here. but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  IntegerValueOfInfo info;
  if (compiler_options.IsBootImage()) {
    // Boot image compilation: read the range from the IntegerCache class and
    // emit patches to be resolved when the image objects get their addresses.
    ObjPtr<mirror::Class> integer_class = invoke->GetResolvedMethod()->GetDeclaringClass();
    DCHECK(integer_class->DescriptorEquals(kIntegerDescriptor));
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    ObjPtr<mirror::Class> cache_class =
        LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
    info.low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    info.length = dchecked_integral_cast<uint32_t>(high - info.low + 1);

    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      // Unsigned trick: `input_value - low < length` checks the cache range.
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
            IntrinsicObjects::PatchType::kIntegerValueOfObject, index);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kIntegerValueOfArray);
    }
  } else {
    // App AOT / JIT: use offsets into the already-loaded boot image.
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::Object> low_integer =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
    ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    if (compiler_options.IsJitCompiler()) {
      // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
      // code messes up the `value` field in the lowest cached Integer using reflection.
      info.low = GetIntegerCacheLowFromIntegerCache(self, class_linker);
    } else {
      // For app AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
      info.low = value_field->GetInt(low_integer);
      DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self, class_linker));
    }
    // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
    info.length = dchecked_integral_cast<uint32_t>(
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        ObjPtr<mirror::Object> integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
        info.value_boot_image_reference = CodeGenerator::GetBootImageOffset(integer);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      // Non-constant input: reference the cache array data for a runtime lookup.
      info.array_data_boot_image_reference =
          CodeGenerator::GetBootImageOffset(boot_image_live_objects) +
          IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
    }
  }

  return info;
}
| 365 | |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 366 | MemberOffset IntrinsicVisitor::GetReferenceDisableIntrinsicOffset() { |
| 367 | ScopedObjectAccess soa(Thread::Current()); |
| 368 | // The "disableIntrinsic" is the first static field. |
| 369 | ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(0); |
| 370 | DCHECK_STREQ(field->GetName(), "disableIntrinsic"); |
| 371 | return field->GetOffset(); |
| 372 | } |
| 373 | |
| 374 | MemberOffset IntrinsicVisitor::GetReferenceSlowPathEnabledOffset() { |
| 375 | ScopedObjectAccess soa(Thread::Current()); |
| 376 | // The "slowPathEnabled" is the second static field. |
| 377 | ArtField* field = GetClassRoot<mirror::Reference>()->GetStaticField(1); |
| 378 | DCHECK_STREQ(field->GetName(), "slowPathEnabled"); |
| 379 | return field->GetOffset(); |
| 380 | } |
| 381 | |
| 382 | void IntrinsicVisitor::CreateReferenceGetReferentLocations(HInvoke* invoke, |
| 383 | CodeGenerator* codegen) { |
| 384 | if (!CanReferenceBootImageObjects(invoke, codegen->GetCompilerOptions())) { |
| 385 | return; |
| 386 | } |
| 387 | |
| 388 | ArenaAllocator* allocator = codegen->GetGraph()->GetAllocator(); |
| 389 | LocationSummary* locations = |
| 390 | new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
| 391 | locations->SetInAt(0, Location::RequiresRegister()); |
| 392 | locations->SetOut(Location::RequiresRegister()); |
| 393 | } |
| 394 | |
Vladimir Marko | ac27ac0 | 2021-02-01 09:31:02 +0000 | [diff] [blame] | 395 | void IntrinsicVisitor::CreateReferenceRefersToLocations(HInvoke* invoke) { |
Lokesh Gidra | ca5ed9f | 2022-04-20 01:39:28 +0000 | [diff] [blame] | 396 | if (gUseReadBarrier && !kUseBakerReadBarrier) { |
Vladimir Marko | ac27ac0 | 2021-02-01 09:31:02 +0000 | [diff] [blame] | 397 | // Unimplemented for non-Baker read barrier. |
| 398 | return; |
| 399 | } |
| 400 | |
| 401 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); |
| 402 | LocationSummary* locations = |
| 403 | new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
| 404 | locations->SetInAt(0, Location::RequiresRegister()); |
| 405 | locations->SetInAt(1, Location::RequiresRegister()); |
| 406 | locations->SetOut(Location::RequiresRegister()); |
| 407 | } |
| 408 | |
Roland Levillain | 1d775d2 | 2018-09-07 13:56:57 +0100 | [diff] [blame] | 409 | void IntrinsicVisitor::AssertNonMovableStringClass() { |
| 410 | if (kIsDebugBuild) { |
Vladimir Marko | 89cbeb6 | 2019-04-09 10:51:05 +0100 | [diff] [blame] | 411 | ScopedObjectAccess soa(Thread::Current()); |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 412 | ObjPtr<mirror::Class> string_class = GetClassRoot<mirror::String>(); |
Roland Levillain | 1d775d2 | 2018-09-07 13:56:57 +0100 | [diff] [blame] | 413 | CHECK(!art::Runtime::Current()->GetHeap()->IsMovableObject(string_class)); |
| 414 | } |
| 415 | } |
| 416 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 417 | } // namespace art |