/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_H_

#include "code_generator.h"
#include "nodes.h"
#include "optimization.h"
#include "parallel_move_resolver.h"

namespace art {

class CompilerDriver;
class DexFile;

// Positive floating-point infinities.
static constexpr uint32_t kPositiveInfinityFloat = 0x7f800000U;
static constexpr uint64_t kPositiveInfinityDouble = UINT64_C(0x7ff0000000000000);

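// Quiet NaN bit patterns.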
static constexpr uint32_t kNanFloat = 0x7fc00000U;
static constexpr uint64_t kNanDouble = 0x7ff8000000000000;

// Recognize intrinsics from HInvoke nodes.
class IntrinsicsRecognizer : public HOptimization {
 public:
  IntrinsicsRecognizer(HGraph* graph,
                       OptimizingCompilerStats* stats,
                       const char* name = kIntrinsicsRecognizerPassName)
      : HOptimization(graph, name, stats) {}

  void Run() OVERRIDE;

  // Static helper that recognizes an intrinsic call. Returns true on success.
  // If recognition fails because of an invoke type mismatch, wrong_invoke_type is set.
  // Useful for recognizing intrinsics on individual calls outside this full pass.
  static bool Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type);
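  // A minimal call-site sketch (illustrative only, not part of this pass):
  //
  //   bool wrong_invoke_type = false;
  //   if (IntrinsicsRecognizer::Recognize(invoke, &wrong_invoke_type)) {
  //     // The call was recognized as an intrinsic.
  //   } else if (wrong_invoke_type) {
  //     // The method is intrinsic but was invoked with an unexpected invoke type.
  //   }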

  static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition";

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicsRecognizer);
};

class IntrinsicVisitor : public ValueObject {
 public:
  virtual ~IntrinsicVisitor() {}

  // Dispatch logic.

  void Dispatch(HInvoke* invoke) {
    switch (invoke->GetIntrinsic()) {
      case Intrinsics::kNone:
        return;
#define OPTIMIZING_INTRINSICS(Name, ...) \
      case Intrinsics::k ## Name:        \
        Visit ## Name(invoke);           \
        return;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
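      // For illustration, a hypothetical entry OPTIMIZING_INTRINSICS(Foo, ...) in
      // intrinsics_list.h expands to:
      //   case Intrinsics::kFoo:
      //     VisitFoo(invoke);
      //     return;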

      // Do not put a default case. That way the compiler will complain if we missed a case.
    }
  }

  // Define visitor methods.

#define OPTIMIZING_INTRINSICS(Name, ...) \
  virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
  }
#include "intrinsics_list.h"
  INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS

  static void MoveArguments(HInvoke* invoke,
                            CodeGenerator* codegen,
                            InvokeDexCallingConventionVisitor* calling_convention_visitor) {
    if (kIsDebugBuild && invoke->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
      // Explicit clinit checks triggered by static invokes must have been
      // pruned by art::PrepareForRegisterAllocation.
      DCHECK(!invoke_static_or_direct->IsStaticWithExplicitClinitCheck());
    }

    if (invoke->GetNumberOfArguments() == 0) {
      // No argument to move.
      return;
    }

    LocationSummary* locations = invoke->GetLocations();

    // We're moving potentially two or more locations to locations that could overlap, so we need
    // a parallel move resolver.
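    // For example, with two arguments the single HParallelMove built below contains the moves
    //   { InAt(0) -> first calling-convention location, InAt(1) -> second },
    // and the resolver schedules them so that no source is clobbered before it is read.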
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());

    for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
      HInstruction* input = invoke->InputAt(i);
      Location cc_loc = calling_convention_visitor->GetNextLocation(input->GetType());
      Location actual_loc = locations->InAt(i);

      parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
    }

    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
  }

  static void ComputeIntegerValueOfLocations(HInvoke* invoke,
                                             CodeGenerator* codegen,
                                             Location return_location,
                                             Location first_argument_location);

  // Temporary data structure holding data that is useful for the Integer.valueOf intrinsic.
  // We only use it if the mirror::Class* objects are in the boot image, so it is fine to keep
  // raw mirror::Class pointers in this structure.
  struct IntegerValueOfInfo {
    IntegerValueOfInfo()
        : integer_cache(nullptr),
          integer(nullptr),
          cache(nullptr),
          low(0),
          high(0),
          value_offset(0) {}

    // The java.lang.IntegerCache class.
    mirror::Class* integer_cache;
    // The java.lang.Integer class.
    mirror::Class* integer;
    // Value of java.lang.IntegerCache#cache.
    mirror::ObjectArray<mirror::Object>* cache;
    // Value of java.lang.IntegerCache#low.
    int32_t low;
    // Value of java.lang.IntegerCache#high.
    int32_t high;
    // The offset of java.lang.Integer.value.
    int32_t value_offset;
  };

  static IntegerValueOfInfo ComputeIntegerValueOfInfo();

 protected:
  IntrinsicVisitor() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicVisitor);
};

#define GENERIC_OPTIMIZATION(name, bit)                \
 public:                                               \
  void Set##name() { SetBit(k##name); }                \
  bool Get##name() const { return IsBitSet(k##name); } \
 private:                                              \
  static constexpr size_t k##name = bit
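// For illustration, the use GENERIC_OPTIMIZATION(DoesNotNeedDexCache, 0); in
// IntrinsicOptimizations below expands (roughly) to:
//  public:
//   void SetDoesNotNeedDexCache() { SetBit(kDoesNotNeedDexCache); }
//   bool GetDoesNotNeedDexCache() const { return IsBitSet(kDoesNotNeedDexCache); }
//  private:
//   static constexpr size_t kDoesNotNeedDexCache = 0;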

class IntrinsicOptimizations : public ValueObject {
 public:
  explicit IntrinsicOptimizations(HInvoke* invoke)
      : value_(invoke->GetIntrinsicOptimizations()) {}
  explicit IntrinsicOptimizations(const HInvoke& invoke)
      : value_(invoke.GetIntrinsicOptimizations()) {}

  static constexpr int kNumberOfGenericOptimizations = 2;
  GENERIC_OPTIMIZATION(DoesNotNeedDexCache, 0);
  GENERIC_OPTIMIZATION(DoesNotNeedEnvironment, 1);

 protected:
  bool IsBitSet(uint32_t bit) const {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    return (*value_ & (1 << bit)) != 0u;
  }

  void SetBit(uint32_t bit) {
    DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
    *(const_cast<uint32_t* const>(value_)) |= (1 << bit);
  }

 private:
  const uint32_t* const value_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicOptimizations);
};

#undef GENERIC_OPTIMIZATION

#define INTRINSIC_OPTIMIZATION(name, bit)              \
 public:                                               \
  void Set##name() { SetBit(k##name); }                \
  bool Get##name() const { return IsBitSet(k##name); } \
 private:                                              \
  static constexpr size_t k##name = (bit) + kNumberOfGenericOptimizations

class StringEqualsOptimizations : public IntrinsicOptimizations {
 public:
  explicit StringEqualsOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(ArgumentNotNull, 0);
  INTRINSIC_OPTIMIZATION(ArgumentIsString, 1);
  INTRINSIC_OPTIMIZATION(NoReadBarrierForStringClass, 2);

 private:
  DISALLOW_COPY_AND_ASSIGN(StringEqualsOptimizations);
};

class SystemArrayCopyOptimizations : public IntrinsicOptimizations {
 public:
  explicit SystemArrayCopyOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(SourceIsNotNull, 0);
  INTRINSIC_OPTIMIZATION(DestinationIsNotNull, 1);
  INTRINSIC_OPTIMIZATION(DestinationIsSource, 2);
  INTRINSIC_OPTIMIZATION(CountIsSourceLength, 3);
  INTRINSIC_OPTIMIZATION(CountIsDestinationLength, 4);
  INTRINSIC_OPTIMIZATION(DoesNotNeedTypeCheck, 5);
  INTRINSIC_OPTIMIZATION(DestinationIsTypedObjectArray, 6);
  INTRINSIC_OPTIMIZATION(DestinationIsNonPrimitiveArray, 7);
  INTRINSIC_OPTIMIZATION(DestinationIsPrimitiveArray, 8);
  INTRINSIC_OPTIMIZATION(SourceIsNonPrimitiveArray, 9);
  INTRINSIC_OPTIMIZATION(SourceIsPrimitiveArray, 10);

 private:
  DISALLOW_COPY_AND_ASSIGN(SystemArrayCopyOptimizations);
};

#undef INTRINSIC_OPTIMIZATION

//
// Macros for use in the intrinsics code generators.
//

// Defines an unimplemented intrinsic: that is, a method call that is recognized as an
// intrinsic (e.g. so that its lack of side effects or exceptions can be exploited), but
// that is otherwise not handled by this architecture-specific intrinsics code generator.
// Such a call is eventually compiled as a regular method call.
#define UNIMPLEMENTED_INTRINSIC(Arch, Name)                                               \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                         \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}
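// For illustration, UNIMPLEMENTED_INTRINSIC(X86, Foo) (with "Foo" a hypothetical intrinsic
// name) expands to empty bodies for IntrinsicLocationsBuilderX86::VisitFoo and
// IntrinsicCodeGeneratorX86::VisitFoo, so such an invoke falls back to a regular call.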

// Defines an unreached intrinsic: that is, a method call that is recognized as an
// intrinsic but is always converted into HIR instructions before it reaches any
// architecture-specific intrinsics code generator. The UNREACHABLE_INTRINSICS(Arch)
// macro below applies this to the common set of such intrinsics.
#define UNREACHABLE_INTRINSIC(Arch, Name)                                \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke) { \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()      \
             << " should have been converted to HIR";                    \
}                                                                        \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke) {    \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic()      \
             << " should have been converted to HIR";                    \
}
#define UNREACHABLE_INTRINSICS(Arch) \
UNREACHABLE_INTRINSIC(Arch, FloatFloatToIntBits) \
UNREACHABLE_INTRINSIC(Arch, DoubleDoubleToLongBits) \
UNREACHABLE_INTRINSIC(Arch, FloatIsNaN) \
UNREACHABLE_INTRINSIC(Arch, DoubleIsNaN) \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateLeft) \
UNREACHABLE_INTRINSIC(Arch, LongRotateLeft) \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateRight) \
UNREACHABLE_INTRINSIC(Arch, LongRotateRight) \
UNREACHABLE_INTRINSIC(Arch, IntegerCompare) \
UNREACHABLE_INTRINSIC(Arch, LongCompare) \
UNREACHABLE_INTRINSIC(Arch, IntegerSignum) \
UNREACHABLE_INTRINSIC(Arch, LongSignum) \
UNREACHABLE_INTRINSIC(Arch, StringCharAt) \
UNREACHABLE_INTRINSIC(Arch, StringIsEmpty) \
UNREACHABLE_INTRINSIC(Arch, StringLength) \
UNREACHABLE_INTRINSIC(Arch, UnsafeLoadFence) \
UNREACHABLE_INTRINSIC(Arch, UnsafeStoreFence) \
UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence) \
UNREACHABLE_INTRINSIC(Arch, VarHandleFullFence) \
UNREACHABLE_INTRINSIC(Arch, VarHandleAcquireFence) \
UNREACHABLE_INTRINSIC(Arch, VarHandleReleaseFence) \
UNREACHABLE_INTRINSIC(Arch, VarHandleLoadLoadFence) \
UNREACHABLE_INTRINSIC(Arch, VarHandleStoreStoreFence) \
UNREACHABLE_INTRINSIC(Arch, MethodHandleInvokeExact) \
UNREACHABLE_INTRINSIC(Arch, MethodHandleInvoke) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchange) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndSet) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGet) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAdd) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAnd) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOr) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXor) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSet) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetOpaque) \
UNREACHABLE_INTRINSIC(Arch, VarHandleGetVolatile) \
UNREACHABLE_INTRINSIC(Arch, VarHandleSet) \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetOpaque) \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetRelease) \
UNREACHABLE_INTRINSIC(Arch, VarHandleSetVolatile) \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSet) \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetAcquire) \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetPlain) \
UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetRelease)

template <typename IntrinsicLocationsBuilder, typename Codegenerator>
bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
  if (invoke->GetIntrinsic() != Intrinsics::kNone) {
    // This invoke may have intrinsic code generation defined. However, we must
    // now also determine whether that code generation is truly there and call-free
    // (not unimplemented, does not bail out on instruction features, and makes no
    // call on the slow path). This is done by actually calling the locations builder
    // on the instruction and clearing out the locations once the result is known.
    // We assume this call only has creating locations as side effects!
    // TODO: Avoid wasting Arena memory.
    IntrinsicLocationsBuilder builder(codegen);
    bool success = builder.TryDispatch(invoke) && !invoke->GetLocations()->CanCall();
    invoke->SetLocations(nullptr);
    return success;
  }
  return false;
}
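// A hypothetical use (the architecture-specific type names are illustrative):
//
//   if (IsCallFreeIntrinsic<IntrinsicLocationsBuilderX86, CodeGeneratorX86>(invoke, codegen)) {
//     // The intrinsic can be generated inline, without any runtime or slow-path call.
//   }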

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_H_