/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "data_type-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/reference.h"
#include "mirror/string.h"
#include "mirror/var_handle.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : allocator_(codegen->GetGraph()->GetAllocator()),
      codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = DataType::Size(DataType::Type::kReference);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register src = locations->InAt(0).AsRegister<Register>();
    Location src_pos = locations->InAt(1);
    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Location length = locations->InAt(4);
    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();
    Register temp3 = locations->GetTemp(2).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // Unlike other SystemArrayCopy intrinsic code paths, this slow path
    // does not use `temp1`, `temp2` and `temp3` for the base source
    // address, the base destination address and the end source address.
    // Instead:
    // - `temp1` holds the loop index (`i`);
    // - `temp2` holds the source index (`src_index`) and the loaded
    //   (source) reference (`value`); and
    // - `temp3` holds the destination index (`dest_index`).

    // i = 0
    __ xorl(temp1, temp1);
    NearLabel loop;
    __ Bind(&loop);
    // value = src_array[i + src_pos]
    if (src_pos.IsConstant()) {
      int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
    } else {
      __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
    }
    __ MaybeUnpoisonHeapReference(temp2);
    // TODO: Inline the mark bit check before calling the runtime?
    // value = ReadBarrier::Mark(value)
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(temp2, ESP);
    DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
    int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(temp2);
    // dest_array[i + dest_pos] = value
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
    } else {
      __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
    }
    // ++i
    __ addl(temp1, Immediate(1));
    // if (i != length) goto loop
    x86_codegen->GenerateIntCompare(temp1_loc, length);
    __ j(kNotEqual, &loop);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierSystemArrayCopySlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
};

#undef __

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
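    // Shift the upper 32 bits of the double down into the low lane so the
    // second movd can extract them into the high half of the register pair.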
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
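    // punpckldq interleaves the low doublewords of temp1 and temp2, leaving
    // temp1 holding (high:low), i.e. the reassembled 64-bit bit pattern.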
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            DataType::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case DataType::Type::kInt16:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
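      // bswapl reverses all four bytes, which leaves the byte-swapped 16-bit
      // value in the upper half; the arithmetic shift then moves it back
      // down while sign-extending the result.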
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case DataType::Type::kInt32:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* allocator,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (!codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    return;
  }

  CreateFPToFPLocations(allocator, invoke);
}

static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86Assembler* assembler, int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(!locations->WillCall());
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
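  // The ROUNDSD immediate selects the rounding mode: 0 rounds to nearest
  // (even), 1 rounds toward negative infinity (floor) and 2 rounds toward
  // positive infinity (ceil).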
  __ roundsd(out, in, Immediate(round_mode));
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 0);
}

void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (!codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    return;
  }

  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(
          static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(!locations->WillCall());

  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  NearLabel skip_incr, done;
  X86Assembler* assembler = GetAssembler();

  // Since no direct x86 rounding instruction matches the required semantics,
  // this intrinsic is implemented as follows:
  //  result = floor(in);
  //  if (in - result >= 0.5f)
  //    result = result + 1.0f;
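  // For example, in = 2.5f gives floor 2.0f and diff 0.5f, so the result is
  // 3; in = -2.5f gives floor -3.0f and diff 0.5f, so the result is -2,
  // matching Math.round's round-half-up semantics.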
  __ movss(t2, in);
  __ roundss(t1, in, Immediate(1));
  __ subss(t2, t1);
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    // Direct constant area available.
    HX86ComputeBaseMethodAddress* method_address =
        invoke->InputAt(1)->AsX86ComputeBaseMethodAddress();
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f),
                                                method_address,
                                                constant_area));
    __ j(kBelow, &skip_incr);
    __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f),
                                               method_address,
                                               constant_area));
    __ Bind(&skip_incr);
  } else {
    // No constant area: go through stack.
    __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
    __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
    __ comiss(t2, Address(ESP, 4));
    __ j(kBelow, &skip_incr);
    __ addss(t1, Address(ESP, 0));
    __ Bind(&skip_incr);
    __ addl(ESP, Immediate(8));
  }

  // Final conversion to an integer. Unfortunately this also does not have a
  // direct x86 instruction, since NaN should map to 0 and large positive
  // values need to be clipped to the extreme value.
  __ movl(out, Immediate(kPrimIntMax));
  __ cvtsi2ss(t2, out);
  __ comiss(t1, t2);
  __ j(kAboveEqual, &done);  // clipped to max (already in out), does not jump on unordered
  __ movl(out, Immediate(0));  // does not change flags
  __ j(kUnordered, &done);  // NaN mapped to 0 (just moved in out)
  __ cvttss2si(out, t1);
  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());

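  // These entrypoints follow the 32-bit cdecl convention and leave their
  // result on the x87 FP stack, in ST(0), so pop it to memory and reload it
  // into XMM0 where the caller expects it.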
  // Extract the return value from the FP stack.
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);
}

static void CreateLowestOneBitLocations(ArenaAllocator* allocator, bool is_long, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenLowestOneBit(X86Assembler* assembler,
                            CodeGeneratorX86* codegen,
                            bool is_long,
                            HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Location out_loc = locations->Out();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      if (is_long) {
        __ xorl(out_loc.AsRegisterPairLow<Register>(), out_loc.AsRegisterPairLow<Register>());
        __ xorl(out_loc.AsRegisterPairHigh<Register>(), out_loc.AsRegisterPairHigh<Register>());
      } else {
        __ xorl(out_loc.AsRegister<Register>(), out_loc.AsRegister<Register>());
      }
      return;
    }
    // Nonzero value.
    value = is_long ? CTZ(static_cast<uint64_t>(value))
                    : CTZ(static_cast<uint32_t>(value));
    if (is_long) {
      if (value >= 32) {
        int shift = value - 32;
        codegen->Load32BitValue(out_loc.AsRegisterPairLow<Register>(), 0);
        codegen->Load32BitValue(out_loc.AsRegisterPairHigh<Register>(), 1 << shift);
      } else {
        codegen->Load32BitValue(out_loc.AsRegisterPairLow<Register>(), 1 << value);
        codegen->Load32BitValue(out_loc.AsRegisterPairHigh<Register>(), 0);
      }
    } else {
      codegen->Load32BitValue(out_loc.AsRegister<Register>(), 1 << value);
    }
    return;
  }
  // Handle the non-constant case.
  if (is_long) {
    DCHECK(src.IsRegisterPair());
    Register src_lo = src.AsRegisterPairLow<Register>();
    Register src_hi = src.AsRegisterPairHigh<Register>();

    Register out_lo = out_loc.AsRegisterPairLow<Register>();
    Register out_hi = out_loc.AsRegisterPairHigh<Register>();

    __ movl(out_lo, src_lo);
    __ movl(out_hi, src_hi);

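    // Negate the 64-bit pair: negl sets the carry flag when the low word is
    // nonzero, adcl folds that borrow into the high word, and the second
    // negl completes the two's-complement negation.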
    __ negl(out_lo);
    __ adcl(out_hi, Immediate(0));
    __ negl(out_hi);

    __ andl(out_lo, src_lo);
    __ andl(out_hi, src_hi);
  } else {
    if (codegen->GetInstructionSetFeatures().HasAVX2() && src.IsRegister()) {
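      // blsi (a BMI1 instruction, gated here on the AVX2 feature check)
      // computes out = src & -src in a single instruction.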
      Register out = out_loc.AsRegister<Register>();
      __ blsi(out, src.AsRegister<Register>());
    } else {
      Register out = out_loc.AsRegister<Register>();
      // Compute out = src & -src, which isolates the lowest set bit.
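      // For example, src = 0b10100 gives -src = ...11101100, and
      // src & -src = 0b00100, the lowest set bit of src.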
      if (src.IsRegister()) {
        __ movl(out, src.AsRegister<Register>());
      } else {
        DCHECK(src.IsStackSlot());
        __ movl(out, Address(ESP, src.GetStackIndex()));
      }
      __ negl(out);

      if (src.IsRegister()) {
        __ andl(out, src.AsRegister<Register>());
      } else {
        __ andl(out, Address(ESP, src.GetStackIndex()));
      }
    }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderX86::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateLowestOneBitLocations(allocator_, /*is_long=*/ false, invoke);
}
void IntrinsicCodeGeneratorX86::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(GetAssembler(), codegen_, /*is_long=*/ false, invoke);
}

void IntrinsicLocationsBuilderX86::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateLowestOneBitLocations(allocator_, /*is_long=*/ true, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(GetAssembler(), codegen_, /*is_long=*/ true, invoke);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void CreateFPFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 3U);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetInAt(2, Location::RequiresFpuRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathPow(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickPow);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

static void CreateSystemArrayCopyLocations(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (invoke->GetBlock()->GetGraph()->GetAllocator())
      LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSB/W/L, so we need fixed registers.
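  // The x86 string instructions implicitly take their source address in ESI,
  // their destination address in EDI and their count in ECX, which is why
  // exactly those registers are reserved below.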
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        if (length.IsConstant()) {
          __ cmpl(Address(input, length_offset),
                  Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
        }
        __ j(kLess, slow_path->GetEntryLabel());
      }
    } else {
      // Check that length(input) >= pos.
      __ movl(temp, Address(input, length_offset));
      __ subl(temp, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmpl(temp, length.AsRegister<Register>());
      }
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    if (length.IsConstant()) {
      __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(temp, length.AsRegister<Register>());
    }
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

static void SystemArrayCopyPrimitive(HInvoke* invoke,
                                     X86Assembler* assembler,
                                     CodeGeneratorX86* codegen,
                                     DataType::Type type) {
  LocationSummary* locations = invoke->GetLocations();
  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSB/W/L.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
  codegen->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source. Use src_base as a temporary register.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path, src_base);

  // Validity checks: dest. Use src_base as a temporary register.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path, src_base);

  // Okay, everything checks out. Finally time to do the copy.
| 944 |   // The element size and scale factor below are derived from the data type. |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 945 | const size_t data_size = DataType::Size(type); |
| 946 | const ScaleFactor scale_factor = CodeGenerator::ScaleFactorForType(type); |
| 947 | const uint32_t data_offset = mirror::Array::DataOffset(data_size).Uint32Value(); |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 948 | |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 949 | if (src_pos.IsConstant()) { |
| 950 | int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 951 | __ leal(src_base, Address(src, data_size * src_pos_const + data_offset)); |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 952 | } else { |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 953 | __ leal(src_base, Address(src, src_pos.AsRegister<Register>(), scale_factor, data_offset)); |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 954 | } |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 955 | if (dest_pos.IsConstant()) { |
| 956 | int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 957 | __ leal(dest_base, Address(dest, data_size * dest_pos_const + data_offset)); |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 958 | } else { |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 959 | __ leal(dest_base, Address(dest, dest_pos.AsRegister<Register>(), scale_factor, data_offset)); |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 960 | } |
| 961 | |
| 962 | // Do the move. |
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 963 | switch (type) { |
| 964 | case DataType::Type::kInt8: |
| 965 | __ rep_movsb(); |
| 966 | break; |
| 967 | case DataType::Type::kUint16: |
| 968 | __ rep_movsw(); |
| 969 | break; |
| 970 | case DataType::Type::kInt32: |
| 971 | __ rep_movsl(); |
| 972 | break; |
| 973 | default: |
| 974 | LOG(FATAL) << "Unexpected data type for intrinsic"; |
| 975 | } |
Mark Mendell | 6bc53a9 | 2015-07-01 14:26:52 -0400 | [diff] [blame] | 976 | __ Bind(slow_path->GetExitLabel()); |
| 977 | } |
| 978 | |
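// A rough sketch (illustrative, not the verbatim output) of what
// SystemArrayCopyPrimitive emits for a register-allocated char copy:
//
//     cmp   src, dest          ; same array -> slow path (overlap)
//     test  src, src           ; null source -> slow path
//     test  dest, dest         ; null destination -> slow path
//     test  length, length     ; negative length -> slow path
//     mov   ecx, length        ; element count for REP MOVS
//     ... position checks via CheckPosition ...
//     lea   esi, [src  + src_pos*2  + data_offset]
//     lea   edi, [dest + dest_pos*2 + data_offset]
//     rep   movsw
//
// The *2 scale and MOVSW are the kUint16 case; kInt8 and kInt32 differ only
// in the scale factor and the MOVSB/MOVSL variant selected above.
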
Shalini Salomi Bodapati | b414a4c | 2022-02-10 18:03:34 +0530 | [diff] [blame] | 979 | void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) { |
| 980 | CreateSystemArrayCopyLocations(invoke); |
| 981 | } |
| 982 | |
| 983 | void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) { |
| 984 | X86Assembler* assembler = GetAssembler(); |
| 985 | SystemArrayCopyPrimitive(invoke, assembler, codegen_, DataType::Type::kUint16); |
| 986 | } |
| 987 | |
| 988 | void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyByte(HInvoke* invoke) { |
| 989 | X86Assembler* assembler = GetAssembler(); |
| 990 | SystemArrayCopyPrimitive(invoke, assembler, codegen_, DataType::Type::kInt8); |
| 991 | } |
| 992 | |
| 993 | void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyByte(HInvoke* invoke) { |
| 994 | CreateSystemArrayCopyLocations(invoke); |
| 995 | } |
| 996 | |
| 997 | void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyInt(HInvoke* invoke) { |
| 998 | X86Assembler* assembler = GetAssembler(); |
| 999 | SystemArrayCopyPrimitive(invoke, assembler, codegen_, DataType::Type::kInt32); |
| 1000 | } |
| 1001 | |
| 1002 | void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyInt(HInvoke* invoke) { |
| 1003 | CreateSystemArrayCopyLocations(invoke); |
| 1004 | } |
| 1005 | |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1006 | void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) { |
| 1007 | // The inputs plus one temp. |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1008 | LocationSummary* locations = new (allocator_) LocationSummary( |
| 1009 | invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified); |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1010 | InvokeRuntimeCallingConvention calling_convention; |
| 1011 | locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); |
| 1012 | locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); |
| 1013 | locations->SetOut(Location::RegisterLocation(EAX)); |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1014 | } |
| 1015 | |
| 1016 | void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) { |
| 1017 | X86Assembler* assembler = GetAssembler(); |
| 1018 | LocationSummary* locations = invoke->GetLocations(); |
| 1019 | |
Nicolas Geoffray | 512e04d | 2015-03-27 17:21:24 +0000 | [diff] [blame] | 1020 | // Note that the null check must have been done earlier. |
Calin Juravle | 641547a | 2015-04-21 22:08:51 +0100 | [diff] [blame] | 1021 | DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1022 | |
| 1023 | Register argument = locations->InAt(1).AsRegister<Register>(); |
| 1024 | __ testl(argument, argument); |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 1025 | SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1026 | codegen_->AddSlowPath(slow_path); |
| 1027 | __ j(kEqual, slow_path->GetEntryLabel()); |
| 1028 | |
Serban Constantinescu | ba45db0 | 2016-07-12 22:53:02 +0100 | [diff] [blame] | 1029 | codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path); |
Nicolas Geoffray | d75948a | 2015-03-27 09:53:16 +0000 | [diff] [blame] | 1030 | __ Bind(slow_path->GetExitLabel()); |
| 1031 | } |
| 1032 | |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1033 | void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1034 | LocationSummary* locations = |
| 1035 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1036 | locations->SetInAt(0, Location::RequiresRegister()); |
| 1037 | locations->SetInAt(1, Location::RequiresRegister()); |
| 1038 | |
| 1039 | // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction. |
| 1040 | locations->AddTemp(Location::RegisterLocation(ECX)); |
| 1041 | locations->AddTemp(Location::RegisterLocation(EDI)); |
| 1042 | |
| 1043 | // Set output, ESI needed for repe_cmpsl instruction anyways. |
| 1044 | locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap); |
| 1045 | } |
| 1046 | |
| 1047 | void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) { |
| 1048 | X86Assembler* assembler = GetAssembler(); |
| 1049 | LocationSummary* locations = invoke->GetLocations(); |
| 1050 | |
| 1051 | Register str = locations->InAt(0).AsRegister<Register>(); |
| 1052 | Register arg = locations->InAt(1).AsRegister<Register>(); |
| 1053 | Register ecx = locations->GetTemp(0).AsRegister<Register>(); |
| 1054 | Register edi = locations->GetTemp(1).AsRegister<Register>(); |
| 1055 | Register esi = locations->Out().AsRegister<Register>(); |
| 1056 | |
Mark Mendell | 0c9497d | 2015-08-21 09:30:05 -0400 | [diff] [blame] | 1057 | NearLabel end, return_true, return_false; |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1058 | |
| 1059 | // Get offsets of count, value, and class fields within a string object. |
| 1060 | const uint32_t count_offset = mirror::String::CountOffset().Uint32Value(); |
| 1061 | const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value(); |
| 1062 | const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value(); |
| 1063 | |
| 1064 | // Note that the null check must have been done earlier. |
| 1065 | DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); |
| 1066 | |
Nicolas Geoffray | a83a54d | 2015-10-02 17:30:26 +0100 | [diff] [blame] | 1067 | StringEqualsOptimizations optimizations(invoke); |
| 1068 | if (!optimizations.GetArgumentNotNull()) { |
| 1069 | // Check if input is null, return false if it is. |
| 1070 | __ testl(arg, arg); |
| 1071 | __ j(kEqual, &return_false); |
| 1072 | } |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1073 | |
Nicolas Geoffray | a83a54d | 2015-10-02 17:30:26 +0100 | [diff] [blame] | 1074 | if (!optimizations.GetArgumentIsString()) { |
Vladimir Marko | 53b5200 | 2016-05-24 19:30:45 +0100 | [diff] [blame] | 1075 | // Instanceof check for the argument by comparing class fields. |
| 1076 | // All string objects must have the same type since String cannot be subclassed. |
| 1077 | // Receiver must be a string object, so its class field is equal to all strings' class fields. |
| 1078 | // If the argument is a string object, its class field must be equal to receiver's class field. |
Roland Levillain | 1d775d2 | 2018-09-07 13:56:57 +0100 | [diff] [blame] | 1079 | // |
| 1080 | // As the String class is expected to be non-movable, we can read the class |
| 1081 | // field from String.equals' arguments without read barriers. |
| 1082 | AssertNonMovableStringClass(); |
| 1083 | // Also, because we use the loaded class references only to compare them, we |
| 1084 | // don't need to unpoison them. |
| 1085 | // /* HeapReference<Class> */ ecx = str->klass_ |
Nicolas Geoffray | a83a54d | 2015-10-02 17:30:26 +0100 | [diff] [blame] | 1086 | __ movl(ecx, Address(str, class_offset)); |
Roland Levillain | 1d775d2 | 2018-09-07 13:56:57 +0100 | [diff] [blame] | 1087 | // if (ecx != /* HeapReference<Class> */ arg->klass_) return false |
Nicolas Geoffray | a83a54d | 2015-10-02 17:30:26 +0100 | [diff] [blame] | 1088 | __ cmpl(ecx, Address(arg, class_offset)); |
| 1089 | __ j(kNotEqual, &return_false); |
| 1090 | } |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1091 | |
| 1092 | // Reference equality check, return true if same reference. |
| 1093 | __ cmpl(str, arg); |
| 1094 | __ j(kEqual, &return_true); |
| 1095 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1096 | // Load length and compression flag of receiver string. |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1097 | __ movl(ecx, Address(str, count_offset)); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1098 | // Check if lengths and compression flags are equal, return false if they're not. |
| 1099 | // Two identical strings will always have same compression style since |
| 1100 | // compression style is decided on alloc. |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1101 | __ cmpl(ecx, Address(arg, count_offset)); |
| 1102 | __ j(kNotEqual, &return_false); |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1103 | // Return true if strings are empty. Even with string compression `count == 0` means empty. |
| 1104 | static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u, |
| 1105 | "Expecting 0=compressed, 1=uncompressed"); |
| 1106 | __ jecxz(&return_true); |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1107 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1108 | if (mirror::kUseStringCompression) { |
| 1109 | NearLabel string_uncompressed; |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1110 | // Extract length and differentiate between both compressed or both uncompressed. |
| 1111 | // Different compression style is cut above. |
| 1112 | __ shrl(ecx, Immediate(1)); |
| 1113 | __ j(kCarrySet, &string_uncompressed); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1114 | // Divide string length by 2, rounding up, and continue as if uncompressed. |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1115 | __ addl(ecx, Immediate(1)); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1116 | __ shrl(ecx, Immediate(1)); |
| 1117 | __ Bind(&string_uncompressed); |
| 1118 | } |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1119 | // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction. |
| 1120 | __ leal(esi, Address(str, value_offset)); |
| 1121 | __ leal(edi, Address(arg, value_offset)); |
| 1122 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1123 | // Divide string length by 2 to compare characters 2 at a time and adjust for lengths not |
| 1124 | // divisible by 2. |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1125 | __ addl(ecx, Immediate(1)); |
| 1126 | __ shrl(ecx, Immediate(1)); |
| 1127 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1128 | // Assertions that must hold in order to compare strings 2 characters (uncompressed) |
| 1129 | // or 4 characters (compressed) at a time. |
Agi Csaki | d7138c8 | 2015-08-13 17:46:44 -0700 | [diff] [blame] | 1130 | DCHECK_ALIGNED(value_offset, 4); |
| 1131 | static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded"); |
| 1132 | |
| 1133 | // Loop to compare strings two characters at a time starting at the beginning of the string. |
| 1134 | __ repe_cmpsl(); |
| 1135 | // If strings are not equal, zero flag will be cleared. |
| 1136 | __ j(kNotEqual, &return_false); |
| 1137 | |
| 1138 | // Return true and exit the function. |
| 1139 | // If loop does not result in returning false, we return true. |
| 1140 | __ Bind(&return_true); |
| 1141 | __ movl(esi, Immediate(1)); |
| 1142 | __ jmp(&end); |
| 1143 | |
| 1144 | // Return false and exit the function. |
| 1145 | __ Bind(&return_false); |
| 1146 | __ xorl(esi, esi); |
| 1147 | __ Bind(&end); |
| 1148 | } |
| 1149 | |
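// Note on the single count comparison above: with string compression, the
// count field packs (length << 1) | flag, where the low bit is 0 for
// compressed (8-bit) and 1 for uncompressed (16-bit) data. For example, a
// three-character string stores count == 6 when compressed and count == 7
// when uncompressed, so one cmpl rejects strings differing in either length
// or compression style.
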
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1150 | static void CreateStringIndexOfLocations(HInvoke* invoke, |
| 1151 | ArenaAllocator* allocator, |
| 1152 | bool start_at_zero) { |
| 1153 | LocationSummary* locations = new (allocator) LocationSummary(invoke, |
| 1154 | LocationSummary::kCallOnSlowPath, |
| 1155 | kIntrinsified); |
| 1156 | // The data needs to be in EDI for scasw. So request that the string is there, anyways. |
| 1157 | locations->SetInAt(0, Location::RegisterLocation(EDI)); |
| 1158 | // If we look for a constant char, we'll still have to copy it into EAX. So just request the |
| 1159 | // allocator to do that, anyways. We can still do the constant check by checking the parameter |
| 1160 | // of the instruction explicitly. |
| 1161 | // Note: This works as we don't clobber EAX anywhere. |
| 1162 | locations->SetInAt(1, Location::RegisterLocation(EAX)); |
| 1163 | if (!start_at_zero) { |
| 1164 | locations->SetInAt(2, Location::RequiresRegister()); // The starting index. |
| 1165 | } |
| 1166 | // As we clobber EDI during execution anyways, also use it as the output. |
| 1167 | locations->SetOut(Location::SameAsFirstInput()); |
| 1168 | |
| 1169 | // repne scasw uses ECX as the counter. |
| 1170 | locations->AddTemp(Location::RegisterLocation(ECX)); |
| 1171 | // Need another temporary to be able to compute the result. |
| 1172 | locations->AddTemp(Location::RequiresRegister()); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1173 | if (mirror::kUseStringCompression) { |
| 1174 | // Need another temporary to be able to save unflagged string length. |
| 1175 | locations->AddTemp(Location::RequiresRegister()); |
| 1176 | } |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1177 | } |
| 1178 | |
| 1179 | static void GenerateStringIndexOf(HInvoke* invoke, |
| 1180 | X86Assembler* assembler, |
| 1181 | CodeGeneratorX86* codegen, |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1182 | bool start_at_zero) { |
| 1183 | LocationSummary* locations = invoke->GetLocations(); |
| 1184 | |
| 1185 | // Note that the null check must have been done earlier. |
| 1186 | DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0))); |
| 1187 | |
| 1188 | Register string_obj = locations->InAt(0).AsRegister<Register>(); |
| 1189 | Register search_value = locations->InAt(1).AsRegister<Register>(); |
| 1190 | Register counter = locations->GetTemp(0).AsRegister<Register>(); |
| 1191 | Register string_length = locations->GetTemp(1).AsRegister<Register>(); |
| 1192 | Register out = locations->Out().AsRegister<Register>(); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1193 | // Only used when string compression feature is on. |
| 1194 | Register string_length_flagged; |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1195 | |
| 1196 | // Check our assumptions for registers. |
| 1197 | DCHECK_EQ(string_obj, EDI); |
| 1198 | DCHECK_EQ(search_value, EAX); |
| 1199 | DCHECK_EQ(counter, ECX); |
| 1200 | DCHECK_EQ(out, EDI); |
| 1201 | |
| 1202 | // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically, |
Vladimir Marko | fb6c90a | 2016-05-06 15:52:12 +0100 | [diff] [blame] | 1203 | // or directly dispatch for a large constant, or omit slow-path for a small constant or a char. |
Andreas Gampe | 85b62f2 | 2015-09-09 13:15:38 -0700 | [diff] [blame] | 1204 | SlowPathCode* slow_path = nullptr; |
Vladimir Marko | fb6c90a | 2016-05-06 15:52:12 +0100 | [diff] [blame] | 1205 | HInstruction* code_point = invoke->InputAt(1); |
| 1206 | if (code_point->IsIntConstant()) { |
Vladimir Marko | da05108 | 2016-05-17 16:10:20 +0100 | [diff] [blame] | 1207 | if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) > |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1208 | std::numeric_limits<uint16_t>::max()) { |
| 1209 | // Always needs the slow-path. We could directly dispatch to it, but this case should be |
| 1210 | // rare, so for simplicity just put the full slow-path down and branch unconditionally. |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 1211 | slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1212 | codegen->AddSlowPath(slow_path); |
| 1213 | __ jmp(slow_path->GetEntryLabel()); |
| 1214 | __ Bind(slow_path->GetExitLabel()); |
| 1215 | return; |
| 1216 | } |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1217 | } else if (code_point->GetType() != DataType::Type::kUint16) { |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1218 | __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max())); |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 1219 | slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1220 | codegen->AddSlowPath(slow_path); |
| 1221 | __ j(kAbove, slow_path->GetEntryLabel()); |
| 1222 | } |
| 1223 | |
| 1224 | // From here down, we know that we are looking for a char that fits in 16 bits. |
| 1225 | // Location of reference to data array within the String object. |
| 1226 | int32_t value_offset = mirror::String::ValueOffset().Int32Value(); |
| 1227 | // Location of count within the String object. |
| 1228 | int32_t count_offset = mirror::String::CountOffset().Int32Value(); |
| 1229 | |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1230 | // Load the count field of the string containing the length and compression flag. |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1231 | __ movl(string_length, Address(string_obj, count_offset)); |
| 1232 | |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1233 | // Do a zero-length check. Even with string compression `count == 0` means empty. |
| 1234 | static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u, |
| 1235 | "Expecting 0=compressed, 1=uncompressed"); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1236 | // TODO: Support jecxz. |
Mark Mendell | 0c9497d | 2015-08-21 09:30:05 -0400 | [diff] [blame] | 1237 | NearLabel not_found_label; |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1238 | __ testl(string_length, string_length); |
| 1239 | __ j(kEqual, ¬_found_label); |
| 1240 | |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1241 | if (mirror::kUseStringCompression) { |
| 1242 | string_length_flagged = locations->GetTemp(2).AsRegister<Register>(); |
| 1243 | __ movl(string_length_flagged, string_length); |
| 1244 | // Extract the length and shift out the least significant bit used as compression flag. |
| 1245 | __ shrl(string_length, Immediate(1)); |
| 1246 | } |
| 1247 | |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1248 | if (start_at_zero) { |
| 1249 | // Number of chars to scan is the same as the string length. |
| 1250 | __ movl(counter, string_length); |
| 1251 | |
| 1252 | // Move to the start of the string. |
| 1253 | __ addl(string_obj, Immediate(value_offset)); |
| 1254 | } else { |
| 1255 | Register start_index = locations->InAt(2).AsRegister<Register>(); |
| 1256 | |
| 1257 | // Do a start_index check. |
| 1258 | __ cmpl(start_index, string_length); |
| 1259 | __ j(kGreaterEqual, ¬_found_label); |
| 1260 | |
| 1261 |     // Ensure we have a start index >= 0. |
| 1262 | __ xorl(counter, counter); |
| 1263 | __ cmpl(start_index, Immediate(0)); |
| 1264 | __ cmovl(kGreater, counter, start_index); |
| 1265 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1266 | if (mirror::kUseStringCompression) { |
| 1267 | NearLabel modify_counter, offset_uncompressed_label; |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1268 | __ testl(string_length_flagged, Immediate(1)); |
| 1269 | __ j(kNotZero, &offset_uncompressed_label); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1270 | // Move to the start of the string: string_obj + value_offset + start_index. |
| 1271 | __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_1, value_offset)); |
| 1272 | __ jmp(&modify_counter); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1273 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1274 | // Move to the start of the string: string_obj + value_offset + 2 * start_index. |
| 1275 | __ Bind(&offset_uncompressed_label); |
| 1276 | __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset)); |
| 1277 | |
| 1278 | // Now update ecx (the repne scasw work counter). We have string.length - start_index left to |
| 1279 | // compare. |
| 1280 | __ Bind(&modify_counter); |
| 1281 | } else { |
| 1282 | __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset)); |
| 1283 | } |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1284 | __ negl(counter); |
| 1285 | __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0)); |
| 1286 | } |
| 1287 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1288 | if (mirror::kUseStringCompression) { |
| 1289 | NearLabel uncompressed_string_comparison; |
| 1290 | NearLabel comparison_done; |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1291 | __ testl(string_length_flagged, Immediate(1)); |
| 1292 | __ j(kNotZero, &uncompressed_string_comparison); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1293 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1294 | // Check if EAX (search_value) is ASCII. |
| 1295 | __ cmpl(search_value, Immediate(127)); |
| 1296 | __ j(kGreater, ¬_found_label); |
| 1297 | // Comparing byte-per-byte. |
| 1298 | __ repne_scasb(); |
| 1299 | __ jmp(&comparison_done); |
| 1300 | |
| 1301 | // Everything is set up for repne scasw: |
| 1302 | // * Comparison address in EDI. |
| 1303 | // * Counter in ECX. |
| 1304 | __ Bind(&uncompressed_string_comparison); |
| 1305 | __ repne_scasw(); |
| 1306 | __ Bind(&comparison_done); |
| 1307 | } else { |
| 1308 | __ repne_scasw(); |
| 1309 | } |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1310 | // Did we find a match? |
| 1311 | __ j(kNotEqual, ¬_found_label); |
| 1312 | |
| 1313 | // Yes, we matched. Compute the index of the result. |
| 1314 | __ subl(string_length, counter); |
| 1315 | __ leal(out, Address(string_length, -1)); |
| 1316 | |
Mark Mendell | 0c9497d | 2015-08-21 09:30:05 -0400 | [diff] [blame] | 1317 | NearLabel done; |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1318 | __ jmp(&done); |
| 1319 | |
| 1320 | // Failed to match; return -1. |
| 1321 | __ Bind(¬_found_label); |
| 1322 | __ movl(out, Immediate(-1)); |
| 1323 | |
| 1324 | // And join up at the end. |
| 1325 | __ Bind(&done); |
| 1326 | if (slow_path != nullptr) { |
| 1327 | __ Bind(slow_path->GetExitLabel()); |
| 1328 | } |
| 1329 | } |
| 1330 | |
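// Index recovery sketch: REPNE SCASW decrements ECX once per element scanned,
// including the matching one. If the char is found at (absolute) index i,
// ECX exits holding string_length - i - 1, so the epilogue above computes
//     string_length -= ecx    ; now i + 1
//     out = string_length - 1 ; now i
// which holds for both the start-at-zero and start-at-index entry points.
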
| 1331 | void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 1332 | CreateStringIndexOfLocations(invoke, allocator_, /* start_at_zero= */ true); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1333 | } |
| 1334 | |
| 1335 | void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 1336 | GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ true); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1337 | } |
| 1338 | |
| 1339 | void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 1340 | CreateStringIndexOfLocations(invoke, allocator_, /* start_at_zero= */ false); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1341 | } |
| 1342 | |
| 1343 | void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 1344 | GenerateStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero= */ false); |
Andreas Gampe | 21030dd | 2015-05-07 14:46:15 -0700 | [diff] [blame] | 1345 | } |
| 1346 | |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1347 | void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1348 | LocationSummary* locations = new (allocator_) LocationSummary( |
| 1349 | invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1350 | InvokeRuntimeCallingConvention calling_convention; |
| 1351 | locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); |
| 1352 | locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); |
| 1353 | locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); |
| 1354 | locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3))); |
| 1355 | locations->SetOut(Location::RegisterLocation(EAX)); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1356 | } |
| 1357 | |
| 1358 | void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) { |
| 1359 | X86Assembler* assembler = GetAssembler(); |
| 1360 | LocationSummary* locations = invoke->GetLocations(); |
| 1361 | |
| 1362 | Register byte_array = locations->InAt(0).AsRegister<Register>(); |
| 1363 | __ testl(byte_array, byte_array); |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 1364 | SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1365 | codegen_->AddSlowPath(slow_path); |
| 1366 | __ j(kEqual, slow_path->GetEntryLabel()); |
| 1367 | |
Serban Constantinescu | ba45db0 | 2016-07-12 22:53:02 +0100 | [diff] [blame] | 1368 | codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc()); |
Roland Levillain | f969a20 | 2016-03-09 16:14:00 +0000 | [diff] [blame] | 1369 | CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>(); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1370 | __ Bind(slow_path->GetExitLabel()); |
| 1371 | } |
| 1372 | |
| 1373 | void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1374 | LocationSummary* locations = |
| 1375 | new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1376 | InvokeRuntimeCallingConvention calling_convention; |
| 1377 | locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); |
| 1378 | locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); |
| 1379 | locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); |
| 1380 | locations->SetOut(Location::RegisterLocation(EAX)); |
| 1381 | } |
| 1382 | |
| 1383 | void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) { |
Roland Levillain | cc3839c | 2016-02-29 16:23:48 +0000 | [diff] [blame] | 1384 | // No need to emit code checking whether `locations->InAt(2)` is a null |
| 1385 | // pointer, as callers of the native method |
| 1386 | // |
| 1387 | // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data) |
| 1388 | // |
| 1389 | // all include a null check on `data` before calling that method. |
Serban Constantinescu | ba45db0 | 2016-07-12 22:53:02 +0100 | [diff] [blame] | 1390 | codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc()); |
Roland Levillain | f969a20 | 2016-03-09 16:14:00 +0000 | [diff] [blame] | 1391 | CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>(); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1392 | } |
| 1393 | |
| 1394 | void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1395 | LocationSummary* locations = new (allocator_) LocationSummary( |
| 1396 | invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1397 | InvokeRuntimeCallingConvention calling_convention; |
| 1398 | locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); |
| 1399 | locations->SetOut(Location::RegisterLocation(EAX)); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1400 | } |
| 1401 | |
| 1402 | void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) { |
| 1403 | X86Assembler* assembler = GetAssembler(); |
| 1404 | LocationSummary* locations = invoke->GetLocations(); |
| 1405 | |
| 1406 | Register string_to_copy = locations->InAt(0).AsRegister<Register>(); |
| 1407 | __ testl(string_to_copy, string_to_copy); |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 1408 | SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1409 | codegen_->AddSlowPath(slow_path); |
| 1410 | __ j(kEqual, slow_path->GetEntryLabel()); |
| 1411 | |
Serban Constantinescu | ba45db0 | 2016-07-12 22:53:02 +0100 | [diff] [blame] | 1412 | codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc()); |
Roland Levillain | f969a20 | 2016-03-09 16:14:00 +0000 | [diff] [blame] | 1413 | CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>(); |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1414 | __ Bind(slow_path->GetExitLabel()); |
| 1415 | } |
| 1416 | |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1417 | void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) { |
| 1418 | // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin); |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1419 | LocationSummary* locations = |
| 1420 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1421 | locations->SetInAt(0, Location::RequiresRegister()); |
| 1422 | locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1))); |
| 1423 | // Place srcEnd in ECX to save a move below. |
| 1424 | locations->SetInAt(2, Location::RegisterLocation(ECX)); |
| 1425 | locations->SetInAt(3, Location::RequiresRegister()); |
| 1426 | locations->SetInAt(4, Location::RequiresRegister()); |
| 1427 | |
| 1428 | // And we need some temporaries. We will use REP MOVSW, so we need fixed registers. |
| 1429 | // We don't have enough registers to also grab ECX, so handle below. |
| 1430 | locations->AddTemp(Location::RegisterLocation(ESI)); |
| 1431 | locations->AddTemp(Location::RegisterLocation(EDI)); |
| 1432 | } |
| 1433 | |
| 1434 | void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) { |
| 1435 | X86Assembler* assembler = GetAssembler(); |
| 1436 | LocationSummary* locations = invoke->GetLocations(); |
| 1437 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1438 | size_t char_component_size = DataType::Size(DataType::Type::kUint16); |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1439 | // Location of data in char array buffer. |
| 1440 | const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value(); |
| 1441 | // Location of char array data in string. |
| 1442 | const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value(); |
| 1443 | |
| 1444 | // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin); |
| 1445 | Register obj = locations->InAt(0).AsRegister<Register>(); |
| 1446 | Location srcBegin = locations->InAt(1); |
| 1447 | int srcBegin_value = |
| 1448 | srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0; |
| 1449 | Register srcEnd = locations->InAt(2).AsRegister<Register>(); |
| 1450 | Register dst = locations->InAt(3).AsRegister<Register>(); |
| 1451 | Register dstBegin = locations->InAt(4).AsRegister<Register>(); |
| 1452 | |
| 1453 | // Check assumption that sizeof(Char) is 2 (used in scaling below). |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1454 | const size_t char_size = DataType::Size(DataType::Type::kUint16); |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1455 | DCHECK_EQ(char_size, 2u); |
| 1456 | |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1457 | // Compute the number of chars (words) to move. |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1458 | // Save ECX, since we don't know if it will be used later. |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1459 | __ pushl(ECX); |
| 1460 | int stack_adjust = kX86WordSize; |
| 1461 | __ cfi().AdjustCFAOffset(stack_adjust); |
| 1462 | DCHECK_EQ(srcEnd, ECX); |
| 1463 | if (srcBegin.IsConstant()) { |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1464 | __ subl(ECX, Immediate(srcBegin_value)); |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1465 | } else { |
| 1466 | DCHECK(srcBegin.IsRegister()); |
| 1467 | __ subl(ECX, srcBegin.AsRegister<Register>()); |
| 1468 | } |
| 1469 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1470 | NearLabel done; |
| 1471 | if (mirror::kUseStringCompression) { |
| 1472 |     // Location of the count field in the string. |
| 1473 | const uint32_t count_offset = mirror::String::CountOffset().Uint32Value(); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1474 | const size_t c_char_size = DataType::Size(DataType::Type::kInt8); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1475 | DCHECK_EQ(c_char_size, 1u); |
| 1476 | __ pushl(EAX); |
| 1477 | __ cfi().AdjustCFAOffset(stack_adjust); |
| 1478 | |
| 1479 | NearLabel copy_loop, copy_uncompressed; |
Vladimir Marko | fdaf0f4 | 2016-10-13 19:29:53 +0100 | [diff] [blame] | 1480 | __ testl(Address(obj, count_offset), Immediate(1)); |
| 1481 | static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u, |
| 1482 | "Expecting 0=compressed, 1=uncompressed"); |
| 1483 | __ j(kNotZero, ©_uncompressed); |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1484 | // Compute the address of the source string by adding the number of chars from |
| 1485 | // the source beginning to the value offset of a string. |
| 1486 | __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_1, value_offset)); |
| 1487 | |
| 1488 | // Start the loop to copy String's value to Array of Char. |
| 1489 | __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset)); |
| 1490 | __ Bind(©_loop); |
| 1491 | __ jecxz(&done); |
| 1492 | // Use EAX temporary (convert byte from ESI to word). |
| 1493 | // TODO: Use LODSB/STOSW (not supported by X86Assembler) with AH initialized to 0. |
| 1494 | __ movzxb(EAX, Address(ESI, 0)); |
| 1495 | __ movw(Address(EDI, 0), EAX); |
| 1496 | __ leal(EDI, Address(EDI, char_size)); |
| 1497 | __ leal(ESI, Address(ESI, c_char_size)); |
| 1498 | // TODO: Add support for LOOP to X86Assembler. |
| 1499 | __ subl(ECX, Immediate(1)); |
| 1500 | __ jmp(©_loop); |
| 1501 | __ Bind(©_uncompressed); |
| 1502 | } |
| 1503 | |
| 1504 | // Do the copy for uncompressed string. |
| 1505 | // Compute the address of the destination buffer. |
| 1506 | __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset)); |
| 1507 | __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_2, value_offset)); |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1508 | __ rep_movsw(); |
| 1509 | |
jessicahandojo | 4877b79 | 2016-09-08 19:49:13 -0700 | [diff] [blame] | 1510 | __ Bind(&done); |
| 1511 | if (mirror::kUseStringCompression) { |
| 1512 | // Restore EAX. |
| 1513 | __ popl(EAX); |
| 1514 | __ cfi().AdjustCFAOffset(-stack_adjust); |
| 1515 | } |
| 1516 | // Restore ECX. |
Mark Mendell | 8f8926a | 2015-08-17 11:39:06 -0400 | [diff] [blame] | 1517 | __ popl(ECX); |
| 1518 | __ cfi().AdjustCFAOffset(-stack_adjust); |
| 1519 | } |
| 1520 | |
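// Sketch of the compressed-string loop above, which widens one byte per
// iteration into a 16-bit char (illustrative only):
//
//     loop: jecxz done
//           movzx eax, byte ptr [esi]   ; load 8-bit char
//           mov   word ptr [edi], ax    ; store 16-bit char
//           lea   edi, [edi + 2]
//           lea   esi, [esi + 1]
//           sub   ecx, 1
//           jmp   loop
//
// The uncompressed path needs no widening and uses a single REP MOVSW.
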
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1521 | static void GenPeek(LocationSummary* locations, DataType::Type size, X86Assembler* assembler) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1522 | Register address = locations->InAt(0).AsRegisterPairLow<Register>(); |
| 1523 | Location out_loc = locations->Out(); |
| 1524 | // x86 allows unaligned access. We do not have to check the input or use specific instructions |
| 1525 | // to avoid a SIGBUS. |
| 1526 | switch (size) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1527 | case DataType::Type::kInt8: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1528 | __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0)); |
| 1529 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1530 | case DataType::Type::kInt16: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1531 | __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0)); |
| 1532 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1533 | case DataType::Type::kInt32: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1534 | __ movl(out_loc.AsRegister<Register>(), Address(address, 0)); |
| 1535 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1536 | case DataType::Type::kInt64: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1537 | __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0)); |
| 1538 | __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4)); |
| 1539 | break; |
| 1540 | default: |
| 1541 | LOG(FATAL) << "Type not recognized for peek: " << size; |
| 1542 | UNREACHABLE(); |
| 1543 | } |
| 1544 | } |
| 1545 | |
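// Illustrative emitted code for Memory.peekLongNative: the 64-bit peek is
// two 32-bit loads, so (unlike the volatile Unsafe accesses below) it is not
// atomic on x86-32:
//     mov  out_lo, [address]
//     mov  out_hi, [address + 4]
// The byte and short peeks sign-extend into the output via MOVSX.
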
| 1546 | void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1547 | CreateLongToIntLocations(allocator_, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1548 | } |
| 1549 | |
| 1550 | void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1551 | GenPeek(invoke->GetLocations(), DataType::Type::kInt8, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1552 | } |
| 1553 | |
| 1554 | void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1555 | CreateLongToIntLocations(allocator_, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1556 | } |
| 1557 | |
| 1558 | void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1559 | GenPeek(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1560 | } |
| 1561 | |
| 1562 | void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1563 | CreateLongToLongLocations(allocator_, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1564 | } |
| 1565 | |
| 1566 | void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1567 | GenPeek(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1568 | } |
| 1569 | |
| 1570 | void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1571 | CreateLongToIntLocations(allocator_, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1572 | } |
| 1573 | |
| 1574 | void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1575 | GenPeek(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1576 | } |
| 1577 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1578 | static void CreateLongIntToVoidLocations(ArenaAllocator* allocator, |
| 1579 | DataType::Type size, |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1580 | HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1581 | LocationSummary* locations = |
| 1582 | new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1583 | locations->SetInAt(0, Location::RequiresRegister()); |
Roland Levillain | 4c0eb42 | 2015-04-24 16:43:49 +0100 | [diff] [blame] | 1584 | HInstruction* value = invoke->InputAt(1); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1585 | if (size == DataType::Type::kInt8) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1586 | locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value)); |
| 1587 | } else { |
| 1588 | locations->SetInAt(1, Location::RegisterOrConstant(value)); |
| 1589 | } |
| 1590 | } |
| 1591 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1592 | static void GenPoke(LocationSummary* locations, DataType::Type size, X86Assembler* assembler) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1593 | Register address = locations->InAt(0).AsRegisterPairLow<Register>(); |
| 1594 | Location value_loc = locations->InAt(1); |
| 1595 | // x86 allows unaligned access. We do not have to check the input or use specific instructions |
| 1596 | // to avoid a SIGBUS. |
| 1597 | switch (size) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1598 | case DataType::Type::kInt8: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1599 | if (value_loc.IsConstant()) { |
| 1600 | __ movb(Address(address, 0), |
| 1601 | Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue())); |
| 1602 | } else { |
| 1603 | __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>()); |
| 1604 | } |
| 1605 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1606 | case DataType::Type::kInt16: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1607 | if (value_loc.IsConstant()) { |
| 1608 | __ movw(Address(address, 0), |
| 1609 | Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue())); |
| 1610 | } else { |
| 1611 | __ movw(Address(address, 0), value_loc.AsRegister<Register>()); |
| 1612 | } |
| 1613 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1614 | case DataType::Type::kInt32: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1615 | if (value_loc.IsConstant()) { |
| 1616 | __ movl(Address(address, 0), |
| 1617 | Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue())); |
| 1618 | } else { |
| 1619 | __ movl(Address(address, 0), value_loc.AsRegister<Register>()); |
| 1620 | } |
| 1621 | break; |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1622 | case DataType::Type::kInt64: |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1623 | if (value_loc.IsConstant()) { |
| 1624 | int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue(); |
| 1625 | __ movl(Address(address, 0), Immediate(Low32Bits(value))); |
| 1626 | __ movl(Address(address, 4), Immediate(High32Bits(value))); |
| 1627 | } else { |
| 1628 | __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>()); |
| 1629 | __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>()); |
| 1630 | } |
| 1631 | break; |
| 1632 | default: |
| 1633 | LOG(FATAL) << "Type not recognized for poke: " << size; |
| 1634 | UNREACHABLE(); |
| 1635 | } |
| 1636 | } |
| 1637 | |
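// Illustrative emitted code for Memory.pokeLongNative with a constant value;
// like the 64-bit peek, this is two 32-bit stores and not atomic:
//     mov  dword ptr [address],     low32(value)
//     mov  dword ptr [address + 4], high32(value)
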
| 1638 | void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1639 | CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt8, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1640 | } |
| 1641 | |
| 1642 | void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1643 | GenPoke(invoke->GetLocations(), DataType::Type::kInt8, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1644 | } |
| 1645 | |
| 1646 | void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1647 | CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt32, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1648 | } |
| 1649 | |
| 1650 | void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1651 | GenPoke(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1652 | } |
| 1653 | |
| 1654 | void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1655 | CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt64, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1656 | } |
| 1657 | |
| 1658 | void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1659 | GenPoke(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1660 | } |
| 1661 | |
| 1662 | void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1663 | CreateLongIntToVoidLocations(allocator_, DataType::Type::kInt16, invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1664 | } |
| 1665 | |
| 1666 | void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1667 | GenPoke(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1668 | } |
| 1669 | |
| 1670 | void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1671 | LocationSummary* locations = |
| 1672 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1673 | locations->SetOut(Location::RequiresRegister()); |
| 1674 | } |
| 1675 | |
| 1676 | void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) { |
| 1677 | Register out = invoke->GetLocations()->Out().AsRegister<Register>(); |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 1678 | GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>())); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1679 | } |
| 1680 | |
Roland Levillain | 0d5a281 | 2015-11-13 10:07:31 +0000 | [diff] [blame] | 1681 | static void GenUnsafeGet(HInvoke* invoke, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1682 | DataType::Type type, |
Roland Levillain | 0d5a281 | 2015-11-13 10:07:31 +0000 | [diff] [blame] | 1683 | bool is_volatile, |
| 1684 | CodeGeneratorX86* codegen) { |
| 1685 | X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); |
| 1686 | LocationSummary* locations = invoke->GetLocations(); |
| 1687 | Location base_loc = locations->InAt(1); |
| 1688 | Register base = base_loc.AsRegister<Register>(); |
| 1689 | Location offset_loc = locations->InAt(2); |
| 1690 | Register offset = offset_loc.AsRegisterPairLow<Register>(); |
| 1691 | Location output_loc = locations->Out(); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1692 | |
| 1693 | switch (type) { |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1694 | case DataType::Type::kInt32: { |
Roland Levillain | 0d5a281 | 2015-11-13 10:07:31 +0000 | [diff] [blame] | 1695 | Register output = output_loc.AsRegister<Register>(); |
| 1696 | __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0)); |
Roland Levillain | 7c1559a | 2015-12-15 10:55:36 +0000 | [diff] [blame] | 1697 | break; |
| 1698 | } |
| 1699 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1700 | case DataType::Type::kReference: { |
Roland Levillain | 7c1559a | 2015-12-15 10:55:36 +0000 | [diff] [blame] | 1701 | Register output = output_loc.AsRegister<Register>(); |
| 1702 | if (kEmitCompilerReadBarrier) { |
| 1703 | if (kUseBakerReadBarrier) { |
Sang, Chunlei | 0fcd2b8 | 2016-04-05 17:12:59 +0800 | [diff] [blame] | 1704 | Address src(base, offset, ScaleFactor::TIMES_1, 0); |
| 1705 | codegen->GenerateReferenceLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 1706 | invoke, output_loc, base, src, /* needs_null_check= */ false); |
Roland Levillain | 7c1559a | 2015-12-15 10:55:36 +0000 | [diff] [blame] | 1707 | } else { |
| 1708 | __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0)); |
| 1709 | codegen->GenerateReadBarrierSlow( |
| 1710 | invoke, output_loc, output_loc, base_loc, 0U, offset_loc); |
| 1711 | } |
| 1712 | } else { |
| 1713 | __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0)); |
| 1714 | __ MaybeUnpoisonHeapReference(output); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1715 | } |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1716 | break; |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1717 | } |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1718 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1719 | case DataType::Type::kInt64: { |
Roland Levillain | 0d5a281 | 2015-11-13 10:07:31 +0000 | [diff] [blame] | 1720 | Register output_lo = output_loc.AsRegisterPairLow<Register>(); |
| 1721 | Register output_hi = output_loc.AsRegisterPairHigh<Register>(); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1722 | if (is_volatile) { |
| 1723 |         // Need to use an XMM register to read the 64-bit value atomically. |
| 1724 | XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>(); |
| 1725 | __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0)); |
| 1726 | __ movd(output_lo, temp); |
| 1727 | __ psrlq(temp, Immediate(32)); |
| 1728 | __ movd(output_hi, temp); |
| 1729 | } else { |
| 1730 | __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0)); |
| 1731 | __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4)); |
| 1732 | } |
| 1733 | } |
| 1734 | break; |
| 1735 | |
| 1736 | default: |
| 1737 | LOG(FATAL) << "Unsupported op size " << type; |
| 1738 | UNREACHABLE(); |
| 1739 | } |
| 1740 | } |
| 1741 | |
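// Contrast with the Memory peek/poke above: a volatile 64-bit Unsafe get must
// be a single atomic load, hence the XMM sequence emitted for that case:
//     movsd xmm_temp, [base + offset]   ; one atomic 8-byte load
//     movd  output_lo, xmm_temp
//     psrlq xmm_temp, 32                ; shift high half down
//     movd  output_hi, xmm_temp
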
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1742 | static bool UnsafeGetIntrinsicOnCallList(Intrinsics intrinsic) { |
| 1743 | switch (intrinsic) { |
| 1744 | case Intrinsics::kUnsafeGetObject: |
| 1745 | case Intrinsics::kUnsafeGetObjectVolatile: |
| 1746 | case Intrinsics::kJdkUnsafeGetObject: |
| 1747 | case Intrinsics::kJdkUnsafeGetObjectVolatile: |
| 1748 | case Intrinsics::kJdkUnsafeGetObjectAcquire: |
| 1749 | return true; |
| 1750 | default: |
| 1751 | break; |
| 1752 | } |
| 1753 | return false; |
| 1754 | } |
| 1755 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1756 | static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator, |
Roland Levillain | 7c1559a | 2015-12-15 10:55:36 +0000 | [diff] [blame] | 1757 | HInvoke* invoke, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1758 | DataType::Type type, |
Roland Levillain | 7c1559a | 2015-12-15 10:55:36 +0000 | [diff] [blame] | 1759 | bool is_volatile) { |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1760 | bool can_call = kEmitCompilerReadBarrier && UnsafeGetIntrinsicOnCallList(invoke->GetIntrinsic()); |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1761 | LocationSummary* locations = |
| 1762 | new (allocator) LocationSummary(invoke, |
| 1763 | can_call |
| 1764 | ? LocationSummary::kCallOnSlowPath |
| 1765 | : LocationSummary::kNoCall, |
| 1766 | kIntrinsified); |
Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 1767 | if (can_call && kUseBakerReadBarrier) { |
Vladimir Marko | 804b03f | 2016-09-14 16:26:36 +0100 | [diff] [blame] | 1768 | locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers. |
Vladimir Marko | 70e9746 | 2016-08-09 11:04:26 +0100 | [diff] [blame] | 1769 | } |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1770 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 1771 | locations->SetInAt(1, Location::RequiresRegister()); |
| 1772 | locations->SetInAt(2, Location::RequiresRegister()); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1773 | if (type == DataType::Type::kInt64) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1774 | if (is_volatile) { |
| 1775 | // Need an XMM temporary to read the volatile value atomically. |
| 1776 | locations->AddTemp(Location::RequiresFpuRegister()); |
Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 1777 | locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1778 | } else { |
| 1779 | locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap); |
| 1780 | } |
| 1781 | } else { |
Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 1782 | locations->SetOut(Location::RequiresRegister(), |
Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 1783 | (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap)); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1784 | } |
| 1785 | } |
| 1786 | |
| 1787 | void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1788 | VisitJdkUnsafeGet(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1789 | } |
| 1790 | void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1791 | VisitJdkUnsafeGetVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1792 | } |
| 1793 | void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1794 | VisitJdkUnsafeGetLong(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1795 | } |
| 1796 | void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1797 | VisitJdkUnsafeGetLongVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1798 | } |
| 1799 | void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1800 | VisitJdkUnsafeGetObject(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1801 | } |
| 1802 | void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1803 | VisitJdkUnsafeGetObjectVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1804 | } |
| 1805 | |
| 1806 | |
| 1807 | void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1808 | VisitJdkUnsafeGet(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1809 | } |
| 1810 | void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1811 | VisitJdkUnsafeGetVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1812 | } |
| 1813 | void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1814 | VisitJdkUnsafeGetLong(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1815 | } |
| 1816 | void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1817 | VisitJdkUnsafeGetLongVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1818 | } |
| 1819 | void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1820 | VisitJdkUnsafeGetObject(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1821 | } |
| 1822 | void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1823 | VisitJdkUnsafeGetObjectVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1824 | } |
| 1825 | |
| 1826 | |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1827 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGet(HInvoke* invoke) { |
| 1828 | CreateIntIntIntToIntLocations( |
| 1829 | allocator_, invoke, DataType::Type::kInt32, /*is_volatile=*/ false); |
| 1830 | } |
| 1831 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) { |
| 1832 | CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32, /*is_volatile=*/ true); |
| 1833 | } |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 1834 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetAcquire(HInvoke* invoke) { |
| 1835 | CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32, /*is_volatile=*/ true); |
| 1836 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1837 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetLong(HInvoke* invoke) { |
| 1838 | CreateIntIntIntToIntLocations( |
| 1839 | allocator_, invoke, DataType::Type::kInt64, /*is_volatile=*/ false); |
| 1840 | } |
| 1841 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetLongVolatile(HInvoke* invoke) { |
| 1842 | CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64, /*is_volatile=*/ true); |
| 1843 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1844 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetLongAcquire(HInvoke* invoke) { |
| 1845 | CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64, /*is_volatile=*/ true); |
| 1846 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1847 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetObject(HInvoke* invoke) { |
| 1848 | CreateIntIntIntToIntLocations( |
| 1849 | allocator_, invoke, DataType::Type::kReference, /*is_volatile=*/ false); |
| 1850 | } |
| 1851 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetObjectVolatile(HInvoke* invoke) { |
| 1852 | CreateIntIntIntToIntLocations( |
| 1853 | allocator_, invoke, DataType::Type::kReference, /*is_volatile=*/ true); |
| 1854 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1855 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetObjectAcquire(HInvoke* invoke) { |
| 1856 | CreateIntIntIntToIntLocations( |
| 1857 | allocator_, invoke, DataType::Type::kReference, /*is_volatile=*/ true); |
| 1858 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1859 | |
| 1860 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGet(HInvoke* invoke) { |
| 1861 | GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_); |
| 1862 | } |
| 1863 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) { |
| 1864 | GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_); |
| 1865 | } |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 1866 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetAcquire(HInvoke* invoke) { |
| 1867 | GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_); |
| 1868 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1869 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetLong(HInvoke* invoke) { |
| 1870 | GenUnsafeGet(invoke, DataType::Type::kInt64, /*is_volatile=*/ false, codegen_); |
| 1871 | } |
| 1872 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetLongVolatile(HInvoke* invoke) { |
| 1873 | GenUnsafeGet(invoke, DataType::Type::kInt64, /*is_volatile=*/ true, codegen_); |
| 1874 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1875 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetLongAcquire(HInvoke* invoke) { |
| 1876 | GenUnsafeGet(invoke, DataType::Type::kInt64, /*is_volatile=*/ true, codegen_); |
| 1877 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1878 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetObject(HInvoke* invoke) { |
| 1879 | GenUnsafeGet(invoke, DataType::Type::kReference, /*is_volatile=*/ false, codegen_); |
| 1880 | } |
| 1881 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetObjectVolatile(HInvoke* invoke) { |
| 1882 | GenUnsafeGet(invoke, DataType::Type::kReference, /*is_volatile=*/ true, codegen_); |
| 1883 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1884 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetObjectAcquire(HInvoke* invoke) { |
| 1885 | GenUnsafeGet(invoke, DataType::Type::kReference, /*is_volatile=*/ true, codegen_); |
| 1886 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1887 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1888 | static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1889 | DataType::Type type, |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1890 | HInvoke* invoke, |
| 1891 | bool is_volatile) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 1892 | LocationSummary* locations = |
| 1893 | new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1894 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 1895 | locations->SetInAt(1, Location::RequiresRegister()); |
| 1896 | locations->SetInAt(2, Location::RequiresRegister()); |
| 1897 | locations->SetInAt(3, Location::RequiresRegister()); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1898 | if (type == DataType::Type::kReference) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1899 | // Need temp registers for card-marking. |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 1900 | locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too. |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1901 | // Ensure the value is in a byte register. |
| 1902 | locations->AddTemp(Location::RegisterLocation(ECX)); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1903 | } else if (type == DataType::Type::kInt64 && is_volatile) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1904 | locations->AddTemp(Location::RequiresFpuRegister()); |
| 1905 | locations->AddTemp(Location::RequiresFpuRegister()); |
| 1906 | } |
| 1907 | } |
| 1908 | |
| 1909 | void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1910 | VisitJdkUnsafePut(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1911 | } |
| 1912 | void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1913 | VisitJdkUnsafePutOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1914 | } |
| 1915 | void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1916 | VisitJdkUnsafePutVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1917 | } |
| 1918 | void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1919 | VisitJdkUnsafePutObject(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1920 | } |
| 1921 | void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1922 | VisitJdkUnsafePutObjectOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1923 | } |
| 1924 | void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1925 | VisitJdkUnsafePutObjectVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1926 | } |
| 1927 | void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1928 | VisitJdkUnsafePutLong(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1929 | } |
| 1930 | void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1931 | VisitJdkUnsafePutLongOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1932 | } |
| 1933 | void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1934 | VisitJdkUnsafePutLongVolatile(invoke); |
| 1935 | } |
| 1936 | |
| 1937 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePut(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 1938 | CreateIntIntIntIntToVoidPlusTempsLocations( |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1939 | allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false); |
| 1940 | } |
| 1941 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) { |
| 1942 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1943 | allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false); |
| 1944 | } |
| 1945 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutVolatile(HInvoke* invoke) { |
| 1946 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1947 | allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ true); |
| 1948 | } |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 1949 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutRelease(HInvoke* invoke) { |
| 1950 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1951 | allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ true); |
| 1952 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1953 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutObject(HInvoke* invoke) { |
| 1954 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1955 | allocator_, DataType::Type::kReference, invoke, /*is_volatile=*/ false); |
| 1956 | } |
| 1957 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutObjectOrdered(HInvoke* invoke) { |
| 1958 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1959 | allocator_, DataType::Type::kReference, invoke, /*is_volatile=*/ false); |
| 1960 | } |
| 1961 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutObjectVolatile(HInvoke* invoke) { |
| 1962 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1963 | allocator_, DataType::Type::kReference, invoke, /*is_volatile=*/ true); |
| 1964 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1965 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutObjectRelease(HInvoke* invoke) { |
| 1966 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1967 | allocator_, DataType::Type::kReference, invoke, /*is_volatile=*/ true); |
| 1968 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 1969 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutLong(HInvoke* invoke) { |
| 1970 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1971 | allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ false); |
| 1972 | } |
| 1973 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutLongOrdered(HInvoke* invoke) { |
| 1974 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1975 | allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ false); |
| 1976 | } |
| 1977 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutLongVolatile(HInvoke* invoke) { |
| 1978 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1979 | allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ true); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1980 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 1981 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutLongRelease(HInvoke* invoke) { |
| 1982 | CreateIntIntIntIntToVoidPlusTempsLocations( |
| 1983 | allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ true); |
| 1984 | } |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1985 | |
| 1986 | // Ordered puts need no special handling here: an ordered store only requires an AnyStore |
| 1987 | // barrier, which the x86 memory model already provides. |
| 1988 | static void GenUnsafePut(LocationSummary* locations, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1989 | DataType::Type type, |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1990 | bool is_volatile, |
| 1991 | CodeGeneratorX86* codegen) { |
Roland Levillain | b488b78 | 2015-10-22 11:38:49 +0100 | [diff] [blame] | 1992 | X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1993 | Register base = locations->InAt(1).AsRegister<Register>(); |
| 1994 | Register offset = locations->InAt(2).AsRegisterPairLow<Register>(); |
| 1995 | Location value_loc = locations->InAt(3); |
| 1996 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 1997 | if (type == DataType::Type::kInt64) { |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 1998 | Register value_lo = value_loc.AsRegisterPairLow<Register>(); |
| 1999 | Register value_hi = value_loc.AsRegisterPairHigh<Register>(); |
| 2000 | if (is_volatile) { |
| 2001 | XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>(); |
| 2002 | XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>(); |
| 2003 | __ movd(temp1, value_lo); |
| 2004 | __ movd(temp2, value_hi); |
| 2005 | __ punpckldq(temp1, temp2); |
| 2006 | __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1); |
| 2007 | } else { |
| 2008 | __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo); |
| 2009 | __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi); |
| 2010 | } |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2011 | } else if (kPoisonHeapReferences && type == DataType::Type::kReference) { |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 2012 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
| 2013 | __ movl(temp, value_loc.AsRegister<Register>()); |
| 2014 | __ PoisonHeapReference(temp); |
| 2015 | __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2016 | } else { |
| 2017 | __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>()); |
| 2018 | } |
| 2019 | |
| 2020 | if (is_volatile) { |
Mark P Mendell | 17077d8 | 2015-12-16 19:15:59 +0000 | [diff] [blame] | 2021 | codegen->MemoryFence(); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2022 | } |
| 2023 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2024 | if (type == DataType::Type::kReference) { |
Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 2025 | bool value_can_be_null = true; // TODO: Worth finding out this information? |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2026 | codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(), |
| 2027 | locations->GetTemp(1).AsRegister<Register>(), |
| 2028 | base, |
Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 2029 | value_loc.AsRegister<Register>(), |
| 2030 | value_can_be_null); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2031 | } |
| 2032 | } |
| 2033 | |
| 2034 | void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2035 | VisitJdkUnsafePut(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2036 | } |
| 2037 | void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2038 | VisitJdkUnsafePutOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2039 | } |
| 2040 | void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2041 | VisitJdkUnsafePutVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2042 | } |
| 2043 | void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2044 | VisitJdkUnsafePutObject(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2045 | } |
| 2046 | void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2047 | VisitJdkUnsafePutObjectOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2048 | } |
| 2049 | void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2050 | VisitJdkUnsafePutObjectVolatile(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2051 | } |
| 2052 | void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2053 | VisitJdkUnsafePutLong(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2054 | } |
| 2055 | void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2056 | VisitJdkUnsafePutLongOrdered(invoke); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2057 | } |
| 2058 | void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2059 | VisitJdkUnsafePutLongVolatile(invoke); |
| 2060 | } |
| 2061 | |
| 2062 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePut(HInvoke* invoke) { |
| 2063 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_); |
| 2064 | } |
| 2065 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) { |
| 2066 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_); |
| 2067 | } |
| 2068 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutVolatile(HInvoke* invoke) { |
| 2069 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ true, codegen_); |
| 2070 | } |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 2071 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutRelease(HInvoke* invoke) { |
| 2072 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ true, codegen_); |
| 2073 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2074 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutObject(HInvoke* invoke) { |
| 2075 | GenUnsafePut( |
| 2076 | invoke->GetLocations(), DataType::Type::kReference, /*is_volatile=*/ false, codegen_); |
| 2077 | } |
| 2078 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutObjectOrdered(HInvoke* invoke) { |
| 2079 | GenUnsafePut( |
| 2080 | invoke->GetLocations(), DataType::Type::kReference, /*is_volatile=*/ false, codegen_); |
| 2081 | } |
| 2082 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutObjectVolatile(HInvoke* invoke) { |
| 2083 | GenUnsafePut( |
| 2084 | invoke->GetLocations(), DataType::Type::kReference, /*is_volatile=*/ true, codegen_); |
| 2085 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 2086 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutObjectRelease(HInvoke* invoke) { |
| 2087 | GenUnsafePut( |
| 2088 | invoke->GetLocations(), DataType::Type::kReference, /*is_volatile=*/ true, codegen_); |
| 2089 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2090 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutLong(HInvoke* invoke) { |
| 2091 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /*is_volatile=*/ false, codegen_); |
| 2092 | } |
| 2093 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutLongOrdered(HInvoke* invoke) { |
| 2094 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /*is_volatile=*/ false, codegen_); |
| 2095 | } |
| 2096 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutLongVolatile(HInvoke* invoke) { |
| 2097 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /*is_volatile=*/ true, codegen_); |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2098 | } |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 2099 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutLongRelease(HInvoke* invoke) { |
| 2100 | GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt64, /*is_volatile=*/ true, codegen_); |
| 2101 | } |
Mark Mendell | 09ed1a3 | 2015-03-25 08:30:06 -0400 | [diff] [blame] | 2102 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2103 | static void CreateIntIntIntIntIntToInt(ArenaAllocator* allocator, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2104 | DataType::Type type, |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2105 | HInvoke* invoke) { |
Ulya Trafimovich | ec696e5 | 2022-01-26 10:21:32 +0000 | [diff] [blame] | 2106 | const bool can_call = kEmitCompilerReadBarrier && |
| 2107 | kUseBakerReadBarrier && |
| 2108 | IsUnsafeCASObject(invoke); |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2109 | LocationSummary* locations = |
| 2110 | new (allocator) LocationSummary(invoke, |
| 2111 | can_call |
| 2112 | ? LocationSummary::kCallOnSlowPath |
| 2113 | : LocationSummary::kNoCall, |
| 2114 | kIntrinsified); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2115 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 2116 | locations->SetInAt(1, Location::RequiresRegister()); |
| 2117 | // Offset is a long, but in 32-bit mode we only need the low word. |
| 2118 | // TODO: Can the invoke be updated here to remove a TypeConvert to Long? |
| 2119 | locations->SetInAt(2, Location::RequiresRegister()); |
| 2120 | // Expected value must be in EAX or EDX:EAX. |
| 2121 | // For long, new value must be in ECX:EBX. |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2122 | if (type == DataType::Type::kInt64) { |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2123 | locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX)); |
| 2124 | locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX)); |
| 2125 | } else { |
| 2126 | locations->SetInAt(3, Location::RegisterLocation(EAX)); |
| 2127 | locations->SetInAt(4, Location::RequiresRegister()); |
| 2128 | } |
| 2129 | |
| 2130 | // Force a byte register for the output. |
| 2131 | locations->SetOut(Location::RegisterLocation(EAX)); |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2132 | if (type == DataType::Type::kReference) { |
Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 2133 | // Need temporary registers for card-marking, and possibly for |
| 2134 | // (Baker) read barrier. |
Roland Levillain | b488b78 | 2015-10-22 11:38:49 +0100 | [diff] [blame] | 2135 | locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too. |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2136 | // Need a byte register for marking. |
| 2137 | locations->AddTemp(Location::RegisterLocation(ECX)); |
| 2138 | } |
| 2139 | } |
| 2140 | |
| 2141 | void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2142 | VisitJdkUnsafeCASInt(invoke); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2143 | } |
| 2144 | |
| 2145 | void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2146 | VisitJdkUnsafeCASLong(invoke); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2147 | } |
| 2148 | |
| 2149 | void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2150 | VisitJdkUnsafeCASObject(invoke); |
| 2151 | } |
| 2152 | |
| 2153 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCASInt(HInvoke* invoke) { |
Ulya Trafimovich | 70102e6 | 2022-01-14 15:20:38 +0000 | [diff] [blame] | 2154 | // `jdk.internal.misc.Unsafe.compareAndSwapInt` has compare-and-set semantics (see javadoc). |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2155 | VisitJdkUnsafeCompareAndSetInt(invoke); |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2156 | } |
| 2157 | |
| 2158 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCASLong(HInvoke* invoke) { |
Ulya Trafimovich | 70102e6 | 2022-01-14 15:20:38 +0000 | [diff] [blame] | 2159 | // `jdk.internal.misc.Unsafe.compareAndSwapLong` has compare-and-set semantics (see javadoc). |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2160 | VisitJdkUnsafeCompareAndSetLong(invoke); |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2161 | } |
| 2162 | |
| 2163 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCASObject(HInvoke* invoke) { |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2164 | // `jdk.internal.misc.Unsafe.compareAndSwapObject` has compare-and-set semantics (see javadoc). |
| 2165 | VisitJdkUnsafeCompareAndSetObject(invoke); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2166 | } |
| 2167 | |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 2168 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCompareAndSetInt(HInvoke* invoke) { |
| 2169 | CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kInt32, invoke); |
| 2170 | } |
| 2171 | |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 2172 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCompareAndSetLong(HInvoke* invoke) { |
| 2173 | CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kInt64, invoke); |
| 2174 | } |
| 2175 | |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2176 | void IntrinsicLocationsBuilderX86::VisitJdkUnsafeCompareAndSetObject(HInvoke* invoke) { |
| 2177 | // The only supported read barrier implementation is the Baker-style read barriers. |
| 2178 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
| 2179 | return; |
| 2180 | } |
| 2181 | |
| 2182 | CreateIntIntIntIntIntToInt(allocator_, DataType::Type::kReference, invoke); |
| 2183 | } |
| 2184 | |
Andra Danciu | eb2c9dd | 2020-09-14 13:22:40 +0000 | [diff] [blame] | 2185 | static void GenPrimitiveLockedCmpxchg(DataType::Type type, |
| 2186 | CodeGeneratorX86* codegen, |
| 2187 | Location expected_value, |
| 2188 | Location new_value, |
| 2189 | Register base, |
| 2190 | Register offset, |
| 2191 | // Only necessary for floating point |
| 2192 | Register temp = Register::kNoRegister) { |
Roland Levillain | b488b78 | 2015-10-22 11:38:49 +0100 | [diff] [blame] | 2193 | X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2194 | |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2195 | if (DataType::Kind(type) == DataType::Type::kInt32) { |
| 2196 | DCHECK_EQ(expected_value.AsRegister<Register>(), EAX); |
| 2197 | } |
| 2198 | |
| 2199 | // The address of the field within the holding object. |
| 2200 | Address field_addr(base, offset, TIMES_1, 0); |
| 2201 | |
| 2202 | switch (type) { |
| 2203 | case DataType::Type::kBool: |
| 2204 | case DataType::Type::kInt8: |
| 2205 | __ LockCmpxchgb(field_addr, new_value.AsRegister<ByteRegister>()); |
| 2206 | break; |
| 2207 | case DataType::Type::kInt16: |
| 2208 | case DataType::Type::kUint16: |
| 2209 | __ LockCmpxchgw(field_addr, new_value.AsRegister<Register>()); |
| 2210 | break; |
| 2211 | case DataType::Type::kInt32: |
| 2212 | __ LockCmpxchgl(field_addr, new_value.AsRegister<Register>()); |
| 2213 | break; |
| 2214 | case DataType::Type::kFloat32: { |
| 2215 | // cmpxchg requires the expected value to be in EAX so the new value must be elsewhere. |
| 2216 | DCHECK_NE(temp, EAX); |
| 2217 | // EAX is both an input and an output for cmpxchg. |
| 2218 | codegen->Move32(Location::RegisterLocation(EAX), expected_value); |
| 2219 | codegen->Move32(Location::RegisterLocation(temp), new_value); |
| 2220 | __ LockCmpxchgl(field_addr, temp); |
| 2221 | break; |
| 2222 | } |
| 2223 | case DataType::Type::kInt64: |
| 2224 | // Ensure the expected value is in EDX:EAX and that the new |
| 2225 | // value is in ECX:EBX (required by the CMPXCHG8B instruction). |
| 2226 | DCHECK_EQ(expected_value.AsRegisterPairLow<Register>(), EAX); |
| 2227 | DCHECK_EQ(expected_value.AsRegisterPairHigh<Register>(), EDX); |
| 2228 | DCHECK_EQ(new_value.AsRegisterPairLow<Register>(), EBX); |
| 2229 | DCHECK_EQ(new_value.AsRegisterPairHigh<Register>(), ECX); |
| 2230 | __ LockCmpxchg8b(field_addr); |
| 2231 | break; |
| 2232 | default: |
| 2233 | LOG(FATAL) << "Unexpected CAS type " << type; |
| 2234 | } |
| 2235 | // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we |
| 2236 | // don't need scheduling barriers at this time. |
Andra Danciu | eb2c9dd | 2020-09-14 13:22:40 +0000 | [diff] [blame] | 2237 | } |
| 2238 | |
| 2239 | static void GenPrimitiveCAS(DataType::Type type, |
| 2240 | CodeGeneratorX86* codegen, |
| 2241 | Location expected_value, |
| 2242 | Location new_value, |
| 2243 | Register base, |
| 2244 | Register offset, |
| 2245 | Location out, |
| 2246 | // Only necessary for floating point |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 2247 | Register temp = Register::kNoRegister, |
| 2248 | bool is_cmpxchg = false) { |
Andra Danciu | eb2c9dd | 2020-09-14 13:22:40 +0000 | [diff] [blame] | 2249 | X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 2250 | |
| 2251 | if (!is_cmpxchg || DataType::Kind(type) == DataType::Type::kInt32) { |
| 2252 | DCHECK_EQ(out.AsRegister<Register>(), EAX); |
| 2253 | } |
Andra Danciu | eb2c9dd | 2020-09-14 13:22:40 +0000 | [diff] [blame] | 2254 | |
| 2255 | GenPrimitiveLockedCmpxchg(type, codegen, expected_value, new_value, base, offset, temp); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2256 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 2257 | if (is_cmpxchg) { |
| 2258 | // Sign-extend, zero-extend, or move the result as needed. |
| 2259 | switch (type) { |
| 2260 | case DataType::Type::kBool: |
| 2261 | __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>()); |
| 2262 | break; |
| 2263 | case DataType::Type::kInt8: |
| 2264 | __ movsxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>()); |
| 2265 | break; |
| 2266 | case DataType::Type::kInt16: |
| 2267 | __ movsxw(out.AsRegister<Register>(), out.AsRegister<Register>()); |
| 2268 | break; |
| 2269 | case DataType::Type::kUint16: |
| 2270 | __ movzxw(out.AsRegister<Register>(), out.AsRegister<Register>()); |
| 2271 | break; |
| 2272 | case DataType::Type::kFloat32: |
| 2273 | __ movd(out.AsFpuRegister<XmmRegister>(), EAX); |
| 2274 | break; |
| 2275 | default: |
| 2276 | // Nothing to do |
| 2277 | break; |
| 2278 | } |
| 2279 | } else { |
| 2280 | // Convert ZF into the Boolean result. |
| 2281 | __ setb(kZero, out.AsRegister<Register>()); |
| 2282 | __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>()); |
| 2283 | } |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2284 | } |
| 2285 | |
| 2286 | static void GenReferenceCAS(HInvoke* invoke, |
| 2287 | CodeGeneratorX86* codegen, |
| 2288 | Location expected_value, |
| 2289 | Location new_value, |
| 2290 | Register base, |
| 2291 | Register offset, |
| 2292 | Register temp, |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 2293 | Register temp2, |
| 2294 | bool is_cmpxchg = false) { |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2295 | X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler()); |
| 2296 | LocationSummary* locations = invoke->GetLocations(); |
| 2297 | Location out = locations->Out(); |
| 2298 | |
| 2299 | // The address of the field within the holding object. |
| 2300 | Address field_addr(base, offset, TIMES_1, 0); |
| 2301 | |
Andra Danciu | afad9f9 | 2020-09-15 15:38:32 +0000 | [diff] [blame] | 2302 | Register value = new_value.AsRegister<Register>(); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2303 | Register expected = expected_value.AsRegister<Register>(); |
| 2304 | DCHECK_EQ(expected, EAX); |
| 2305 | DCHECK_NE(temp, temp2); |
| 2306 | |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2307 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 2308 | // Need to make sure the reference stored in the field is a to-space |
| 2309 | // one before attempting the CAS, or the CAS could fail incorrectly. |
| 2310 | codegen->GenerateReferenceLoadWithBakerReadBarrier( |
| 2311 | invoke, |
| 2312 | // Unused, used only as a "temporary" within the read barrier. |
| 2313 | Location::RegisterLocation(temp), |
| 2314 | base, |
| 2315 | field_addr, |
| 2316 | /* needs_null_check= */ false, |
| 2317 | /* always_update_field= */ true, |
| 2318 | &temp2); |
| 2319 | } |
| 2320 | bool base_equals_value = (base == value); |
| 2321 | if (kPoisonHeapReferences) { |
| 2322 | if (base_equals_value) { |
| 2323 | // If `base` and `value` are the same register location, move |
| 2324 | // `value` to a temporary register. This way, poisoning |
| 2325 | // `value` won't invalidate `base`. |
| 2326 | value = temp; |
| 2327 | __ movl(value, base); |
| 2328 | } |
| 2329 | |
| 2330 | // Check that the register allocator did not assign the location |
| 2331 | // of `expected` (EAX) to `value` nor to `base`, so that heap |
| 2332 | // poisoning (when enabled) works as intended below. |
| 2333 | // - If `value` were equal to `expected`, both references would |
| 2334 | // be poisoned twice, meaning they would not be poisoned at |
| 2335 | // all, as heap poisoning uses address negation. |
| 2336 | // - If `base` were equal to `expected`, poisoning `expected` |
| 2337 | // would invalidate `base`. |
| 2338 | DCHECK_NE(value, expected); |
| 2339 | DCHECK_NE(base, expected); |
| 2340 | __ PoisonHeapReference(expected); |
| 2341 | __ PoisonHeapReference(value); |
| 2342 | } |
| 2343 | __ LockCmpxchgl(field_addr, value); |
| 2344 | |
| 2345 | // LOCK CMPXCHG has full barrier semantics, and we don't need |
| 2346 | // scheduling barriers at this time. |
| 2347 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 2348 | if (is_cmpxchg) { |
| 2349 | DCHECK_EQ(out.AsRegister<Register>(), EAX); |
| 2350 | __ MaybeUnpoisonHeapReference(out.AsRegister<Register>()); |
| 2351 | } else { |
| 2352 | // Convert ZF into the Boolean result. |
| 2353 | __ setb(kZero, out.AsRegister<Register>()); |
| 2354 | __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>()); |
| 2355 | } |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2356 | |
Andra Danciu | afad9f9 | 2020-09-15 15:38:32 +0000 | [diff] [blame] | 2357 | // Mark the card for the object only if the new value was actually stored (the CAS succeeded). |
| 2358 | bool value_can_be_null = true; // TODO: Worth finding out this information? |
| 2359 | NearLabel skip_mark_gc_card; |
| 2360 | __ j(kNotZero, &skip_mark_gc_card); |
| 2361 | codegen->MarkGCCard(temp, temp2, base, value, value_can_be_null); |
| 2362 | __ Bind(&skip_mark_gc_card); |
| 2363 | |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2364 | // If heap poisoning is enabled, we need to unpoison the values |
| 2365 | // that were poisoned earlier. |
| 2366 | if (kPoisonHeapReferences) { |
| 2367 | if (base_equals_value) { |
| 2368 | // `value` has been moved to a temporary register, no need to |
| 2369 | // unpoison it. |
| 2370 | } else { |
| 2371 | // Ensure `value` is different from `out`, so that unpoisoning |
| 2372 | // the former does not invalidate the latter. |
| 2373 | DCHECK_NE(value, out.AsRegister<Register>()); |
| 2374 | __ UnpoisonHeapReference(value); |
| 2375 | } |
| 2376 | } |
| 2377 | // Do not unpoison the reference contained in register |
| 2378 | // `expected`, as it is the same as register `out` (EAX). |
| 2379 | } |
| 2380 | |
| 2381 | static void GenCAS(DataType::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) { |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2382 | LocationSummary* locations = invoke->GetLocations(); |
| 2383 | |
| 2384 | Register base = locations->InAt(1).AsRegister<Register>(); |
| 2385 | Register offset = locations->InAt(2).AsRegisterPairLow<Register>(); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2386 | Location expected_value = locations->InAt(3); |
| 2387 | Location new_value = locations->InAt(4); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2388 | Location out = locations->Out(); |
| 2389 | DCHECK_EQ(out.AsRegister<Register>(), EAX); |
| 2390 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2391 | if (type == DataType::Type::kReference) { |
Andra Danciu | ff6d5fa | 2020-09-16 07:08:24 +0000 | [diff] [blame] | 2392 | // The only read barrier implementation supporting the |
| 2393 | // UnsafeCASObject intrinsic is the Baker-style read barriers. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 2394 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Andra Danciu | ff6d5fa | 2020-09-16 07:08:24 +0000 | [diff] [blame] | 2395 | |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2396 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 2397 | Register temp2 = locations->GetTemp(1).AsRegister<Register>(); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2398 | GenReferenceCAS(invoke, codegen, expected_value, new_value, base, offset, temp, temp2); |
Roland Levillain | b488b78 | 2015-10-22 11:38:49 +0100 | [diff] [blame] | 2399 | } else { |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 2400 | DCHECK(!DataType::IsFloatingPointType(type)); |
| 2401 | GenPrimitiveCAS(type, codegen, expected_value, new_value, base, offset, out); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 2402 | } |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2403 | } |
| 2404 | |
| 2405 | void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2406 | VisitJdkUnsafeCASInt(invoke); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2407 | } |
| 2408 | |
| 2409 | void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) { |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2410 | VisitJdkUnsafeCASLong(invoke); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2411 | } |
| 2412 | |
| 2413 | void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) { |
Roland Levillain | a1aa3b1 | 2016-10-26 13:03:38 +0100 | [diff] [blame] | 2414 | // The only read barrier implementation supporting the |
| 2415 | // UnsafeCASObject intrinsic is the Baker-style read barriers. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 2416 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 2417 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2418 | GenCAS(DataType::Type::kReference, invoke, codegen_); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2419 | } |
| 2420 | |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2421 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCASInt(HInvoke* invoke) { |
Ulya Trafimovich | 70102e6 | 2022-01-14 15:20:38 +0000 | [diff] [blame] | 2422 | // `jdk.internal.misc.Unsafe.compareAndSwapInt` has compare-and-set semantics (see javadoc). |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2423 | VisitJdkUnsafeCompareAndSetInt(invoke); |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2424 | } |
| 2425 | |
| 2426 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCASLong(HInvoke* invoke) { |
Ulya Trafimovich | 70102e6 | 2022-01-14 15:20:38 +0000 | [diff] [blame] | 2427 | // `jdk.internal.misc.Unsafe.compareAndSwapLong` has compare-and-set semantics (see javadoc). |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2428 | VisitJdkUnsafeCompareAndSetLong(invoke); |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2429 | } |
| 2430 | |
| 2431 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCASObject(HInvoke* invoke) { |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2432 | // `jdk.internal.misc.Unsafe.compareAndSwapObject` has compare-and-set semantics (see javadoc). |
| 2433 | VisitJdkUnsafeCompareAndSetObject(invoke); |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2434 | } |
| 2435 | |
Sorin Basca | 0069ad7 | 2021-09-17 17:33:09 +0000 | [diff] [blame] | 2436 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCompareAndSetInt(HInvoke* invoke) { |
| 2437 | GenCAS(DataType::Type::kInt32, invoke, codegen_); |
| 2438 | } |
| 2439 | |
Sorin Basca | 507cf90 | 2021-10-06 12:04:56 +0000 | [diff] [blame] | 2440 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCompareAndSetLong(HInvoke* invoke) { |
| 2441 | GenCAS(DataType::Type::kInt64, invoke, codegen_); |
| 2442 | } |
| 2443 | |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2444 | void IntrinsicCodeGeneratorX86::VisitJdkUnsafeCompareAndSetObject(HInvoke* invoke) { |
| 2445 | // The only supported read barrier implementation is the Baker-style read barriers. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 2446 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Ulya Trafimovich | 37af463 | 2022-01-13 15:27:38 +0000 | [diff] [blame] | 2447 | |
| 2448 | GenCAS(DataType::Type::kReference, invoke, codegen_); |
| 2449 | } |
Sorin Basca | 2f01e8e | 2021-06-18 06:44:07 +0000 | [diff] [blame] | 2450 | |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2451 | void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2452 | LocationSummary* locations = |
| 2453 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2454 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2455 | locations->SetOut(Location::SameAsFirstInput()); |
| 2456 | locations->AddTemp(Location::RequiresRegister()); |
| 2457 | } |
| 2458 | |
| 2459 | static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask, |
| 2460 | X86Assembler* assembler) { |
| 2461 | Immediate imm_shift(shift); |
| 2462 | Immediate imm_mask(mask); |
| 2463 | __ movl(temp, reg); |
| 2464 | __ shrl(reg, imm_shift); |
| 2465 | __ andl(temp, imm_mask); |
| 2466 | __ andl(reg, imm_mask); |
| 2467 | __ shll(temp, imm_shift); |
| 2468 | __ orl(reg, temp); |
| 2469 | } |
| 2470 | |
| 2471 | void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) { |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2472 | X86Assembler* assembler = GetAssembler(); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2473 | LocationSummary* locations = invoke->GetLocations(); |
| 2474 | |
| 2475 | Register reg = locations->InAt(0).AsRegister<Register>(); |
| 2476 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
| 2477 | |
| 2478 | /* |
| 2479 | * First use one bswap instruction to reverse the byte order, then use 3 rounds of |
| 2480 | * bit swapping to reverse the bits of a number x. Using bswap saves instructions |
| 2481 | * compared to the generic luni implementation, which needs 5 rounds of bit swapping. |
| 2482 | * x = bswap x |
| 2483 | * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555; |
| 2484 | * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333; |
| 2485 | * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F; |
| 2486 | */ |
| 2487 | __ bswapl(reg); |
| 2488 | SwapBits(reg, temp, 1, 0x55555555, assembler); |
| 2489 | SwapBits(reg, temp, 2, 0x33333333, assembler); |
| 2490 | SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler); |
| 2491 | } |
| 2492 | |
| 2493 | void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2494 | LocationSummary* locations = |
| 2495 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2496 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2497 | locations->SetOut(Location::SameAsFirstInput()); |
| 2498 | locations->AddTemp(Location::RequiresRegister()); |
| 2499 | } |
| 2500 | |
| 2501 | void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) { |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2502 | X86Assembler* assembler = GetAssembler(); |
Mark Mendell | 58d25fd | 2015-04-03 14:52:31 -0400 | [diff] [blame] | 2503 | LocationSummary* locations = invoke->GetLocations(); |
| 2504 | |
| 2505 | Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>(); |
| 2506 | Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>(); |
| 2507 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
| 2508 | |
| 2509 | // We want to swap the high and low words, bswap each one, and then perform the same |
| 2510 | // three rounds of bit swapping as in the 32-bit reverse. |
| 2511 | // Exchange high and low. |
| 2512 | __ movl(temp, reg_low); |
| 2513 | __ movl(reg_low, reg_high); |
| 2514 | __ movl(reg_high, temp); |
| 2515 | |
| 2516 | // bit-reverse low |
| 2517 | __ bswapl(reg_low); |
| 2518 | SwapBits(reg_low, temp, 1, 0x55555555, assembler); |
| 2519 | SwapBits(reg_low, temp, 2, 0x33333333, assembler); |
| 2520 | SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler); |
| 2521 | |
| 2522 | // bit-reverse high |
| 2523 | __ bswapl(reg_high); |
| 2524 | SwapBits(reg_high, temp, 1, 0x55555555, assembler); |
| 2525 | SwapBits(reg_high, temp, 2, 0x33333333, assembler); |
| 2526 | SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler); |
| 2527 | } |
| 2528 | |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2529 | static void CreateBitCountLocations( |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2530 | ArenaAllocator* allocator, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) { |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2531 | if (!codegen->GetInstructionSetFeatures().HasPopCnt()) { |
| 2532 | // Do nothing if there is no popcnt support. This results in generating |
| 2533 | // a call for the intrinsic rather than direct code. |
| 2534 | return; |
| 2535 | } |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2536 | LocationSummary* locations = |
| 2537 | new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2538 | if (is_long) { |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2539 | locations->AddTemp(Location::RequiresRegister()); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2540 | } |
Aart Bik | 2a94607 | 2016-01-21 12:49:00 -0800 | [diff] [blame] | 2541 | locations->SetInAt(0, Location::Any()); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2542 | locations->SetOut(Location::RequiresRegister()); |
| 2543 | } |
| 2544 | |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2545 | static void GenBitCount(X86Assembler* assembler, |
| 2546 | CodeGeneratorX86* codegen, |
| 2547 | HInvoke* invoke, bool is_long) { |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2548 | LocationSummary* locations = invoke->GetLocations(); |
| 2549 | Location src = locations->InAt(0); |
| 2550 | Register out = locations->Out().AsRegister<Register>(); |
| 2551 | |
| 2552 | if (invoke->InputAt(0)->IsConstant()) { |
| 2553 | // Evaluate this at compile time. |
| 2554 | int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant()); |
Roland Levillain | fa3912e | 2016-04-01 18:21:55 +0100 | [diff] [blame] | 2555 | int32_t result = is_long |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2556 | ? POPCOUNT(static_cast<uint64_t>(value)) |
| 2557 | : POPCOUNT(static_cast<uint32_t>(value)); |
Roland Levillain | fa3912e | 2016-04-01 18:21:55 +0100 | [diff] [blame] | 2558 | codegen->Load32BitValue(out, result); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2559 | return; |
| 2560 | } |
| 2561 | |
| 2562 | // Handle the non-constant cases. |
| 2563 | if (!is_long) { |
| 2564 | if (src.IsRegister()) { |
| 2565 | __ popcntl(out, src.AsRegister<Register>()); |
| 2566 | } else { |
| 2567 | DCHECK(src.IsStackSlot()); |
| 2568 | __ popcntl(out, Address(ESP, src.GetStackIndex())); |
| 2569 | } |
Aart Bik | 2a94607 | 2016-01-21 12:49:00 -0800 | [diff] [blame] | 2570 | } else { |
| 2571 | // The 64-bit case counts the two 32-bit halves separately and then sums them. |
| 2572 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
| 2573 | if (src.IsRegisterPair()) { |
| 2574 | __ popcntl(temp, src.AsRegisterPairLow<Register>()); |
| 2575 | __ popcntl(out, src.AsRegisterPairHigh<Register>()); |
| 2576 | } else { |
| 2577 | DCHECK(src.IsDoubleStackSlot()); |
| 2578 | __ popcntl(temp, Address(ESP, src.GetStackIndex())); |
| 2579 | __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize))); |
| 2580 | } |
| 2581 | __ addl(out, temp); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2582 | } |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2583 | } |
| 2584 | |
| 2585 | void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2586 | CreateBitCountLocations(allocator_, codegen_, invoke, /* is_long= */ false); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2587 | } |
| 2588 | |
| 2589 | void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2590 | GenBitCount(GetAssembler(), codegen_, invoke, /* is_long= */ false); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2591 | } |
| 2592 | |
| 2593 | void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2594 | CreateBitCountLocations(allocator_, codegen_, invoke, /* is_long= */ true); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2595 | } |
| 2596 | |
| 2597 | void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2598 | GenBitCount(GetAssembler(), codegen_, invoke, /* is_long= */ true); |
Aart Bik | c39dac1 | 2016-01-21 08:59:48 -0800 | [diff] [blame] | 2599 | } |
| 2600 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2601 | static void CreateLeadingZeroLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is_long) { |
| 2602 | LocationSummary* locations = |
| 2603 | new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2604 | if (is_long) { |
| 2605 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2606 | } else { |
| 2607 | locations->SetInAt(0, Location::Any()); |
| 2608 | } |
| 2609 | locations->SetOut(Location::RequiresRegister()); |
| 2610 | } |
| 2611 | |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2612 | static void GenLeadingZeros(X86Assembler* assembler, |
| 2613 | CodeGeneratorX86* codegen, |
| 2614 | HInvoke* invoke, bool is_long) { |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2615 | LocationSummary* locations = invoke->GetLocations(); |
| 2616 | Location src = locations->InAt(0); |
| 2617 | Register out = locations->Out().AsRegister<Register>(); |
| 2618 | |
| 2619 | if (invoke->InputAt(0)->IsConstant()) { |
| 2620 | // Evaluate this at compile time. |
| 2621 | int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant()); |
| 2622 | if (value == 0) { |
| 2623 | value = is_long ? 64 : 32; |
| 2624 | } else { |
| 2625 | value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value)); |
| 2626 | } |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2627 | codegen->Load32BitValue(out, value); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2628 | return; |
| 2629 | } |
| 2630 | |
| 2631 | // Handle the non-constant cases. |
| 2632 | if (!is_long) { |
| 2633 | if (src.IsRegister()) { |
| 2634 | __ bsrl(out, src.AsRegister<Register>()); |
| 2635 | } else { |
| 2636 | DCHECK(src.IsStackSlot()); |
| 2637 | __ bsrl(out, Address(ESP, src.GetStackIndex())); |
| 2638 | } |
| 2639 | |
 | 2640 | // BSR sets ZF if the input was zero; in that case the output register is undefined.
Mark Mendell | 0c9497d | 2015-08-21 09:30:05 -0400 | [diff] [blame] | 2641 | NearLabel all_zeroes, done; |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2642 | __ j(kEqual, &all_zeroes); |
| 2643 | |
| 2644 | // Correct the result from BSR to get the final CLZ result. |
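| | // BSR yields the bit index i of the most significant set bit, so
| | // CLZ == 31 - i; since 0 <= i <= 31, 31 - i == 31 ^ i. For example,
| | // input 0x00010000 gives i == 16 and 31 ^ 16 == 15 leading zeros.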
| 2645 | __ xorl(out, Immediate(31)); |
| 2646 | __ jmp(&done); |
| 2647 | |
| 2648 | // Fix the zero case with the expected result. |
| 2649 | __ Bind(&all_zeroes); |
| 2650 | __ movl(out, Immediate(32)); |
| 2651 | |
| 2652 | __ Bind(&done); |
| 2653 | return; |
| 2654 | } |
| 2655 | |
 | 2656 | // The 64-bit case needs to worry about both parts of the register.
| 2657 | DCHECK(src.IsRegisterPair()); |
| 2658 | Register src_lo = src.AsRegisterPairLow<Register>(); |
| 2659 | Register src_hi = src.AsRegisterPairHigh<Register>(); |
Mark Mendell | 0c9497d | 2015-08-21 09:30:05 -0400 | [diff] [blame] | 2660 | NearLabel handle_low, done, all_zeroes; |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2661 | |
| 2662 | // Is the high word zero? |
| 2663 | __ testl(src_hi, src_hi); |
| 2664 | __ j(kEqual, &handle_low); |
| 2665 | |
| 2666 | // High word is not zero. We know that the BSR result is defined in this case. |
| 2667 | __ bsrl(out, src_hi); |
| 2668 | |
| 2669 | // Correct the result from BSR to get the final CLZ result. |
| 2670 | __ xorl(out, Immediate(31)); |
| 2671 | __ jmp(&done); |
| 2672 | |
| 2673 | // High word was zero. We have to compute the low word count and add 32. |
| 2674 | __ Bind(&handle_low); |
| 2675 | __ bsrl(out, src_lo); |
| 2676 | __ j(kEqual, &all_zeroes); |
| 2677 | |
| 2678 | // We had a valid result. Use an XOR to both correct the result and add 32. |
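| | // With the high word zero, CLZ == 32 + (31 - i) == 63 - i for the low-word
| | // bit index i, and 63 - i == 63 ^ i when 0 <= i <= 31, so a single XOR
| | // both corrects the BSR result and adds the 32 high-word zeros.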
| 2679 | __ xorl(out, Immediate(63)); |
| 2680 | __ jmp(&done); |
| 2681 | |
| 2682 | // All zero case. |
| 2683 | __ Bind(&all_zeroes); |
| 2684 | __ movl(out, Immediate(64)); |
| 2685 | |
| 2686 | __ Bind(&done); |
| 2687 | } |
| 2688 | |
| 2689 | void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2690 | CreateLeadingZeroLocations(allocator_, invoke, /* is_long= */ false); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2691 | } |
| 2692 | |
| 2693 | void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2694 | GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ false); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2695 | } |
| 2696 | |
| 2697 | void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2698 | CreateLeadingZeroLocations(allocator_, invoke, /* is_long= */ true); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2699 | } |
| 2700 | |
| 2701 | void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2702 | GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ true); |
Mark Mendell | d589767 | 2015-08-12 21:16:41 -0400 | [diff] [blame] | 2703 | } |
| 2704 | |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 2705 | static void CreateTrailingZeroLocations(ArenaAllocator* allocator, HInvoke* invoke, bool is_long) { |
| 2706 | LocationSummary* locations = |
| 2707 | new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2708 | if (is_long) { |
| 2709 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2710 | } else { |
| 2711 | locations->SetInAt(0, Location::Any()); |
| 2712 | } |
| 2713 | locations->SetOut(Location::RequiresRegister()); |
| 2714 | } |
| 2715 | |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2716 | static void GenTrailingZeros(X86Assembler* assembler, |
| 2717 | CodeGeneratorX86* codegen, |
| 2718 | HInvoke* invoke, bool is_long) { |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2719 | LocationSummary* locations = invoke->GetLocations(); |
| 2720 | Location src = locations->InAt(0); |
| 2721 | Register out = locations->Out().AsRegister<Register>(); |
| 2722 | |
| 2723 | if (invoke->InputAt(0)->IsConstant()) { |
| 2724 | // Evaluate this at compile time. |
| 2725 | int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant()); |
| 2726 | if (value == 0) { |
| 2727 | value = is_long ? 64 : 32; |
| 2728 | } else { |
| 2729 | value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value)); |
| 2730 | } |
Aart Bik | a19616e | 2016-02-01 18:57:58 -0800 | [diff] [blame] | 2731 | codegen->Load32BitValue(out, value); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2732 | return; |
| 2733 | } |
| 2734 | |
| 2735 | // Handle the non-constant cases. |
| 2736 | if (!is_long) { |
| 2737 | if (src.IsRegister()) { |
| 2738 | __ bsfl(out, src.AsRegister<Register>()); |
| 2739 | } else { |
| 2740 | DCHECK(src.IsStackSlot()); |
| 2741 | __ bsfl(out, Address(ESP, src.GetStackIndex())); |
| 2742 | } |
| 2743 | |
 | 2744 | // BSF sets ZF if the input was zero; in that case the output register is undefined.
| 2745 | NearLabel done; |
| 2746 | __ j(kNotEqual, &done); |
| 2747 | |
| 2748 | // Fix the zero case with the expected result. |
| 2749 | __ movl(out, Immediate(32)); |
| 2750 | |
| 2751 | __ Bind(&done); |
| 2752 | return; |
| 2753 | } |
| 2754 | |
 | 2755 | // The 64-bit case needs to worry about both parts of the register.
| 2756 | DCHECK(src.IsRegisterPair()); |
| 2757 | Register src_lo = src.AsRegisterPairLow<Register>(); |
| 2758 | Register src_hi = src.AsRegisterPairHigh<Register>(); |
| 2759 | NearLabel done, all_zeroes; |
| 2760 | |
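| | // The decomposition used below: ctz64(v) is ctz32(lo) when lo != 0,
| | // 32 + ctz32(hi) when lo == 0 but hi != 0, and 64 when both words are 0.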
| 2761 | // If the low word is zero, then ZF will be set. If not, we have the answer. |
| 2762 | __ bsfl(out, src_lo); |
| 2763 | __ j(kNotEqual, &done); |
| 2764 | |
| 2765 | // Low word was zero. We have to compute the high word count and add 32. |
| 2766 | __ bsfl(out, src_hi); |
| 2767 | __ j(kEqual, &all_zeroes); |
| 2768 | |
| 2769 | // We had a valid result. Add 32 to account for the low word being zero. |
| 2770 | __ addl(out, Immediate(32)); |
| 2771 | __ jmp(&done); |
| 2772 | |
| 2773 | // All zero case. |
| 2774 | __ Bind(&all_zeroes); |
| 2775 | __ movl(out, Immediate(64)); |
| 2776 | |
| 2777 | __ Bind(&done); |
| 2778 | } |
| 2779 | |
| 2780 | void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2781 | CreateTrailingZeroLocations(allocator_, invoke, /* is_long= */ false); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2782 | } |
| 2783 | |
| 2784 | void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2785 | GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ false); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2786 | } |
| 2787 | |
| 2788 | void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2789 | CreateTrailingZeroLocations(allocator_, invoke, /* is_long= */ true); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2790 | } |
| 2791 | |
| 2792 | void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) { |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 2793 | GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long= */ true); |
Mark Mendell | 2d55479 | 2015-09-15 21:45:18 -0400 | [diff] [blame] | 2794 | } |
| 2795 | |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2796 | static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) { |
| 2797 | return instruction->InputAt(input0) == instruction->InputAt(input1); |
| 2798 | } |
| 2799 | |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2800 | // Compute base address for the System.arraycopy intrinsic in `base`. |
| 2801 | static void GenSystemArrayCopyBaseAddress(X86Assembler* assembler, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2802 | DataType::Type type, |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2803 | const Register& array, |
| 2804 | const Location& pos, |
| 2805 | const Register& base) { |
| 2806 | // This routine is only used by the SystemArrayCopy intrinsic at the |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2807 | // moment. It could also accept DataType::Type::kUint16 (Java char) as `type`
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2808 | // to implement the SystemArrayCopyChar intrinsic.
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2809 | DCHECK_EQ(type, DataType::Type::kReference); |
| 2810 | const int32_t element_size = DataType::Size(type); |
| 2811 | const ScaleFactor scale_factor = static_cast<ScaleFactor>(DataType::SizeShift(type)); |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2812 | const uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value(); |
| 2813 | |
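| | // Both cases below compute base = array + data_offset + pos * element_size;
| | // the constant case simply folds `pos` into the displacement.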
| 2814 | if (pos.IsConstant()) { |
| 2815 | int32_t constant = pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2816 | __ leal(base, Address(array, element_size * constant + data_offset)); |
| 2817 | } else { |
| 2818 | __ leal(base, Address(array, pos.AsRegister<Register>(), scale_factor, data_offset)); |
| 2819 | } |
| 2820 | } |
| 2821 | |
| 2822 | // Compute end source address for the System.arraycopy intrinsic in `end`. |
| 2823 | static void GenSystemArrayCopyEndAddress(X86Assembler* assembler, |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2824 | DataType::Type type, |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2825 | const Location& copy_length, |
| 2826 | const Register& base, |
| 2827 | const Register& end) { |
| 2828 | // This routine is only used by the SystemArrayCopy intrinsic at the |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2829 | // moment. It could also accept DataType::Type::kUint16 (Java char) as `type`
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2830 | // to implement the SystemArrayCopyChar intrinsic.
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 2831 | DCHECK_EQ(type, DataType::Type::kReference); |
| 2832 | const int32_t element_size = DataType::Size(type); |
| 2833 | const ScaleFactor scale_factor = static_cast<ScaleFactor>(DataType::SizeShift(type)); |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 2834 | |
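| | // Computes end = base + copy_length * element_size, i.e. one past the last
| | // element to be copied.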
| 2835 | if (copy_length.IsConstant()) { |
| 2836 | int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue(); |
| 2837 | __ leal(end, Address(base, element_size * constant)); |
| 2838 | } else { |
| 2839 | __ leal(end, Address(base, copy_length.AsRegister<Register>(), scale_factor, 0)); |
| 2840 | } |
| 2841 | } |
| 2842 | |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2843 | void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) { |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2844 | // The only read barrier implementation supporting the |
| 2845 | // SystemArrayCopy intrinsic is the Baker-style read barriers. |
| 2846 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2847 | return; |
| 2848 | } |
| 2849 | |
| 2850 | CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke); |
| 2851 | if (invoke->GetLocations() != nullptr) { |
| 2852 | // Need a byte register for marking. |
| 2853 | invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX)); |
| 2854 | |
| 2855 | static constexpr size_t kSrc = 0; |
| 2856 | static constexpr size_t kSrcPos = 1; |
| 2857 | static constexpr size_t kDest = 2; |
| 2858 | static constexpr size_t kDestPos = 3; |
| 2859 | static constexpr size_t kLength = 4; |
| 2860 | |
| 2861 | if (!invoke->InputAt(kSrcPos)->IsIntConstant() && |
| 2862 | !invoke->InputAt(kDestPos)->IsIntConstant() && |
| 2863 | !invoke->InputAt(kLength)->IsIntConstant()) { |
| 2864 | if (!IsSameInput(invoke, kSrcPos, kDestPos) && |
| 2865 | !IsSameInput(invoke, kSrcPos, kLength) && |
| 2866 | !IsSameInput(invoke, kDestPos, kLength) && |
| 2867 | !IsSameInput(invoke, kSrc, kDest)) { |
 | 2868 | // Not enough registers; make the length also take a stack slot.
| 2869 | invoke->GetLocations()->SetInAt(kLength, Location::Any()); |
| 2870 | } |
| 2871 | } |
| 2872 | } |
| 2873 | } |
| 2874 | |
| 2875 | void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) { |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2876 | // The only read barrier implementation supporting the |
| 2877 | // SystemArrayCopy intrinsic is the Baker-style read barriers. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 2878 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2879 | |
| 2880 | X86Assembler* assembler = GetAssembler(); |
| 2881 | LocationSummary* locations = invoke->GetLocations(); |
| 2882 | |
| 2883 | uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); |
| 2884 | uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); |
| 2885 | uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); |
| 2886 | uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value(); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2887 | uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value(); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2888 | |
| 2889 | Register src = locations->InAt(0).AsRegister<Register>(); |
| 2890 | Location src_pos = locations->InAt(1); |
| 2891 | Register dest = locations->InAt(2).AsRegister<Register>(); |
| 2892 | Location dest_pos = locations->InAt(3); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2893 | Location length_arg = locations->InAt(4); |
| 2894 | Location length = length_arg; |
| 2895 | Location temp1_loc = locations->GetTemp(0); |
| 2896 | Register temp1 = temp1_loc.AsRegister<Register>(); |
| 2897 | Location temp2_loc = locations->GetTemp(1); |
| 2898 | Register temp2 = temp2_loc.AsRegister<Register>(); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2899 | |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 2900 | SlowPathCode* intrinsic_slow_path = |
| 2901 | new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2902 | codegen_->AddSlowPath(intrinsic_slow_path); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2903 | |
| 2904 | NearLabel conditions_on_positions_validated; |
| 2905 | SystemArrayCopyOptimizations optimizations(invoke); |
| 2906 | |
 | 2907 | // If source and destination are the same, we go to the slow path if the copy
 | 2908 | // would have to run backward to be correct.
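| | // The generated copy loops walk with ascending addresses, so src == dest
| | // with src_pos < dest_pos would overwrite source elements before they are
| | // read; those are exactly the cases routed to the slow path below.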
| 2909 | if (src_pos.IsConstant()) { |
| 2910 | int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2911 | if (dest_pos.IsConstant()) { |
| 2912 | int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2913 | if (optimizations.GetDestinationIsSource()) { |
| 2914 | // Checked when building locations. |
| 2915 | DCHECK_GE(src_pos_constant, dest_pos_constant); |
| 2916 | } else if (src_pos_constant < dest_pos_constant) { |
| 2917 | __ cmpl(src, dest); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2918 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2919 | } |
| 2920 | } else { |
| 2921 | if (!optimizations.GetDestinationIsSource()) { |
| 2922 | __ cmpl(src, dest); |
| 2923 | __ j(kNotEqual, &conditions_on_positions_validated); |
| 2924 | } |
| 2925 | __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant)); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2926 | __ j(kGreater, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2927 | } |
| 2928 | } else { |
| 2929 | if (!optimizations.GetDestinationIsSource()) { |
| 2930 | __ cmpl(src, dest); |
| 2931 | __ j(kNotEqual, &conditions_on_positions_validated); |
| 2932 | } |
| 2933 | if (dest_pos.IsConstant()) { |
| 2934 | int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2935 | __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant)); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2936 | __ j(kLess, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2937 | } else { |
| 2938 | __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>()); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2939 | __ j(kLess, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2940 | } |
| 2941 | } |
| 2942 | |
| 2943 | __ Bind(&conditions_on_positions_validated); |
| 2944 | |
| 2945 | if (!optimizations.GetSourceIsNotNull()) { |
| 2946 | // Bail out if the source is null. |
| 2947 | __ testl(src, src); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2948 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2949 | } |
| 2950 | |
| 2951 | if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) { |
| 2952 | // Bail out if the destination is null. |
| 2953 | __ testl(dest, dest); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2954 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2955 | } |
| 2956 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2957 | Location temp3_loc = locations->GetTemp(2); |
| 2958 | Register temp3 = temp3_loc.AsRegister<Register>(); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2959 | if (length.IsStackSlot()) { |
| 2960 | __ movl(temp3, Address(ESP, length.GetStackIndex())); |
| 2961 | length = Location::RegisterLocation(temp3); |
| 2962 | } |
| 2963 | |
| 2964 | // If the length is negative, bail out. |
 | 2965 | // The constant case has already been checked in the LocationsBuilder.
| 2966 | if (!length.IsConstant() && |
| 2967 | !optimizations.GetCountIsSourceLength() && |
| 2968 | !optimizations.GetCountIsDestinationLength()) { |
| 2969 | __ testl(length.AsRegister<Register>(), length.AsRegister<Register>()); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2970 | __ j(kLess, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2971 | } |
| 2972 | |
| 2973 | // Validity checks: source. |
| 2974 | CheckPosition(assembler, |
| 2975 | src_pos, |
| 2976 | src, |
| 2977 | length, |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2978 | intrinsic_slow_path, |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2979 | temp1, |
| 2980 | optimizations.GetCountIsSourceLength()); |
| 2981 | |
| 2982 | // Validity checks: dest. |
| 2983 | CheckPosition(assembler, |
| 2984 | dest_pos, |
| 2985 | dest, |
| 2986 | length, |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2987 | intrinsic_slow_path, |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2988 | temp1, |
| 2989 | optimizations.GetCountIsDestinationLength()); |
| 2990 | |
| 2991 | if (!optimizations.GetDoesNotNeedTypeCheck()) { |
| 2992 | // Check whether all elements of the source array are assignable to the component |
| 2993 | // type of the destination array. We do two checks: the classes are the same, |
 | 2994 | // or the destination is Object[]. If neither check succeeds, we go to the
| 2995 | // slow path. |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2996 | |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 2997 | if (!optimizations.GetSourceIsNonPrimitiveArray()) { |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 2998 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 2999 | // /* HeapReference<Class> */ temp1 = src->klass_ |
| 3000 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3001 | invoke, temp1_loc, src, class_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3002 | // Bail out if the source is not a non-primitive array.
| 3003 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3004 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3005 | invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3006 | __ testl(temp1, temp1); |
| 3007 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3008 | // If heap poisoning is enabled, `temp1` has been unpoisoned |
 | 3009 | // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
| 3010 | } else { |
| 3011 | // /* HeapReference<Class> */ temp1 = src->klass_ |
| 3012 | __ movl(temp1, Address(src, class_offset)); |
| 3013 | __ MaybeUnpoisonHeapReference(temp1); |
 | 3014 | // Bail out if the source is not a non-primitive array.
| 3015 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3016 | __ movl(temp1, Address(temp1, component_offset)); |
| 3017 | __ testl(temp1, temp1); |
| 3018 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3019 | __ MaybeUnpoisonHeapReference(temp1); |
| 3020 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3021 | __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot)); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3022 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3023 | } |
| 3024 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3025 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 3026 | if (length.Equals(Location::RegisterLocation(temp3))) { |
| 3027 | // When Baker read barriers are enabled, register `temp3`, |
| 3028 | // which in the present case contains the `length` parameter, |
| 3029 | // will be overwritten below. Make the `length` location |
| 3030 | // reference the original stack location; it will be moved |
| 3031 | // back to `temp3` later if necessary. |
| 3032 | DCHECK(length_arg.IsStackSlot()); |
| 3033 | length = length_arg; |
| 3034 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3035 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3036 | // /* HeapReference<Class> */ temp1 = dest->klass_ |
| 3037 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3038 | invoke, temp1_loc, dest, class_offset, /* needs_null_check= */ false); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3039 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3040 | if (!optimizations.GetDestinationIsNonPrimitiveArray()) { |
 | 3041 | // Bail out if the destination is not a non-primitive array.
| 3042 | // |
| 3043 | // Register `temp1` is not trashed by the read barrier emitted |
| 3044 | // by GenerateFieldLoadWithBakerReadBarrier below, as that |
| 3045 | // method produces a call to a ReadBarrierMarkRegX entry point, |
| 3046 | // which saves all potentially live registers, including |
 | 3047 | // temporaries such as `temp1`.
| 3048 | // /* HeapReference<Class> */ temp2 = temp1->component_type_ |
| 3049 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3050 | invoke, temp2_loc, temp1, component_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3051 | __ testl(temp2, temp2); |
| 3052 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3053 | // If heap poisoning is enabled, `temp2` has been unpoisoned |
 | 3054 | // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
| 3055 | __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot)); |
| 3056 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3057 | } |
| 3058 | |
| 3059 | // For the same reason given earlier, `temp1` is not trashed by the |
| 3060 | // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below. |
| 3061 | // /* HeapReference<Class> */ temp2 = src->klass_ |
| 3062 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3063 | invoke, temp2_loc, src, class_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3064 | // Note: if heap poisoning is on, we are comparing two unpoisoned references here. |
| 3065 | __ cmpl(temp1, temp2); |
| 3066 | |
| 3067 | if (optimizations.GetDestinationIsTypedObjectArray()) { |
| 3068 | NearLabel do_copy; |
| 3069 | __ j(kEqual, &do_copy); |
| 3070 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3071 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3072 | invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3073 | // We do not need to emit a read barrier for the following |
| 3074 | // heap reference load, as `temp1` is only used in a |
| 3075 | // comparison with null below, and this reference is not |
| 3076 | // kept afterwards. |
| 3077 | __ cmpl(Address(temp1, super_offset), Immediate(0)); |
| 3078 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3079 | __ Bind(&do_copy); |
| 3080 | } else { |
| 3081 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3082 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3083 | } else { |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3084 | // Non read barrier code. |
| 3085 | |
| 3086 | // /* HeapReference<Class> */ temp1 = dest->klass_ |
| 3087 | __ movl(temp1, Address(dest, class_offset)); |
| 3088 | if (!optimizations.GetDestinationIsNonPrimitiveArray()) { |
| 3089 | __ MaybeUnpoisonHeapReference(temp1); |
 | 3090 | // Bail out if the destination is not a non-primitive array.
| 3091 | // /* HeapReference<Class> */ temp2 = temp1->component_type_ |
| 3092 | __ movl(temp2, Address(temp1, component_offset)); |
| 3093 | __ testl(temp2, temp2); |
| 3094 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3095 | __ MaybeUnpoisonHeapReference(temp2); |
| 3096 | __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot)); |
| 3097 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3098 | // Re-poison the heap reference to make the compare instruction below |
| 3099 | // compare two poisoned references. |
| 3100 | __ PoisonHeapReference(temp1); |
| 3101 | } |
| 3102 | |
| 3103 | // Note: if heap poisoning is on, we are comparing two poisoned references here. |
| 3104 | __ cmpl(temp1, Address(src, class_offset)); |
| 3105 | |
| 3106 | if (optimizations.GetDestinationIsTypedObjectArray()) { |
| 3107 | NearLabel do_copy; |
| 3108 | __ j(kEqual, &do_copy); |
| 3109 | __ MaybeUnpoisonHeapReference(temp1); |
| 3110 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3111 | __ movl(temp1, Address(temp1, component_offset)); |
| 3112 | __ MaybeUnpoisonHeapReference(temp1); |
| 3113 | __ cmpl(Address(temp1, super_offset), Immediate(0)); |
| 3114 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3115 | __ Bind(&do_copy); |
| 3116 | } else { |
| 3117 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3118 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3119 | } |
| 3120 | } else if (!optimizations.GetSourceIsNonPrimitiveArray()) { |
| 3121 | DCHECK(optimizations.GetDestinationIsNonPrimitiveArray()); |
 | 3122 | // Bail out if the source is not a non-primitive array.
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3123 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 3124 | // /* HeapReference<Class> */ temp1 = src->klass_ |
| 3125 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3126 | invoke, temp1_loc, src, class_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3127 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3128 | codegen_->GenerateFieldLoadWithBakerReadBarrier( |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3129 | invoke, temp1_loc, temp1, component_offset, /* needs_null_check= */ false); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3130 | __ testl(temp1, temp1); |
| 3131 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3132 | // If heap poisoning is enabled, `temp1` has been unpoisoned |
 | 3133 | // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
| 3134 | } else { |
| 3135 | // /* HeapReference<Class> */ temp1 = src->klass_ |
| 3136 | __ movl(temp1, Address(src, class_offset)); |
| 3137 | __ MaybeUnpoisonHeapReference(temp1); |
| 3138 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 3139 | __ movl(temp1, Address(temp1, component_offset)); |
| 3140 | __ testl(temp1, temp1); |
| 3141 | __ j(kEqual, intrinsic_slow_path->GetEntryLabel()); |
| 3142 | __ MaybeUnpoisonHeapReference(temp1); |
| 3143 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3144 | __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot)); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3145 | __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3146 | } |
| 3147 | |
Vladimir Marko | 0ebe0d8 | 2017-09-21 22:50:39 +0100 | [diff] [blame] | 3148 | const DataType::Type type = DataType::Type::kReference; |
| 3149 | const int32_t element_size = DataType::Size(type); |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3150 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3151 | // Compute the base source address in `temp1`. |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3152 | GenSystemArrayCopyBaseAddress(GetAssembler(), type, src, src_pos, temp1); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3153 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3154 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 3155 | // If it is needed (in the case of the fast-path loop), the base |
| 3156 | // destination address is computed later, as `temp2` is used for |
| 3157 | // intermediate computations. |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3158 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3159 | // Compute the end source address in `temp3`. |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3160 | if (length.IsStackSlot()) { |
| 3161 | // Location `length` is again pointing at a stack slot, as |
 | 3162 | // register `temp3` (which contained the length parameter
 | 3163 | // earlier) has been overwritten; restore it now.
| 3164 | DCHECK(length.Equals(length_arg)); |
| 3165 | __ movl(temp3, Address(ESP, length.GetStackIndex())); |
| 3166 | length = Location::RegisterLocation(temp3); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3167 | } |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3168 | GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3169 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3170 | // SystemArrayCopy implementation for Baker read barriers (see |
| 3171 | // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier): |
| 3172 | // |
| 3173 | // if (src_ptr != end_ptr) { |
| 3174 | // uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState(); |
| 3175 | // lfence; // Load fence or artificial data dependency to prevent load-load reordering |
Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 3176 | // bool is_gray = (rb_state == ReadBarrier::GrayState()); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3177 | // if (is_gray) { |
| 3178 | // // Slow-path copy. |
| 3179 | // for (size_t i = 0; i != length; ++i) { |
| 3180 | // dest_array[dest_pos + i] = |
| 3181 | // MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i]))); |
| 3182 | // } |
| 3183 | // } else { |
| 3184 | // // Fast-path copy. |
| 3185 | // do { |
| 3186 | // *dest_ptr++ = *src_ptr++; |
| 3187 | // } while (src_ptr != end_ptr) |
| 3188 | // } |
| 3189 | // } |
| 3190 | |
| 3191 | NearLabel loop, done; |
| 3192 | |
| 3193 | // Don't enter copy loop if `length == 0`. |
| 3194 | __ cmpl(temp1, temp3); |
| 3195 | __ j(kEqual, &done); |
| 3196 | |
Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 3197 | // Given the numeric representation, it's enough to check the low bit of the rb_state. |
Roland Levillain | 14e5a29 | 2018-06-28 12:00:56 +0100 | [diff] [blame] | 3198 | static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0"); |
Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 3199 | static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1"); |
Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 3200 | constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte; |
| 3201 | constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte; |
| 3202 | constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position); |
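| | // The testb below probes only the single lock word byte holding the
| | // rb_state bit: gray_byte_position selects that byte and test_value is the
| | // bit's mask within it (the int8_t cast keeps it in testb's imm8 range).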
| 3203 | |
Hiroshi Yamauchi | 12b58b2 | 2016-11-01 11:55:29 -0700 | [diff] [blame] | 3204 | // if (rb_state == ReadBarrier::GrayState()) |
Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 3205 | // goto slow_path; |
| 3206 | // At this point, just do the "if" and make sure that flags are preserved until the branch. |
| 3207 | __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value)); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3208 | |
| 3209 | // Load fence to prevent load-load reordering. |
| 3210 | // Note that this is a no-op, thanks to the x86 memory model. |
| 3211 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); |
| 3212 | |
| 3213 | // Slow path used to copy array when `src` is gray. |
| 3214 | SlowPathCode* read_barrier_slow_path = |
Vladimir Marko | 174b2e2 | 2017-10-12 13:34:49 +0100 | [diff] [blame] | 3215 | new (codegen_->GetScopedAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3216 | codegen_->AddSlowPath(read_barrier_slow_path); |
| 3217 | |
Vladimir Marko | 953437b | 2016-08-24 08:30:46 +0000 | [diff] [blame] | 3218 | // We have done the "if" of the gray bit check above, now branch based on the flags. |
| 3219 | __ j(kNotZero, read_barrier_slow_path->GetEntryLabel()); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3220 | |
| 3221 | // Fast-path copy. |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3222 | // Compute the base destination address in `temp2`. |
| 3223 | GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3224 | // Iterate over the arrays and do a raw copy of the objects. We don't need to |
| 3225 | // poison/unpoison. |
| 3226 | __ Bind(&loop); |
| 3227 | __ pushl(Address(temp1, 0)); |
| 3228 | __ cfi().AdjustCFAOffset(4); |
| 3229 | __ popl(Address(temp2, 0)); |
| 3230 | __ cfi().AdjustCFAOffset(-4); |
| 3231 | __ addl(temp1, Immediate(element_size)); |
| 3232 | __ addl(temp2, Immediate(element_size)); |
| 3233 | __ cmpl(temp1, temp3); |
| 3234 | __ j(kNotEqual, &loop); |
| 3235 | |
| 3236 | __ Bind(read_barrier_slow_path->GetExitLabel()); |
| 3237 | __ Bind(&done); |
| 3238 | } else { |
| 3239 | // Non read barrier code. |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3240 | // Compute the base destination address in `temp2`. |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3241 | GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3242 | // Compute the end source address in `temp3`. |
Roland Levillain | 9cc0ea8 | 2017-03-16 11:25:59 +0000 | [diff] [blame] | 3243 | GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3); |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3244 | // Iterate over the arrays and do a raw copy of the objects. We don't need to |
| 3245 | // poison/unpoison. |
| 3246 | NearLabel loop, done; |
| 3247 | __ cmpl(temp1, temp3); |
| 3248 | __ j(kEqual, &done); |
| 3249 | __ Bind(&loop); |
| 3250 | __ pushl(Address(temp1, 0)); |
| 3251 | __ cfi().AdjustCFAOffset(4); |
| 3252 | __ popl(Address(temp2, 0)); |
| 3253 | __ cfi().AdjustCFAOffset(-4); |
| 3254 | __ addl(temp1, Immediate(element_size)); |
| 3255 | __ addl(temp2, Immediate(element_size)); |
| 3256 | __ cmpl(temp1, temp3); |
| 3257 | __ j(kNotEqual, &loop); |
| 3258 | __ Bind(&done); |
| 3259 | } |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3260 | |
| 3261 | // We only need one card marking on the destination array. |
Andreas Gampe | 3db7068 | 2018-12-26 15:12:03 -0800 | [diff] [blame] | 3262 | codegen_->MarkGCCard(temp1, temp2, dest, Register(kNoRegister), /* value_can_be_null= */ false); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3263 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 3264 | __ Bind(intrinsic_slow_path->GetExitLabel()); |
Nicolas Geoffray | fea1abd | 2016-07-06 12:09:12 +0100 | [diff] [blame] | 3265 | } |
| 3266 | |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 3267 | static void RequestBaseMethodAddressInRegister(HInvoke* invoke) { |
| 3268 | LocationSummary* locations = invoke->GetLocations(); |
| 3269 | if (locations != nullptr) { |
| 3270 | HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect(); |
| 3271 | // Note: The base method address is not present yet when this is called from the |
| 3272 | // PCRelativeHandlerVisitor via IsCallFreeIntrinsic() to determine whether to insert it. |
| 3273 | if (invoke_static_or_direct->HasSpecialInput()) { |
| 3274 | DCHECK(invoke_static_or_direct->InputAt(invoke_static_or_direct->GetSpecialInputIndex()) |
| 3275 | ->IsX86ComputeBaseMethodAddress()); |
| 3276 | locations->SetInAt(invoke_static_or_direct->GetSpecialInputIndex(), |
| 3277 | Location::RequiresRegister()); |
| 3278 | } |
| 3279 | } |
| 3280 | } |
| 3281 | |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3282 | void IntrinsicLocationsBuilderX86::VisitIntegerValueOf(HInvoke* invoke) { |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3283 | DCHECK(invoke->IsInvokeStaticOrDirect()); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3284 | InvokeRuntimeCallingConvention calling_convention; |
| 3285 | IntrinsicVisitor::ComputeIntegerValueOfLocations( |
| 3286 | invoke, |
| 3287 | codegen_, |
| 3288 | Location::RegisterLocation(EAX), |
| 3289 | Location::RegisterLocation(calling_convention.GetRegisterAt(0))); |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 3290 | RequestBaseMethodAddressInRegister(invoke); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3291 | } |
| 3292 | |
| 3293 | void IntrinsicCodeGeneratorX86::VisitIntegerValueOf(HInvoke* invoke) { |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3294 | DCHECK(invoke->IsInvokeStaticOrDirect()); |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3295 | IntrinsicVisitor::IntegerValueOfInfo info = |
| 3296 | IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3297 | LocationSummary* locations = invoke->GetLocations(); |
| 3298 | X86Assembler* assembler = GetAssembler(); |
| 3299 | |
| 3300 | Register out = locations->Out().AsRegister<Register>(); |
Vladimir Marko | de91ca9 | 2020-10-27 13:41:40 +0000 | [diff] [blame] | 3301 | auto allocate_instance = [&]() { |
| 3302 | DCHECK_EQ(out, InvokeRuntimeCallingConvention().GetRegisterAt(0)); |
| 3303 | codegen_->LoadIntrinsicDeclaringClass(out, invoke->AsInvokeStaticOrDirect()); |
| 3304 | codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); |
| 3305 | CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); |
| 3306 | }; |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3307 | if (invoke->InputAt(0)->IsConstant()) { |
| 3308 | int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3309 | if (static_cast<uint32_t>(value - info.low) < info.length) { |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3310 | // Just embed the j.l.Integer in the code. |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3311 | DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3312 | codegen_->LoadBootImageAddress( |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3313 | out, info.value_boot_image_reference, invoke->AsInvokeStaticOrDirect()); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3314 | } else { |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3315 | DCHECK(locations->CanCall()); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3316 | // Allocate and initialize a new j.l.Integer. |
| 3317 | // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the |
| 3318 | // JIT object table. |
Vladimir Marko | de91ca9 | 2020-10-27 13:41:40 +0000 | [diff] [blame] | 3319 | allocate_instance(); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3320 | __ movl(Address(out, info.value_offset), Immediate(value)); |
| 3321 | } |
| 3322 | } else { |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3323 | DCHECK(locations->CanCall()); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3324 | Register in = locations->InAt(0).AsRegister<Register>(); |
| 3325 | // Check bounds of our cache. |
| 3326 | __ leal(out, Address(in, -info.low)); |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3327 | __ cmpl(out, Immediate(info.length)); |
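| | // Classic unsigned range check: (in - info.low) viewed as unsigned is below
| | // info.length exactly when info.low <= in < info.low + info.length, so one
| | // LEA plus one unsigned compare replaces two signed comparisons.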
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3328 | NearLabel allocate, done; |
| 3329 | __ j(kAboveEqual, &allocate); |
| 3330 | // If the value is within the bounds, load the j.l.Integer directly from the array. |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3331 | constexpr size_t kElementSize = sizeof(mirror::HeapReference<mirror::Object>); |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 3332 | static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>), |
| 3333 | "Check heap reference size."); |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3334 | if (codegen_->GetCompilerOptions().IsBootImage()) { |
| 3335 | DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u); |
| 3336 | size_t method_address_index = invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex(); |
| 3337 | HX86ComputeBaseMethodAddress* method_address = |
| 3338 | invoke->InputAt(method_address_index)->AsX86ComputeBaseMethodAddress(); |
| 3339 | DCHECK(method_address != nullptr); |
| 3340 | Register method_address_reg = |
| 3341 | invoke->GetLocations()->InAt(method_address_index).AsRegister<Register>(); |
Vladimir Marko | 4ef451a | 2020-07-23 09:54:27 +0000 | [diff] [blame] | 3342 | __ movl(out, |
| 3343 | Address(method_address_reg, out, TIMES_4, CodeGeneratorX86::kPlaceholder32BitOffset)); |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame] | 3344 | codegen_->RecordBootImageIntrinsicPatch(method_address, info.array_data_boot_image_reference); |
| 3345 | } else { |
| 3346 | // Note: We're about to clobber the index in `out`, so we need to use `in` and |
| 3347 | // adjust the offset accordingly. |
| 3348 | uint32_t mid_array_boot_image_offset = |
| 3349 | info.array_data_boot_image_reference - info.low * kElementSize; |
| 3350 | codegen_->LoadBootImageAddress( |
| 3351 | out, mid_array_boot_image_offset, invoke->AsInvokeStaticOrDirect()); |
| 3352 | DCHECK_NE(out, in); |
| 3353 | __ movl(out, Address(out, in, TIMES_4, 0)); |
| 3354 | } |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3355 | __ MaybeUnpoisonHeapReference(out); |
| 3356 | __ jmp(&done); |
| 3357 | __ Bind(&allocate); |
| 3358 | // Otherwise allocate and initialize a new j.l.Integer. |
Vladimir Marko | de91ca9 | 2020-10-27 13:41:40 +0000 | [diff] [blame] | 3359 | allocate_instance(); |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 3360 | __ movl(Address(out, info.value_offset), in); |
| 3361 | __ Bind(&done); |
| 3362 | } |
| 3363 | } |
| 3364 | |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 3365 | void IntrinsicLocationsBuilderX86::VisitReferenceGetReferent(HInvoke* invoke) { |
| 3366 | IntrinsicVisitor::CreateReferenceGetReferentLocations(invoke, codegen_); |
| 3367 | RequestBaseMethodAddressInRegister(invoke); |
| 3368 | } |
| 3369 | |
| 3370 | void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) { |
| 3371 | X86Assembler* assembler = GetAssembler(); |
| 3372 | LocationSummary* locations = invoke->GetLocations(); |
| 3373 | |
| 3374 | Location obj = locations->InAt(0); |
| 3375 | Location out = locations->Out(); |
| 3376 | |
| 3377 | SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke); |
| 3378 | codegen_->AddSlowPath(slow_path); |
| 3379 | |
| 3380 | if (kEmitCompilerReadBarrier) { |
| 3381 | // Check self->GetWeakRefAccessEnabled(). |
| 3382 | ThreadOffset32 offset = Thread::WeakRefAccessEnabledOffset<kX86PointerSize>(); |
Hans Boehm | 1b3ec0f | 2022-01-26 16:53:07 +0000 | [diff] [blame] | 3383 | __ fs()->cmpl(Address::Absolute(offset), |
| 3384 | Immediate(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled))); |
| 3385 | __ j(kNotEqual, slow_path->GetEntryLabel()); |
Vladimir Marko | 01b6552 | 2020-10-28 15:43:54 +0000 | [diff] [blame] | 3386 | } |
| 3387 | |
| 3388 | // Load the java.lang.ref.Reference class, use the output register as a temporary. |
| 3389 | codegen_->LoadIntrinsicDeclaringClass(out.AsRegister<Register>(), |
| 3390 | invoke->AsInvokeStaticOrDirect()); |
| 3391 | |
| 3392 | // Check static fields java.lang.ref.Reference.{disableIntrinsic,slowPathEnabled} together. |
| 3393 | MemberOffset disable_intrinsic_offset = IntrinsicVisitor::GetReferenceDisableIntrinsicOffset(); |
| 3394 | DCHECK_ALIGNED(disable_intrinsic_offset.Uint32Value(), 2u); |
| 3395 | DCHECK_EQ(disable_intrinsic_offset.Uint32Value() + 1u, |
| 3396 | IntrinsicVisitor::GetReferenceSlowPathEnabledOffset().Uint32Value()); |
| 3397 | __ cmpw(Address(out.AsRegister<Register>(), disable_intrinsic_offset.Uint32Value()), |
| 3398 | Immediate(0)); |
| 3399 | __ j(kNotEqual, slow_path->GetEntryLabel()); |
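| | // The two byte-sized flags are adjacent (as the DCHECKs above verify), so a
| | // single 16-bit compare against zero tests disableIntrinsic and
| | // slowPathEnabled at once.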
| 3400 | |
| 3401 | // Load the value from the field. |
| 3402 | uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value(); |
| 3403 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 3404 | codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke, |
| 3405 | out, |
| 3406 | obj.AsRegister<Register>(), |
| 3407 | referent_offset, |
| 3408 | /*needs_null_check=*/ true); |
| 3409 | // Note that the fence is a no-op, thanks to the x86 memory model. |
| 3410 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); // `referent` is volatile. |
| 3411 | } else { |
| 3412 | __ movl(out.AsRegister<Register>(), Address(obj.AsRegister<Register>(), referent_offset)); |
| 3413 | codegen_->MaybeRecordImplicitNullCheck(invoke); |
| 3414 | // Note that the fence is a no-op, thanks to the x86 memory model. |
| 3415 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); // `referent` is volatile. |
| 3416 | codegen_->MaybeGenerateReadBarrierSlow(invoke, out, out, obj, referent_offset); |
| 3417 | } |
| 3418 | __ Bind(slow_path->GetExitLabel()); |
| 3419 | } |
| 3420 | |
Vladimir Marko | ac27ac0 | 2021-02-01 09:31:02 +0000 | [diff] [blame] | 3421 | void IntrinsicLocationsBuilderX86::VisitReferenceRefersTo(HInvoke* invoke) { |
| 3422 | IntrinsicVisitor::CreateReferenceRefersToLocations(invoke); |
| 3423 | } |
| 3424 | |
| 3425 | void IntrinsicCodeGeneratorX86::VisitReferenceRefersTo(HInvoke* invoke) { |
| 3426 | X86Assembler* assembler = GetAssembler(); |
| 3427 | LocationSummary* locations = invoke->GetLocations(); |
| 3428 | |
| 3429 | Register obj = locations->InAt(0).AsRegister<Register>(); |
| 3430 | Register other = locations->InAt(1).AsRegister<Register>(); |
| 3431 | Register out = locations->Out().AsRegister<Register>(); |
| 3432 | |
| 3433 | uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value(); |
| 3434 | uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value(); |
| 3435 | |
| 3436 | __ movl(out, Address(obj, referent_offset)); |
| 3437 | codegen_->MaybeRecordImplicitNullCheck(invoke); |
Vladimir Marko | a0a20cd | 2021-02-05 15:55:47 +0000 | [diff] [blame] | 3438 | __ MaybeUnpoisonHeapReference(out); |
Vladimir Marko | ac27ac0 | 2021-02-01 09:31:02 +0000 | [diff] [blame] | 3439 | // Note that the fence is a no-op, thanks to the x86 memory model. |
| 3440 | codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); // `referent` is volatile. |
| 3441 | |
| 3442 | NearLabel end, return_true, return_false; |
| 3443 | __ cmpl(out, other); |
| 3444 | |
| 3445 | if (kEmitCompilerReadBarrier) { |
| 3446 | DCHECK(kUseBakerReadBarrier); |
| 3447 | |
| 3448 | __ j(kEqual, &return_true); |
| 3449 | |
| 3450 | // Check if the loaded reference is null. |
| 3451 | __ testl(out, out); |
| 3452 | __ j(kZero, &return_false); |
| 3453 | |
 | 3454 | // For correct memory visibility, we need a barrier before loading the lock word,
 | 3455 | // but the barrier already emitted for the volatile load above is sufficient.
| 3456 | |
| 3457 | // Load the lockword and check if it is a forwarding address. |
| 3458 | static_assert(LockWord::kStateShift == 30u); |
| 3459 | static_assert(LockWord::kStateForwardingAddress == 3u); |
| 3460 | __ movl(out, Address(out, monitor_offset)); |
| 3461 | __ cmpl(out, Immediate(static_cast<int32_t>(0xc0000000))); |
| 3462 | __ j(kBelow, &return_false); |
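| | // With the state field in bits 30-31 and kStateForwardingAddress == 3, a
| | // forwarding-address lock word has both top bits set, i.e. an unsigned
| | // value of at least 0xc0000000; kBelow is the unsigned less-than branch.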
| 3463 | |
| 3464 | // Extract the forwarding address and compare with `other`. |
| 3465 | __ shll(out, Immediate(LockWord::kForwardingAddressShift)); |
| 3466 | __ cmpl(out, other); |
| 3467 | } |
| 3468 | |
| 3469 | __ j(kNotEqual, &return_false); |
| 3470 | |
| 3471 | // Return true and exit the function. |
| 3472 | __ Bind(&return_true); |
| 3473 | __ movl(out, Immediate(1)); |
| 3474 | __ jmp(&end); |
| 3475 | |
| 3476 | // Return false and exit the function. |
| 3477 | __ Bind(&return_false); |
| 3478 | __ xorl(out, out); |
| 3479 | __ Bind(&end); |
| 3480 | } |
| 3481 | |
Nicolas Geoffray | 365719c | 2017-03-08 13:11:50 +0000 | [diff] [blame] | 3482 | void IntrinsicLocationsBuilderX86::VisitThreadInterrupted(HInvoke* invoke) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 3483 | LocationSummary* locations = |
| 3484 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
Nicolas Geoffray | 365719c | 2017-03-08 13:11:50 +0000 | [diff] [blame] | 3485 | locations->SetOut(Location::RequiresRegister()); |
| 3486 | } |
| 3487 | |
| 3488 | void IntrinsicCodeGeneratorX86::VisitThreadInterrupted(HInvoke* invoke) { |
| 3489 | X86Assembler* assembler = GetAssembler(); |
| 3490 | Register out = invoke->GetLocations()->Out().AsRegister<Register>(); |
| 3491 | Address address = Address::Absolute(Thread::InterruptedOffset<kX86PointerSize>().Int32Value()); |
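| | // Logic sketch: load the thread-local interrupted flag into `out`; if it is zero we are |
| | // done and skip the store (the common case). Otherwise clear the flag in memory and emit |
| | // a full fence so the clear is not reordered with later memory operations. |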
| 3492 | NearLabel done; |
| 3493 | __ fs()->movl(out, address); |
| 3494 | __ testl(out, out); |
| 3495 | __ j(kEqual, &done); |
| 3496 | __ fs()->movl(address, Immediate(0)); |
| 3497 | codegen_->MemoryFence(); |
| 3498 | __ Bind(&done); |
| 3499 | } |
| 3500 | |
Hans Boehm | c7b28de | 2018-03-09 17:05:28 -0800 | [diff] [blame] | 3501 | void IntrinsicLocationsBuilderX86::VisitReachabilityFence(HInvoke* invoke) { |
| 3502 | LocationSummary* locations = |
| 3503 | new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified); |
| 3504 | locations->SetInAt(0, Location::Any()); |
| 3505 | } |
| 3506 | |
| 3507 | void IntrinsicCodeGeneratorX86::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { } |
Nicolas Geoffray | 365719c | 2017-03-08 13:11:50 +0000 | [diff] [blame] | 3508 | |
Andra Danciu | dc787f4 | 2020-07-07 14:28:56 +0000 | [diff] [blame] | 3509 | void IntrinsicLocationsBuilderX86::VisitIntegerDivideUnsigned(HInvoke* invoke) { |
| 3510 | LocationSummary* locations = new (allocator_) LocationSummary(invoke, |
| 3511 | LocationSummary::kCallOnSlowPath, |
| 3512 | kIntrinsified); |
| 3513 | locations->SetInAt(0, Location::RegisterLocation(EAX)); |
| 3514 | locations->SetInAt(1, Location::RequiresRegister()); |
| 3515 | locations->SetOut(Location::SameAsFirstInput()); |
| 3516 | // Intel uses edx:eax as the dividend. |
| 3517 | locations->AddTemp(Location::RegisterLocation(EDX)); |
| 3518 | } |
| 3519 | |
| 3520 | void IntrinsicCodeGeneratorX86::VisitIntegerDivideUnsigned(HInvoke* invoke) { |
| 3521 | X86Assembler* assembler = GetAssembler(); |
| 3522 | LocationSummary* locations = invoke->GetLocations(); |
| 3523 | Location out = locations->Out(); |
| 3524 | Location first = locations->InAt(0); |
| 3525 | Location second = locations->InAt(1); |
| 3526 | Register edx = locations->GetTemp(0).AsRegister<Register>(); |
| 3527 | Register second_reg = second.AsRegister<Register>(); |
| 3528 | |
| 3529 | DCHECK_EQ(EAX, first.AsRegister<Register>()); |
| 3530 | DCHECK_EQ(EAX, out.AsRegister<Register>()); |
| 3531 | DCHECK_EQ(EDX, edx); |
| 3532 | |
| 3533 | // Check if the divisor is zero and bail to the managed implementation to handle it. |
| 3534 | __ testl(second_reg, second_reg); |
| 3535 | SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
| 3536 | codegen_->AddSlowPath(slow_path); |
| 3537 | __ j(kEqual, slow_path->GetEntryLabel()); |
| 3538 | |
| 3539 | __ xorl(edx, edx); |
| 3540 | __ divl(second_reg); |
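| | // Example of why unsigned `div` is needed: for first = 0xFFFFFFFE and second = 2, `div` |
| | // yields 0x7FFFFFFF, whereas signed `idiv` would compute -2 / 2 = -1. Zeroing EDX above |
| | // forms the zero-extended 64-bit dividend EDX:EAX that `div` expects. |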
| 3541 | |
| 3542 | __ Bind(slow_path->GetExitLabel()); |
| 3543 | } |
| 3544 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3545 | static bool HasVarHandleIntrinsicImplementation(HInvoke* invoke) { |
| 3546 | VarHandleOptimizations optimizations(invoke); |
| 3547 | if (optimizations.GetDoNotIntrinsify()) { |
| 3548 | return false; |
| 3549 | } |
| 3550 | |
Vladimir Marko | 4a889b7 | 2021-07-21 16:20:54 +0000 | [diff] [blame] | 3551 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3552 | DCHECK_LE(expected_coordinates_count, 2u); // Filtered by the `DoNotIntrinsify` flag above. |
Vladimir Marko | 4a889b7 | 2021-07-21 16:20:54 +0000 | [diff] [blame] | 3553 | if (expected_coordinates_count > 1u) { |
| 3554 | // Only static and instance field VarHandles are supported for now. |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3555 | // TODO: add support for arrays and views. |
Vladimir Marko | b3a7a6a | 2021-07-20 15:02:33 +0100 | [diff] [blame] | 3556 | return false; |
| 3557 | } |
| 3558 | |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 3559 | return true; |
| 3560 | } |
| 3561 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3562 | static void GenerateVarHandleAccessModeCheck(Register varhandle_object, |
| 3563 | mirror::VarHandle::AccessMode access_mode, |
| 3564 | SlowPathCode* slow_path, |
| 3565 | X86Assembler* assembler) { |
| 3566 | const uint32_t access_modes_bitmask_offset = |
| 3567 | mirror::VarHandle::AccessModesBitMaskOffset().Uint32Value(); |
| 3568 | const uint32_t access_mode_bit = 1u << static_cast<uint32_t>(access_mode); |
| 3569 | |
| 3570 | // If the access mode is not supported, bail to the runtime implementation to handle it. |
| 3571 | __ testl(Address(varhandle_object, access_modes_bitmask_offset), Immediate(access_mode_bit)); |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3572 | __ j(kZero, slow_path->GetEntryLabel()); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3573 | } |
| 3574 | |
| 3575 | static void GenerateVarHandleStaticFieldCheck(Register varhandle_object, |
| 3576 | SlowPathCode* slow_path, |
| 3577 | X86Assembler* assembler) { |
| 3578 | const uint32_t coordtype0_offset = mirror::VarHandle::CoordinateType0Offset().Uint32Value(); |
| 3579 | |
| 3580 | // Check that the VarHandle references a static field by checking that coordinateType0 == null. |
| 3581 | // Do not emit read barrier (or unpoison the reference) for comparing to null. |
| 3582 | __ cmpl(Address(varhandle_object, coordtype0_offset), Immediate(0)); |
| 3583 | __ j(kNotEqual, slow_path->GetEntryLabel()); |
| 3584 | } |
| 3585 | |
| 3586 | static void GenerateSubTypeObjectCheck(Register object, |
| 3587 | Register temp, |
| 3588 | Address type_address, |
| 3589 | SlowPathCode* slow_path, |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3590 | X86Assembler* assembler, |
| 3591 | bool object_can_be_null = true) { |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3592 | const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value(); |
| 3593 | const uint32_t super_class_offset = mirror::Class::SuperClassOffset().Uint32Value(); |
| 3594 | NearLabel check_type_compatibility, type_matched; |
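| | // A sketch of the loop emitted below (illustrative pseudocode; names for exposition only): |
| | //   k = object->klass; |
| | //   while (k != type) { |
| | //     k = k->super_class; |
| | //     if (k == nullptr) goto slow_path;  // no match; interfaces etc. are left to the slow path |
| | //   } |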
| 3595 | |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3596 | // If the object is null, there is no need to check the type. |
| 3597 | if (object_can_be_null) { |
| 3598 | __ testl(object, object); |
| 3599 | __ j(kZero, &type_matched); |
| 3600 | } |
| 3601 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3602 | // Do not unpoison for in-memory comparison. |
| 3603 | // We deliberately avoid the read barrier, letting the slow path handle the false negatives. |
| 3604 | __ movl(temp, Address(object, class_offset)); |
| 3605 | __ Bind(&check_type_compatibility); |
| 3606 | __ cmpl(temp, type_address); |
| 3607 | __ j(kEqual, &type_matched); |
| 3608 | // Load the super class. |
| 3609 | __ MaybeUnpoisonHeapReference(temp); |
| 3610 | __ movl(temp, Address(temp, super_class_offset)); |
| 3611 | // If the super class is null, we reached the root of the hierarchy without a match. |
| 3612 | // We let the slow path handle uncovered cases (e.g. interfaces). |
| 3613 | __ testl(temp, temp); |
| 3614 | __ j(kEqual, slow_path->GetEntryLabel()); |
| 3615 | __ jmp(&check_type_compatibility); |
| 3616 | __ Bind(&type_matched); |
| 3617 | } |
| 3618 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3619 | static void GenerateVarHandleInstanceFieldChecks(HInvoke* invoke, |
| 3620 | Register temp, |
| 3621 | SlowPathCode* slow_path, |
| 3622 | X86Assembler* assembler) { |
| 3623 | VarHandleOptimizations optimizations(invoke); |
| 3624 | LocationSummary* locations = invoke->GetLocations(); |
| 3625 | Register varhandle_object = locations->InAt(0).AsRegister<Register>(); |
| 3626 | Register object = locations->InAt(1).AsRegister<Register>(); |
| 3627 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3628 | const uint32_t coordtype0_offset = mirror::VarHandle::CoordinateType0Offset().Uint32Value(); |
| 3629 | const uint32_t coordtype1_offset = mirror::VarHandle::CoordinateType1Offset().Uint32Value(); |
| 3630 | |
| 3631 | // Check that the VarHandle references an instance field by checking that |
| 3632 | // coordinateType1 == null. coordinateType0 should be not null, but this is handled by the |
| 3633 | // type compatibility check with the source object's type, which will fail for null. |
| 3634 | __ cmpl(Address(varhandle_object, coordtype1_offset), Immediate(0)); |
| 3635 | __ j(kNotEqual, slow_path->GetEntryLabel()); |
| 3636 | |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3637 | // Check if the object is null. |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3638 | if (!optimizations.GetSkipObjectNullCheck()) { |
| 3639 | __ testl(object, object); |
| 3640 | __ j(kZero, slow_path->GetEntryLabel()); |
| 3641 | } |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3642 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3643 | // Check the object's class against coordinateType0. |
| 3644 | GenerateSubTypeObjectCheck(object, |
| 3645 | temp, |
| 3646 | Address(varhandle_object, coordtype0_offset), |
| 3647 | slow_path, |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3648 | assembler, |
| 3649 | /* object_can_be_null= */ false); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3650 | } |
| 3651 | |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 3652 | static void GenerateVarTypePrimitiveTypeCheck(Register varhandle_object, |
| 3653 | Register temp, |
| 3654 | DataType::Type type, |
| 3655 | SlowPathCode* slow_path, |
| 3656 | X86Assembler* assembler) { |
| 3657 | const uint32_t var_type_offset = mirror::VarHandle::VarTypeOffset().Uint32Value(); |
| 3658 | const uint32_t primitive_type_offset = mirror::Class::PrimitiveTypeOffset().Uint32Value(); |
| 3659 | const uint32_t primitive_type = static_cast<uint32_t>(DataTypeToPrimitive(type)); |
| 3660 | |
| 3661 | // We do not need a read barrier when a reference is loaded only in order to read a |
| 3662 | // constant field through it. |
| 3663 | __ movl(temp, Address(varhandle_object, var_type_offset)); |
| 3664 | __ MaybeUnpoisonHeapReference(temp); |
| 3665 | __ cmpw(Address(temp, primitive_type_offset), Immediate(primitive_type)); |
| 3666 | __ j(kNotEqual, slow_path->GetEntryLabel()); |
| 3667 | } |
| 3668 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3669 | static void GenerateVarHandleCommonChecks(HInvoke *invoke, |
| 3670 | Register temp, |
| 3671 | SlowPathCode* slow_path, |
| 3672 | X86Assembler* assembler) { |
| 3673 | LocationSummary* locations = invoke->GetLocations(); |
| 3674 | Register vh_object = locations->InAt(0).AsRegister<Register>(); |
| 3675 | mirror::VarHandle::AccessMode access_mode = |
| 3676 | mirror::VarHandle::GetAccessModeByIntrinsic(invoke->GetIntrinsic()); |
| 3677 | |
| 3678 | GenerateVarHandleAccessModeCheck(vh_object, |
| 3679 | access_mode, |
| 3680 | slow_path, |
| 3681 | assembler); |
| 3682 | |
Vladimir Marko | a41ea27 | 2020-09-07 15:24:36 +0000 | [diff] [blame] | 3683 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3684 | switch (expected_coordinates_count) { |
| 3685 | case 0u: |
| 3686 | GenerateVarHandleStaticFieldCheck(vh_object, slow_path, assembler); |
| 3687 | break; |
| 3688 | case 1u: { |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3689 | GenerateVarHandleInstanceFieldChecks(invoke, temp, slow_path, assembler); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3690 | break; |
| 3691 | } |
| 3692 | default: |
| 3693 | // Unimplemented |
| 3694 | UNREACHABLE(); |
| 3695 | } |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3696 | |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 3697 | // Check the return type and varType parameters. |
| 3698 | mirror::VarHandle::AccessModeTemplate access_mode_template = |
| 3699 | mirror::VarHandle::GetAccessModeTemplate(access_mode); |
| 3700 | DataType::Type type = invoke->GetType(); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3701 | |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 3702 | switch (access_mode_template) { |
| 3703 | case mirror::VarHandle::AccessModeTemplate::kGet: |
| 3704 | // Check the varType.primitiveType against the type we're trying to retrieve. Reference |
| 3705 | // types are additionally checked later by an HCheckCast node. |
| 3706 | GenerateVarTypePrimitiveTypeCheck(vh_object, temp, type, slow_path, assembler); |
| 3707 | break; |
| 3708 | case mirror::VarHandle::AccessModeTemplate::kSet: |
| 3709 | case mirror::VarHandle::AccessModeTemplate::kGetAndUpdate: { |
| 3710 | uint32_t value_index = invoke->GetNumberOfArguments() - 1; |
| 3711 | DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index); |
| 3712 | |
| 3713 | // Check the varType.primitiveType against the type of the value we're trying to set. |
| 3714 | GenerateVarTypePrimitiveTypeCheck(vh_object, temp, value_type, slow_path, assembler); |
| 3715 | if (value_type == DataType::Type::kReference) { |
| 3716 | const uint32_t var_type_offset = mirror::VarHandle::VarTypeOffset().Uint32Value(); |
| 3717 | |
| 3718 | // If the value type is a reference, check it against the varType. |
| 3719 | GenerateSubTypeObjectCheck(locations->InAt(value_index).AsRegister<Register>(), |
| 3720 | temp, |
| 3721 | Address(vh_object, var_type_offset), |
| 3722 | slow_path, |
| 3723 | assembler); |
| 3724 | } |
| 3725 | break; |
| 3726 | } |
| 3727 | case mirror::VarHandle::AccessModeTemplate::kCompareAndSet: |
| 3728 | case mirror::VarHandle::AccessModeTemplate::kCompareAndExchange: { |
| 3729 | uint32_t new_value_index = invoke->GetNumberOfArguments() - 1; |
| 3730 | uint32_t expected_value_index = invoke->GetNumberOfArguments() - 2; |
| 3731 | DataType::Type value_type = GetDataTypeFromShorty(invoke, new_value_index); |
| 3732 | DCHECK_EQ(value_type, GetDataTypeFromShorty(invoke, expected_value_index)); |
| 3733 | |
| 3734 | // Check the varType.primitiveType against the type of the expected value. |
| 3735 | GenerateVarTypePrimitiveTypeCheck(vh_object, temp, value_type, slow_path, assembler); |
| 3736 | if (value_type == DataType::Type::kReference) { |
| 3737 | const uint32_t var_type_offset = mirror::VarHandle::VarTypeOffset().Uint32Value(); |
| 3738 | |
| 3739 | // If the value type is a reference, check both the expected and the new value against |
| 3740 | // the varType. |
| 3741 | GenerateSubTypeObjectCheck(locations->InAt(new_value_index).AsRegister<Register>(), |
| 3742 | temp, |
| 3743 | Address(vh_object, var_type_offset), |
| 3744 | slow_path, |
| 3745 | assembler); |
| 3746 | GenerateSubTypeObjectCheck(locations->InAt(expected_value_index).AsRegister<Register>(), |
| 3747 | temp, |
| 3748 | Address(vh_object, var_type_offset), |
| 3749 | slow_path, |
| 3750 | assembler); |
| 3751 | } |
| 3752 | break; |
| 3753 | } |
| 3754 | } |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3755 | } |
| 3756 | |
| 3757 | // This method computes the address of the field referred to by a field VarHandle |
| 3758 | // (base + offset). The return value is the register containing the object's reference (for |
| 3759 | // an instance field) or the declaring class (for a static field); the declaring class is |
| 3760 | // stored in the `temp` register. The field's offset is loaded into the `offset` register. |
| 3761 | static Register GenerateVarHandleFieldReference(HInvoke* invoke, |
| 3762 | CodeGeneratorX86* codegen, |
| 3763 | Register temp, |
| 3764 | /*out*/ Register offset) { |
| 3765 | X86Assembler* assembler = codegen->GetAssembler(); |
| 3766 | LocationSummary* locations = invoke->GetLocations(); |
| 3767 | const uint32_t artfield_offset = mirror::FieldVarHandle::ArtFieldOffset().Uint32Value(); |
| 3768 | const uint32_t offset_offset = ArtField::OffsetOffset().Uint32Value(); |
| 3769 | const uint32_t declaring_class_offset = ArtField::DeclaringClassOffset().Uint32Value(); |
| 3770 | Register varhandle_object = locations->InAt(0).AsRegister<Register>(); |
| 3771 | |
| 3772 | // Load the ArtField and the offset. |
| 3773 | __ movl(temp, Address(varhandle_object, artfield_offset)); |
| 3774 | __ movl(offset, Address(temp, offset_offset)); |
Vladimir Marko | a41ea27 | 2020-09-07 15:24:36 +0000 | [diff] [blame] | 3775 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3776 | if (expected_coordinates_count == 0) { |
| 3777 | // For static fields, load the declaring class. |
| 3778 | InstructionCodeGeneratorX86* instr_codegen = |
| 3779 | down_cast<InstructionCodeGeneratorX86*>(codegen->GetInstructionVisitor()); |
| 3780 | instr_codegen->GenerateGcRootFieldLoad(invoke, |
| 3781 | Location::RegisterLocation(temp), |
| 3782 | Address(temp, declaring_class_offset), |
| 3783 | /* fixup_label= */ nullptr, |
| 3784 | kCompilerReadBarrierOption); |
| 3785 | return temp; |
| 3786 | } |
| 3787 | |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3788 | // For instance fields, return the register containing the object. |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3789 | DCHECK_EQ(expected_coordinates_count, 1u); |
Andra Danciu | 63c0c2d | 2020-09-07 15:50:40 +0000 | [diff] [blame] | 3790 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3791 | return locations->InAt(1).AsRegister<Register>(); |
| 3792 | } |
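| | // Usage note: callers combine the returned register with `offset` to form the field |
| | // address, e.g. Address(ref, offset, TIMES_1, 0), so the same code path serves both |
| | // static and instance fields. |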
| 3793 | |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3794 | static void CreateVarHandleGetLocations(HInvoke* invoke) { |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3795 | // The only read barrier implementation supporting the |
| 3796 | // VarHandleGet intrinsic is the Baker-style read barrier. |
| 3797 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
| 3798 | return; |
Andra Danciu | e3e187f | 2020-07-30 12:19:31 +0000 | [diff] [blame] | 3799 | } |
| 3800 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3801 | if (!HasVarHandleIntrinsicImplementation(invoke)) { |
Andra Danciu | 0875b0a | 2020-08-28 11:49:44 +0000 | [diff] [blame] | 3802 | return; |
| 3803 | } |
| 3804 | |
| 3805 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); |
| 3806 | LocationSummary* locations = new (allocator) LocationSummary( |
| 3807 | invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
| 3808 | locations->SetInAt(0, Location::RequiresRegister()); |
Vladimir Marko | a41ea27 | 2020-09-07 15:24:36 +0000 | [diff] [blame] | 3809 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3810 | if (expected_coordinates_count == 1u) { |
Andra Danciu | 0875b0a | 2020-08-28 11:49:44 +0000 | [diff] [blame] | 3811 | // For instance fields, this is the source object. |
| 3812 | locations->SetInAt(1, Location::RequiresRegister()); |
| 3813 | } |
| 3814 | locations->AddTemp(Location::RequiresRegister()); |
| 3815 | |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 3816 | DataType::Type type = invoke->GetType(); |
Andra Danciu | 0875b0a | 2020-08-28 11:49:44 +0000 | [diff] [blame] | 3817 | switch (DataType::Kind(type)) { |
| 3818 | case DataType::Type::kInt64: |
| 3819 | locations->AddTemp(Location::RequiresRegister()); |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3820 | if (invoke->GetIntrinsic() != Intrinsics::kVarHandleGet) { |
| 3821 | // We need an XmmRegister for Int64 to ensure an atomic load. |
| 3822 | locations->AddTemp(Location::RequiresFpuRegister()); |
| 3823 | } |
Andra Danciu | 0875b0a | 2020-08-28 11:49:44 +0000 | [diff] [blame] | 3824 | FALLTHROUGH_INTENDED; |
| 3825 | case DataType::Type::kInt32: |
| 3826 | case DataType::Type::kReference: |
| 3827 | locations->SetOut(Location::RequiresRegister()); |
| 3828 | break; |
| 3829 | default: |
| 3830 | DCHECK(DataType::IsFloatingPointType(type)); |
| 3831 | locations->AddTemp(Location::RequiresRegister()); |
| 3832 | locations->SetOut(Location::RequiresFpuRegister()); |
| 3833 | } |
Andra Danciu | e3e187f | 2020-07-30 12:19:31 +0000 | [diff] [blame] | 3834 | } |
| 3835 | |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3836 | static void GenerateVarHandleGet(HInvoke* invoke, CodeGeneratorX86* codegen) { |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3837 | // The only read barrier implementation supporting the |
| 3838 | // VarHandleGet intrinsic is the Baker-style read barrier. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 3839 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Andra Danciu | e74df4c | 2020-08-10 09:35:51 +0000 | [diff] [blame] | 3840 | |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3841 | X86Assembler* assembler = codegen->GetAssembler(); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3842 | LocationSummary* locations = invoke->GetLocations(); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3843 | DataType::Type type = invoke->GetType(); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3844 | DCHECK_NE(type, DataType::Type::kVoid); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3845 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3846 | SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
| 3847 | codegen->AddSlowPath(slow_path); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3848 | |
| 3849 | GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler); |
Andra Danciu | e3e187f | 2020-07-30 12:19:31 +0000 | [diff] [blame] | 3850 | |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3851 | Location out = locations->Out(); |
| 3852 | // Use 'out' to hold the offset if it is a core register; otherwise use the extra temp. |
| 3853 | Register offset = |
| 3854 | out.IsRegister() ? out.AsRegister<Register>() : locations->GetTemp(1).AsRegister<Register>(); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3855 | |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3856 | // Get the field referred to by the VarHandle. The returned register contains the object |
| 3857 | // reference or the declaring class. The field offset will be placed in 'offset'. For static |
| 3858 | // fields, the declaring class will be placed in the 'temp' register. |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3859 | Register ref = GenerateVarHandleFieldReference(invoke, codegen, temp, offset); |
| 3860 | Address field_addr(ref, offset, TIMES_1, 0); |
Andra Danciu | 1ca6f32 | 2020-08-12 08:58:07 +0000 | [diff] [blame] | 3861 | |
| 3862 | // Load the value from the field. |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3863 | if (type == DataType::Type::kReference && kCompilerReadBarrierOption == kWithReadBarrier) { |
| 3864 | codegen->GenerateReferenceLoadWithBakerReadBarrier( |
| 3865 | invoke, out, ref, field_addr, /* needs_null_check= */ false); |
| 3866 | } else if (type == DataType::Type::kInt64 && |
| 3867 | invoke->GetIntrinsic() != Intrinsics::kVarHandleGet) { |
| 3868 | XmmRegister xmm_temp = locations->GetTemp(2).AsFpuRegister<XmmRegister>(); |
Ulya Trafimovich | 322eced | 2021-06-02 15:39:36 +0100 | [diff] [blame] | 3869 | codegen->LoadFromMemoryNoBarrier( |
| 3870 | type, out, field_addr, /* instr= */ nullptr, xmm_temp, /* is_atomic_load= */ true); |
Andra Danciu | aa35883 | 2020-08-25 15:09:43 +0000 | [diff] [blame] | 3871 | } else { |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3872 | codegen->LoadFromMemoryNoBarrier(type, out, field_addr); |
| 3873 | } |
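| | // Why the XMM temp matters, as a sketch: on x86-32 a 64-bit load would otherwise take two |
| | // 32-bit moves, which is not atomic. Routing the load through an XMM register (e.g. one |
| | // 8-byte movsd from the field) makes it a single access; the exact instruction sequence |
| | // is chosen by LoadFromMemoryNoBarrier. |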
| 3874 | |
| 3875 | if (invoke->GetIntrinsic() == Intrinsics::kVarHandleGetVolatile || |
| 3876 | invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAcquire) { |
| 3877 | // Load fence to prevent load-load reordering. |
| 3878 | // Note that this is a no-op, thanks to the x86 memory model. |
| 3879 | codegen->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3880 | } |
| 3881 | |
| 3882 | __ Bind(slow_path->GetExitLabel()); |
| 3883 | } |
| 3884 | |
Andra Danciu | d0f71f2 | 2020-09-17 09:00:15 +0000 | [diff] [blame] | 3885 | void IntrinsicLocationsBuilderX86::VisitVarHandleGet(HInvoke* invoke) { |
| 3886 | CreateVarHandleGetLocations(invoke); |
| 3887 | } |
| 3888 | |
| 3889 | void IntrinsicCodeGeneratorX86::VisitVarHandleGet(HInvoke* invoke) { |
| 3890 | GenerateVarHandleGet(invoke, codegen_); |
| 3891 | } |
| 3892 | |
| 3893 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetVolatile(HInvoke* invoke) { |
| 3894 | CreateVarHandleGetLocations(invoke); |
| 3895 | } |
| 3896 | |
| 3897 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetVolatile(HInvoke* invoke) { |
| 3898 | GenerateVarHandleGet(invoke, codegen_); |
| 3899 | } |
| 3900 | |
| 3901 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetAcquire(HInvoke* invoke) { |
| 3902 | CreateVarHandleGetLocations(invoke); |
| 3903 | } |
| 3904 | |
| 3905 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetAcquire(HInvoke* invoke) { |
| 3906 | GenerateVarHandleGet(invoke, codegen_); |
| 3907 | } |
| 3908 | |
| 3909 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetOpaque(HInvoke* invoke) { |
| 3910 | CreateVarHandleGetLocations(invoke); |
| 3911 | } |
| 3912 | |
| 3913 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetOpaque(HInvoke* invoke) { |
| 3914 | GenerateVarHandleGet(invoke, codegen_); |
| 3915 | } |
| 3916 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 3917 | static void CreateVarHandleSetLocations(HInvoke* invoke) { |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3918 | // The only read barrier implementation supporting the |
| 3919 | // VarHandleSet intrinsics is the Baker-style read barrier. |
| 3920 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
| 3921 | return; |
| 3922 | } |
| 3923 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 3924 | if (!HasVarHandleIntrinsicImplementation(invoke)) { |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3925 | return; |
| 3926 | } |
| 3927 | |
| 3928 | // The last argument should be the value we intend to set. |
| 3929 | uint32_t value_index = invoke->GetNumberOfArguments() - 1; |
| 3930 | HInstruction* value = invoke->InputAt(value_index); |
| 3931 | DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index); |
Andra Danciu | 71b26b2 | 2020-09-20 09:01:38 +0000 | [diff] [blame] | 3932 | bool needs_atomicity = invoke->GetIntrinsic() != Intrinsics::kVarHandleSet; |
| 3933 | if (value_type == DataType::Type::kInt64 && (!value->IsConstant() || needs_atomicity)) { |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 3934 | // We avoid the case of a non-constant (or volatile) Int64 value because we would need to |
| 3935 | // place it in a register pair. If the slow path is taken, the ParallelMove might fail to move |
| 3936 | // the pair according to the X86DexCallingConvention in case of an overlap (e.g., move the |
| 3937 | // int64 value from <EAX, EBX> to <EBX, ECX>). (Bug: b/168687887) |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3938 | return; |
| 3939 | } |
| 3940 | |
| 3941 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); |
| 3942 | LocationSummary* locations = new (allocator) LocationSummary( |
| 3943 | invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3944 | locations->SetInAt(0, Location::RequiresRegister()); |
Vladimir Marko | a41ea27 | 2020-09-07 15:24:36 +0000 | [diff] [blame] | 3945 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3946 | if (expected_coordinates_count == 1u) { |
| 3947 | // For instance fields, this is the source object. |
| 3948 | locations->SetInAt(1, Location::RequiresRegister()); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3949 | } |
| 3950 | |
| 3951 | switch (value_type) { |
| 3952 | case DataType::Type::kBool: |
| 3953 | case DataType::Type::kInt8: |
| 3954 | case DataType::Type::kUint8: |
| 3955 | // Ensure the value is in a byte register. |
| 3956 | locations->SetInAt(value_index, Location::ByteRegisterOrConstant(EBX, value)); |
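| | // (Only EAX/EBX/ECX/EDX have 8-bit forms (AL/BL/CL/DL) on x86-32; a plain |
| | // RequiresRegister() could hand out ESI or EDI, which byte stores cannot encode.) |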
| 3957 | break; |
| 3958 | case DataType::Type::kInt16: |
| 3959 | case DataType::Type::kUint16: |
| 3960 | case DataType::Type::kInt32: |
| 3961 | locations->SetInAt(value_index, Location::RegisterOrConstant(value)); |
| 3962 | break; |
| 3963 | case DataType::Type::kInt64: |
Andra Danciu | 71b26b2 | 2020-09-20 09:01:38 +0000 | [diff] [blame] | 3964 | // We only handle constant non-atomic int64 values. |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3965 | DCHECK(value->IsConstant()); |
| 3966 | locations->SetInAt(value_index, Location::ConstantLocation(value->AsConstant())); |
| 3967 | break; |
| 3968 | case DataType::Type::kReference: |
| 3969 | locations->SetInAt(value_index, Location::RequiresRegister()); |
| 3970 | break; |
| 3971 | default: |
| 3972 | DCHECK(DataType::IsFloatingPointType(value_type)); |
Andra Danciu | 71b26b2 | 2020-09-20 09:01:38 +0000 | [diff] [blame] | 3973 | if (needs_atomicity && value_type == DataType::Type::kFloat64) { |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 3974 | locations->SetInAt(value_index, Location::RequiresFpuRegister()); |
| 3975 | } else { |
| 3976 | locations->SetInAt(value_index, Location::FpuRegisterOrConstant(value)); |
| 3977 | } |
| 3978 | } |
| 3979 | |
| 3980 | locations->AddTemp(Location::RequiresRegister()); |
| 3981 | // This temporary register is also used for the card in MarkGCCard, so it must be a byte register. |
| 3982 | locations->AddTemp(Location::RegisterLocation(EAX)); |
| 3983 | if (expected_coordinates_count == 0 && value_type == DataType::Type::kReference) { |
| 3984 | // For static reference fields, we need another temporary for the declaring class. We set it |
| 3985 | // last because we want to make sure that the first 2 temps are reserved for HandleFieldSet. |
| 3986 | locations->AddTemp(Location::RequiresRegister()); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3987 | } |
| 3988 | } |
| 3989 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 3990 | static void GenerateVarHandleSet(HInvoke* invoke, CodeGeneratorX86* codegen) { |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3991 | // The only read barrier implementation supporting the |
| 3992 | // VarHandleSet intrinsics is the Baker-style read barrier. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 3993 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3994 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 3995 | X86Assembler* assembler = codegen->GetAssembler(); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3996 | LocationSummary* locations = invoke->GetLocations(); |
| 3997 | // The value we want to set is the last argument. |
| 3998 | uint32_t value_index = invoke->GetNumberOfArguments() - 1; |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 3999 | DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 4000 | Register temp = locations->GetTemp(0).AsRegister<Register>(); |
| 4001 | Register temp2 = locations->GetTemp(1).AsRegister<Register>(); |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4002 | SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
| 4003 | codegen->AddSlowPath(slow_path); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 4004 | |
| 4005 | GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler); |
| 4006 | |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 4007 | // For static reference fields, we need another temporary for the declaring class. But since |
| 4008 | // for instance fields the object is in a separate register, it is safe to use the first |
| 4009 | // temporary register for GenerateVarHandleFieldReference. |
| 4010 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
| 4011 | if (value_type == DataType::Type::kReference && expected_coordinates_count == 0) { |
| 4012 | temp = locations->GetTemp(2).AsRegister<Register>(); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 4013 | } |
| 4014 | |
| 4015 | Register offset = temp2; |
| 4016 | // Get the field referred to by the VarHandle. The returned register contains the object |
| 4017 | // reference or the declaring class. The field offset will be placed in 'offset'. For static |
| 4018 | // fields, the declaring class will be placed in the 'temp' register. |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4019 | Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset); |
Andra Danciu | 73c3180 | 2020-09-01 13:17:05 +0000 | [diff] [blame] | 4020 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4021 | bool is_volatile = false; |
| 4022 | switch (invoke->GetIntrinsic()) { |
| 4023 | case Intrinsics::kVarHandleSet: |
| 4024 | case Intrinsics::kVarHandleSetOpaque: |
| 4025 | // The only constraint for setOpaque is to ensure bitwise atomicity (atomically setting |
| 4026 | // 64-bit values), but we don't handle Int64 values here because we would need to place |
| 4027 | // them in a register pair. If the slow path is taken, the ParallelMove might fail to move |
| 4028 | // the pair in case of an overlap (e.g., move from <EAX, EBX> to <EBX, ECX>). (Bug: b/168687887) |
| 4029 | break; |
Andra Danciu | 71b26b2 | 2020-09-20 09:01:38 +0000 | [diff] [blame] | 4030 | case Intrinsics::kVarHandleSetRelease: |
| 4031 | // setRelease needs to ensure atomicity too. See the above comment. |
| 4032 | codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore); |
| 4033 | break; |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4034 | case Intrinsics::kVarHandleSetVolatile: |
| 4035 | is_volatile = true; |
| 4036 | break; |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4037 | default: |
| 4038 | LOG(FATAL) << "GenerateVarHandleSet received non-set intrinsic " << invoke->GetIntrinsic(); |
Andra Danciu | aa35883 | 2020-08-25 15:09:43 +0000 | [diff] [blame] | 4039 | } |
Andra Danciu | e3e187f | 2020-07-30 12:19:31 +0000 | [diff] [blame] | 4040 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4041 | InstructionCodeGeneratorX86* instr_codegen = |
| 4042 | down_cast<InstructionCodeGeneratorX86*>(codegen->GetInstructionVisitor()); |
| 4043 | // Store the value to the field. |
| 4044 | instr_codegen->HandleFieldSet(invoke, |
| 4045 | value_index, |
| 4046 | value_type, |
| 4047 | Address(reference, offset, TIMES_1, 0), |
| 4048 | reference, |
| 4049 | is_volatile, |
| 4050 | /* value_can_be_null= */ true); |
| 4051 | |
Andra Danciu | e3e187f | 2020-07-30 12:19:31 +0000 | [diff] [blame] | 4052 | __ Bind(slow_path->GetExitLabel()); |
| 4053 | } |
| 4054 | |
Andra Danciu | cde9819 | 2020-09-13 12:32:09 +0000 | [diff] [blame] | 4055 | void IntrinsicLocationsBuilderX86::VisitVarHandleSet(HInvoke* invoke) { |
| 4056 | CreateVarHandleSetLocations(invoke); |
| 4057 | } |
| 4058 | |
| 4059 | void IntrinsicCodeGeneratorX86::VisitVarHandleSet(HInvoke* invoke) { |
| 4060 | GenerateVarHandleSet(invoke, codegen_); |
| 4061 | } |
| 4062 | |
| 4063 | void IntrinsicLocationsBuilderX86::VisitVarHandleSetVolatile(HInvoke* invoke) { |
| 4064 | CreateVarHandleSetLocations(invoke); |
| 4065 | } |
| 4066 | |
| 4067 | void IntrinsicCodeGeneratorX86::VisitVarHandleSetVolatile(HInvoke* invoke) { |
| 4068 | GenerateVarHandleSet(invoke, codegen_); |
| 4069 | } |
| 4070 | |
| 4071 | void IntrinsicLocationsBuilderX86::VisitVarHandleSetRelease(HInvoke* invoke) { |
| 4072 | CreateVarHandleSetLocations(invoke); |
| 4073 | } |
| 4074 | |
| 4075 | void IntrinsicCodeGeneratorX86::VisitVarHandleSetRelease(HInvoke* invoke) { |
| 4076 | GenerateVarHandleSet(invoke, codegen_); |
| 4077 | } |
| 4078 | |
| 4079 | void IntrinsicLocationsBuilderX86::VisitVarHandleSetOpaque(HInvoke* invoke) { |
| 4080 | CreateVarHandleSetLocations(invoke); |
| 4081 | } |
| 4082 | |
| 4083 | void IntrinsicCodeGeneratorX86::VisitVarHandleSetOpaque(HInvoke* invoke) { |
| 4084 | GenerateVarHandleSet(invoke, codegen_); |
| 4085 | } |
| 4086 | |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 4087 | static void CreateVarHandleGetAndSetLocations(HInvoke* invoke) { |
| 4088 | // The only read barrier implementation supporting the |
| 4089 | // VarHandleGetAndSet intrinsics is the Baker-style read barrier. |
| 4090 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
| 4091 | return; |
| 4092 | } |
| 4093 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 4094 | if (!HasVarHandleIntrinsicImplementation(invoke)) { |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 4095 | return; |
| 4096 | } |
| 4097 | |
| 4098 | uint32_t number_of_arguments = invoke->GetNumberOfArguments(); |
| 4099 | uint32_t value_index = number_of_arguments - 1; |
| 4100 | DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index); |
| 4101 | |
| 4102 | if (DataType::Is64BitType(value_type)) { |
| 4103 | // We avoid the case of an Int64/Float64 value because we would need to place it in a register |
| 4104 | // pair. If the slow path is taken, the ParallelMove might fail to move the pair according to |
| 4105 | // the X86DexCallingConvention in case of an overlap (e.g., move the 64 bit value from |
| 4106 | // <EAX, EBX> to <EBX, ECX>). |
| 4107 | return; |
| 4108 | } |
| 4109 | |
| 4110 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); |
| 4111 | LocationSummary* locations = new (allocator) LocationSummary( |
| 4112 | invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
| 4113 | locations->AddTemp(Location::RequiresRegister()); |
| 4114 | locations->AddTemp(Location::RequiresRegister()); |
| 4115 | // We use this temporary for the card, so we need a byte register. |
| 4116 | locations->AddTemp(Location::RegisterLocation(EBX)); |
| 4117 | locations->SetInAt(0, Location::RequiresRegister()); |
| 4118 | if (GetExpectedVarHandleCoordinatesCount(invoke) == 1u) { |
| 4119 | // For instance fields, this is the source object |
| 4120 | locations->SetInAt(1, Location::RequiresRegister()); |
| 4121 | } else { |
| 4122 | // For static fields, we need another temp because one will be busy with the declaring class. |
| 4123 | locations->AddTemp(Location::RequiresRegister()); |
| 4124 | } |
| 4125 | if (value_type == DataType::Type::kFloat32) { |
| 4126 | locations->AddTemp(Location::RegisterLocation(EAX)); |
| 4127 | locations->SetInAt(value_index, Location::FpuRegisterOrConstant(invoke->InputAt(value_index))); |
| 4128 | locations->SetOut(Location::RequiresFpuRegister()); |
| 4129 | } else { |
| 4130 | locations->SetInAt(value_index, Location::RegisterLocation(EAX)); |
| 4131 | locations->SetOut(Location::RegisterLocation(EAX)); |
| 4132 | } |
| 4133 | } |
| 4134 | |
| 4135 | static void GenerateVarHandleGetAndSet(HInvoke* invoke, CodeGeneratorX86* codegen) { |
| 4136 | // The only read barrier implementation supporting the |
| 4137 | // VarHandleGetAndSet intrinsics is the Baker-style read barrier. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 4138 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Andra Danciu | 9dfb1a9 | 2020-09-22 13:27:18 +0000 | [diff] [blame] | 4139 | |
| 4140 | X86Assembler* assembler = codegen->GetAssembler(); |
| 4141 | LocationSummary* locations = invoke->GetLocations(); |
| 4142 | // The value we want to set is the last argument |
| 4143 | uint32_t value_index = invoke->GetNumberOfArguments() - 1; |
| 4144 | Location value = locations->InAt(value_index); |
| 4145 | DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index); |
| 4146 | Register temp = locations->GetTemp(1).AsRegister<Register>(); |
| 4147 | Register temp2 = locations->GetTemp(2).AsRegister<Register>(); |
| 4148 | SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
| 4149 | codegen->AddSlowPath(slow_path); |
| 4150 | |
| 4151 | GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler); |
| 4152 | |
| 4153 | Register offset = locations->GetTemp(0).AsRegister<Register>(); |
| 4154 | // Get the field referred to by the VarHandle. The returned register contains the object |
| 4155 | // reference or the declaring class. The field offset will be placed in 'offset'. For static |
| 4156 | // fields, the declaring class will be placed in the 'temp' register. |
| 4157 | Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset); |
| 4158 | Address field_addr(reference, offset, TIMES_1, 0); |
| 4159 | |
| 4160 | if (invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndSetRelease) { |
| 4161 | codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore); |
| 4162 | } |
| 4163 | |
| 4164 | size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
| 4165 | // For static fields, we need another temporary for the declaring class. But since for instance |
| 4166 | // fields the object is in a separate register, it is safe to use the first temporary register. |
| 4167 | temp = expected_coordinates_count == 1u ? temp : locations->GetTemp(3).AsRegister<Register>(); |
| 4168 | // No need for a lock prefix: `xchg` has an implicit lock when used with a memory operand. |
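| | // (Background: `xchg` with a memory operand asserts LOCK# implicitly, making the swap a |
| | // full two-way barrier on its own; this is what gives plain getAndSet its required |
| | // ordering here without extra fence instructions.) |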
| 4169 | switch (value_type) { |
| 4170 | case DataType::Type::kBool: |
| 4171 | __ xchgb(value.AsRegister<ByteRegister>(), field_addr); |
| 4172 | __ movzxb(locations->Out().AsRegister<Register>(), |
| 4173 | locations->Out().AsRegister<ByteRegister>()); |
| 4174 | break; |
| 4175 | case DataType::Type::kInt8: |
| 4176 | __ xchgb(value.AsRegister<ByteRegister>(), field_addr); |
| 4177 | __ movsxb(locations->Out().AsRegister<Register>(), |
| 4178 | locations->Out().AsRegister<ByteRegister>()); |
| 4179 | break; |
| 4180 | case DataType::Type::kUint16: |
| 4181 | __ xchgw(value.AsRegister<Register>(), field_addr); |
| 4182 | __ movzxw(locations->Out().AsRegister<Register>(), locations->Out().AsRegister<Register>()); |
| 4183 | break; |
| 4184 | case DataType::Type::kInt16: |
| 4185 | __ xchgw(value.AsRegister<Register>(), field_addr); |
| 4186 | __ movsxw(locations->Out().AsRegister<Register>(), locations->Out().AsRegister<Register>()); |
| 4187 | break; |
| 4188 | case DataType::Type::kInt32: |
| 4189 | __ xchgl(value.AsRegister<Register>(), field_addr); |
| 4190 | break; |
| 4191 | case DataType::Type::kFloat32: |
| 4192 | codegen->Move32(Location::RegisterLocation(EAX), value); |
| 4193 | __ xchgl(EAX, field_addr); |
| 4194 | __ movd(locations->Out().AsFpuRegister<XmmRegister>(), EAX); |
| 4195 | break; |
| 4196 | case DataType::Type::kReference: { |
| 4197 | if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 4198 | // Need to make sure the reference stored in the field is a to-space |
| 4199 | // one before attempting the CAS or the CAS could fail incorrectly. |
| 4200 | codegen->GenerateReferenceLoadWithBakerReadBarrier( |
| 4201 | invoke, |
| 4202 | // Unused, used only as a "temporary" within the read barrier. |
| 4203 | Location::RegisterLocation(temp), |
| 4204 | reference, |
| 4205 | field_addr, |
| 4206 | /* needs_null_check= */ false, |
| 4207 | /* always_update_field= */ true, |
| 4208 | &temp2); |
| 4209 | } |
| 4210 | codegen->MarkGCCard( |
| 4211 | temp, temp2, reference, value.AsRegister<Register>(), /* value_can_be_null= */ false); |
| 4212 | if (kPoisonHeapReferences) { |
| 4213 | __ movl(temp, value.AsRegister<Register>()); |
| 4214 | __ PoisonHeapReference(temp); |
| 4215 | __ xchgl(temp, field_addr); |
| 4216 | __ UnpoisonHeapReference(temp); |
| 4217 | __ movl(locations->Out().AsRegister<Register>(), temp); |
| 4218 | } else { |
| 4219 | __ xchgl(locations->Out().AsRegister<Register>(), field_addr); |
| 4220 | } |
| 4221 | break; |
| 4222 | } |
| 4223 | default: |
| 4224 | UNREACHABLE(); |
| 4225 | } |
| 4226 | |
| 4227 | if (invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndSetAcquire) { |
| 4228 | codegen->GenerateMemoryBarrier(MemBarrierKind::kLoadAny); |
| 4229 | } |
| 4230 | |
| 4231 | __ Bind(slow_path->GetExitLabel()); |
| 4232 | } |
| 4233 | |
| 4234 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndSet(HInvoke* invoke) { |
| 4235 | CreateVarHandleGetAndSetLocations(invoke); |
| 4236 | } |
| 4237 | |
| 4238 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndSet(HInvoke* invoke) { |
| 4239 | GenerateVarHandleGetAndSet(invoke, codegen_); |
| 4240 | } |
| 4241 | |
| 4242 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndSetAcquire(HInvoke* invoke) { |
| 4243 | CreateVarHandleGetAndSetLocations(invoke); |
| 4244 | } |
| 4245 | |
| 4246 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndSetAcquire(HInvoke* invoke) { |
| 4247 | GenerateVarHandleGetAndSet(invoke, codegen_); |
| 4248 | } |
| 4249 | |
| 4250 | void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndSetRelease(HInvoke* invoke) { |
| 4251 | CreateVarHandleGetAndSetLocations(invoke); |
| 4252 | } |
| 4253 | |
| 4254 | void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndSetRelease(HInvoke* invoke) { |
| 4255 | GenerateVarHandleGetAndSet(invoke, codegen_); |
| 4256 | } |
| 4257 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4258 | static void CreateVarHandleCompareAndSetOrExchangeLocations(HInvoke* invoke) { |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4259 | // The only read barrier implementation supporting the |
| 4260 | // VarHandle compare-and-set/exchange intrinsics is the Baker-style read barrier. |
| 4261 | if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) { |
| 4262 | return; |
| 4263 | } |
| 4264 | |
Ulyana Trafimovich | 98f01d1 | 2021-07-28 14:33:34 +0000 | [diff] [blame] | 4265 | if (!HasVarHandleIntrinsicImplementation(invoke)) { |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4266 | return; |
| 4267 | } |
| 4268 | |
| 4269 | uint32_t number_of_arguments = invoke->GetNumberOfArguments(); |
| 4270 | uint32_t expected_value_index = number_of_arguments - 2; |
| 4271 | uint32_t new_value_index = number_of_arguments - 1; |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4272 | DataType::Type value_type = GetDataTypeFromShorty(invoke, expected_value_index); |
| 4273 | DCHECK_EQ(value_type, GetDataTypeFromShorty(invoke, new_value_index)); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4274 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4275 | if (DataType::Is64BitType(value_type)) { |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4276 | // We avoid the case of an Int64/Float64 value because we would need to place it in a register |
| 4277 | // pair. If the slow path is taken, the ParallelMove might fail to move the pair according to |
| 4278 | // the X86DexCallingConvention in case of an overlap (e.g., move the 64 bit value from |
| 4279 | // <EAX, EBX> to <EBX, ECX>). |
| 4280 | return; |
| 4281 | } |
| 4282 | |
Andra Danciu | 52d2c0c | 2020-09-15 14:27:21 +0000 | [diff] [blame] | 4283 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator(); |
| 4284 | LocationSummary* locations = new (allocator) LocationSummary( |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4285 | invoke, LocationSummary::kCallOnSlowPath, kIntrinsified); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4286 | locations->AddTemp(Location::RequiresRegister()); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4287 | locations->AddTemp(Location::RequiresRegister()); |
| 4288 | // We use this temporary for the card, so we need a byte register. |
| 4289 | locations->AddTemp(Location::RegisterLocation(EBX)); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4290 | locations->SetInAt(0, Location::RequiresRegister()); |
| 4291 | if (GetExpectedVarHandleCoordinatesCount(invoke) == 1u) { |
| 4292 | // For instance fields, this is the source object |
| 4293 | locations->SetInAt(1, Location::RequiresRegister()); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4294 | } else { |
| 4295 | // For static fields, we need another temp because one will be busy with the declaring class. |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4296 | locations->AddTemp(Location::RequiresRegister()); |
| 4297 | } |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4298 | if (DataType::IsFloatingPointType(value_type)) { |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4299 | // We need EAX for placing the expected value. |
| 4300 | locations->AddTemp(Location::RegisterLocation(EAX)); |
| 4301 | locations->SetInAt(new_value_index, |
| 4302 | Location::FpuRegisterOrConstant(invoke->InputAt(new_value_index))); |
| 4303 | locations->SetInAt(expected_value_index, |
| 4304 | Location::FpuRegisterOrConstant(invoke->InputAt(expected_value_index))); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4305 | } else { |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4306 | // Ensure it's in a byte register |
| 4307 | locations->SetInAt(new_value_index, Location::RegisterLocation(ECX)); |
| 4308 | locations->SetInAt(expected_value_index, Location::RegisterLocation(EAX)); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4309 | } |
| 4310 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4311 | mirror::VarHandle::AccessModeTemplate access_mode_template = |
| 4312 | mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic()); |
| 4313 | |
| 4314 | if (access_mode_template == mirror::VarHandle::AccessModeTemplate::kCompareAndExchange && |
| 4315 | value_type == DataType::Type::kFloat32) { |
| 4316 | locations->SetOut(Location::RequiresFpuRegister()); |
| 4317 | } else { |
| 4318 | locations->SetOut(Location::RegisterLocation(EAX)); |
| 4319 | } |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4320 | } |
| 4321 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4322 | static void GenerateVarHandleCompareAndSetOrExchange(HInvoke* invoke, CodeGeneratorX86* codegen) { |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4323 | // The only read barrier implementation supporting the |
| 4324 | // VarHandle compare-and-set/exchange intrinsics is the Baker-style read barrier. |
Santiago Aboy Solanes | 872ec72 | 2022-02-18 14:10:25 +0000 | [diff] [blame] | 4325 | DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4326 | |
Andra Danciu | 52d2c0c | 2020-09-15 14:27:21 +0000 | [diff] [blame] | 4327 | X86Assembler* assembler = codegen->GetAssembler(); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4328 | LocationSummary* locations = invoke->GetLocations(); |
| 4329 | uint32_t number_of_arguments = invoke->GetNumberOfArguments(); |
| 4330 | uint32_t expected_value_index = number_of_arguments - 2; |
| 4331 | uint32_t new_value_index = number_of_arguments - 1; |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4332 | DataType::Type type = GetDataTypeFromShorty(invoke, expected_value_index); |
| 4333 | DCHECK_EQ(type, GetDataTypeFromShorty(invoke, new_value_index)); |
| 4334 | Location expected_value = locations->InAt(expected_value_index); |
| 4335 | Location new_value = locations->InAt(new_value_index); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4336 | Register offset = locations->GetTemp(0).AsRegister<Register>(); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4337 | Register temp = locations->GetTemp(1).AsRegister<Register>(); |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4338 | Register temp2 = locations->GetTemp(2).AsRegister<Register>(); |
Andra Danciu | 52d2c0c | 2020-09-15 14:27:21 +0000 | [diff] [blame] | 4339 | SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke); |
| 4340 | codegen->AddSlowPath(slow_path); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4341 | |
| 4342 | GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4343 | |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4344 | // Get the field referred to by the VarHandle. The returned register contains the object |
| 4345 | // reference or the declaring class. The field offset will be placed in 'offset'. For static |
| 4346 | // fields, the declaring class will be placed in the 'temp' register. |
Andra Danciu | 52d2c0c | 2020-09-15 14:27:21 +0000 | [diff] [blame] | 4347 | Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset); |
Andra Danciu | 5e13d45 | 2020-09-08 14:35:09 +0000 | [diff] [blame] | 4348 | |
Andra Danciu | 8d8380a | 2020-09-11 09:24:01 +0000 | [diff] [blame] | 4349 | uint32_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke); |
| 4350 | // For generating the compare and exchange, we need 2 temporaries. In case of a static field, the |
| 4351 | // first temporary contains the declaring class so we need another temporary. In case of an |
| 4352 | // instance field, the object comes in a separate register so it's safe to use the first temp. |
| 4353 | temp = (expected_coordinates_count == 1u) ? temp : locations->GetTemp(3).AsRegister<Register>(); |
| 4354 | DCHECK_NE(temp, reference); |
| 4355 | |
Andra Danciu | 52d2c0c | 2020-09-15 14:27:21 +0000 | [diff] [blame] | 4356 | // We are using `lock cmpxchg` in all cases because there is no CAS equivalent that has weak |
| 4357 | // failure semantics. `lock cmpxchg` has full barrier semantics, and we don't need scheduling |
| 4358 | // barriers at this time. |
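| | // Sketch of `lock cmpxchg dst, src` semantics: if EAX == dst, then dst := src and ZF := 1; |
| | // otherwise EAX := dst and ZF := 0. compareAndSet can therefore materialize its boolean |
| | // from ZF, while compareAndExchange returns the old value already left in EAX, matching |
| | // the EAX output location chosen in the locations builder. |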
| 4359 | |
Andra Danciu | 370948e | 2020-09-23 08:07:25 +0000 | [diff] [blame] | 4360 | mirror::VarHandle::AccessModeTemplate access_mode_template = |
| 4361 | mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic()); |
| 4362 | bool is_cmpxchg = |
| 4363 | access_mode_template == mirror::VarHandle::AccessModeTemplate::kCompareAndExchange; |

  if (type == DataType::Type::kReference) {
    GenReferenceCAS(
        invoke, codegen, expected_value, new_value, reference, offset, temp, temp2, is_cmpxchg);
  } else {
    Location out = locations->Out();
    GenPrimitiveCAS(
        type, codegen, expected_value, new_value, reference, offset, out, temp, is_cmpxchg);
  }

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndExchange(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndExchange(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndExchangeAcquire(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndExchangeAcquire(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndExchangeRelease(HInvoke* invoke) {
  CreateVarHandleCompareAndSetOrExchangeLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndExchangeRelease(HInvoke* invoke) {
  GenerateVarHandleCompareAndSetOrExchange(invoke, codegen_);
}

static void CreateVarHandleGetAndAddLocations(HInvoke* invoke) {
  // The only read barrier implementation supporting the VarHandleGetAndAdd intrinsics is the
  // Baker-style read barrier.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  if (!HasVarHandleIntrinsicImplementation(invoke)) {
    return;
  }

  // The last argument should be the value we intend to add.
  uint32_t value_index = invoke->GetNumberOfArguments() - 1;
  DataType::Type value_type = GetDataTypeFromShorty(invoke, value_index);
  if (DataType::Is64BitType(value_type)) {
    // We avoid the case of an Int64/Float64 value because we would need to place it in a register
    // pair. If the slow path is taken, the ParallelMove might fail to move the pair according to
    // the X86DexCallingConvention in case of an overlap (e.g., move the 64-bit value from
    // <EAX, EBX> to <EBX, ECX>). (Bug: b/168687887)
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(
      invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetInAt(0, Location::RequiresRegister());
  size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
  if (expected_coordinates_count == 1u) {
    // For instance fields, this is the source object.
    locations->SetInAt(1, Location::RequiresRegister());
  } else {
    // For static fields, we need another temp because one will be busy with the declaring class.
    locations->AddTemp(Location::RequiresRegister());
  }

  if (DataType::IsFloatingPointType(value_type)) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RegisterLocation(EAX));
    locations->SetInAt(value_index, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // `xadd` updates the register argument with the old value. A ByteRegister is required
    // for `xaddb`.
    locations->SetInAt(value_index, Location::RegisterLocation(EAX));
    locations->SetOut(Location::RegisterLocation(EAX));
  }
}

static void GenerateVarHandleGetAndAdd(HInvoke* invoke, CodeGeneratorX86* codegen) {
  // The only read barrier implementation supporting the VarHandleGetAndAdd intrinsics is the
  // Baker-style read barrier.
  DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier);

  X86Assembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  uint32_t number_of_arguments = invoke->GetNumberOfArguments();
  uint32_t value_index = number_of_arguments - 1;
  DataType::Type type = GetDataTypeFromShorty(invoke, value_index);
  DCHECK_EQ(type, invoke->GetType());
  Location value_loc = locations->InAt(value_index);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
  codegen->AddSlowPath(slow_path);

  GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler);

  Register offset = locations->GetTemp(1).AsRegister<Register>();
  // Get the field referred to by the VarHandle. The returned register contains the object
  // reference or the declaring class. The field offset will be placed in 'offset'. For static
  // fields, the declaring class will be placed in the 'temp' register.
  Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset);

  size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
  temp = (expected_coordinates_count == 1u) ? temp : locations->GetTemp(2).AsRegister<Register>();
  DCHECK_NE(temp, reference);
  Address field_addr(reference, offset, TIMES_1, 0);

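  // For the integral cases below, `lock xadd` atomically performs, in pseudo-code:
  //   tmp = [field]; [field] = tmp + reg; reg = tmp;
  // so the old value ends up in the value register, which is also the output location. Since
  // `lock`-prefixed read-modify-write instructions are full barriers on x86, the plain,
  // acquire, and release variants can all share this code.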
  switch (type) {
    case DataType::Type::kInt8:
      __ LockXaddb(field_addr, value_loc.AsRegister<ByteRegister>());
      __ movsxb(locations->Out().AsRegister<Register>(),
                locations->Out().AsRegister<ByteRegister>());
      break;
    case DataType::Type::kInt16:
      __ LockXaddw(field_addr, value_loc.AsRegister<Register>());
      __ movsxw(locations->Out().AsRegister<Register>(), locations->Out().AsRegister<Register>());
      break;
    case DataType::Type::kUint16:
      __ LockXaddw(field_addr, value_loc.AsRegister<Register>());
      __ movzxw(locations->Out().AsRegister<Register>(), locations->Out().AsRegister<Register>());
      break;
    case DataType::Type::kInt32:
      __ LockXaddl(field_addr, value_loc.AsRegister<Register>());
      break;
    case DataType::Type::kFloat32: {
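      // There is no atomic floating-point add, so emit a load / addss / `lock cmpxchg` retry
      // loop; a rough sketch in pseudo-code:
      //   do {
      //     old = [field];                    // movss
      //     new = old + value;                // addss
      //   } while (!cas(field, old, new));    // lock cmpxchg; ZF set on success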
      Location temp_float =
          (expected_coordinates_count == 1u) ? locations->GetTemp(2) : locations->GetTemp(3);
      DCHECK(temp_float.IsFpuRegister());
      Location eax = Location::RegisterLocation(EAX);
      NearLabel try_again;
      __ Bind(&try_again);
      __ movss(temp_float.AsFpuRegister<XmmRegister>(), field_addr);
      __ movd(EAX, temp_float.AsFpuRegister<XmmRegister>());
      __ addss(temp_float.AsFpuRegister<XmmRegister>(),
               value_loc.AsFpuRegister<XmmRegister>());
      GenPrimitiveLockedCmpxchg(type,
                                codegen,
                                /* expected_value= */ eax,
                                /* new_value= */ temp_float,
                                reference,
                                offset,
                                temp);
      __ j(kNotZero, &try_again);

      // The old value is present in EAX.
      codegen->Move32(locations->Out(), eax);
      break;
    }
    default:
      UNREACHABLE();
  }

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndAdd(HInvoke* invoke) {
  CreateVarHandleGetAndAddLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndAdd(HInvoke* invoke) {
  GenerateVarHandleGetAndAdd(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndAddAcquire(HInvoke* invoke) {
  CreateVarHandleGetAndAddLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndAddAcquire(HInvoke* invoke) {
  GenerateVarHandleGetAndAdd(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndAddRelease(HInvoke* invoke) {
  CreateVarHandleGetAndAddLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndAddRelease(HInvoke* invoke) {
  GenerateVarHandleGetAndAdd(invoke, codegen_);
}

static void CreateVarHandleGetAndBitwiseOpLocations(HInvoke* invoke) {
  // The only read barrier implementation supporting the VarHandleGetAndBitwiseOp intrinsics is
  // the Baker-style read barrier.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  if (!HasVarHandleIntrinsicImplementation(invoke)) {
    return;
  }

  // The last argument should be the value we intend to combine with the field.
  uint32_t value_index = invoke->GetNumberOfArguments() - 1;
  if (DataType::Is64BitType(GetDataTypeFromShorty(invoke, value_index))) {
    // We avoid the case of an Int64 value because we would need to place it in a register pair.
    // If the slow path is taken, the ParallelMove might fail to move the pair according to the
    // X86DexCallingConvention in case of an overlap (e.g., move the 64-bit value from
    // <EAX, EBX> to <EBX, ECX>). (Bug: b/168687887)
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(
      invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // We need a byte register temp to store the result of the bitwise operation.
  locations->AddTemp(Location::RegisterLocation(EBX));
  locations->AddTemp(Location::RequiresRegister());
  locations->SetInAt(0, Location::RequiresRegister());
  size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
  if (expected_coordinates_count == 1u) {
    // For instance fields, this is the source object.
    locations->SetInAt(1, Location::RequiresRegister());
  } else {
    // For static fields, we need another temp because one will be busy with the declaring class.
    locations->AddTemp(Location::RequiresRegister());
  }

  locations->SetInAt(value_index, Location::RegisterOrConstant(invoke->InputAt(value_index)));
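  // `lock cmpxchg` implicitly uses EAX for the expected/old value, so the old value (the
  // result of a get-and-bitwise-op) naturally lands in EAX.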
  locations->SetOut(Location::RegisterLocation(EAX));
}

static void GenerateBitwiseOp(HInvoke* invoke,
                              CodeGeneratorX86* codegen,
                              Register left,
                              Register right) {
  X86Assembler* assembler = codegen->GetAssembler();

  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndBitwiseOr:
    case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
      __ orl(left, right);
      break;
    case Intrinsics::kVarHandleGetAndBitwiseXor:
    case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
      __ xorl(left, right);
      break;
    case Intrinsics::kVarHandleGetAndBitwiseAnd:
    case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
      __ andl(left, right);
      break;
    default:
      UNREACHABLE();
  }
}

static void GenerateVarHandleGetAndBitwiseOp(HInvoke* invoke, CodeGeneratorX86* codegen) {
  // The only read barrier implementation supporting the VarHandleGetAndBitwiseOp intrinsics is
  // the Baker-style read barrier.
  DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier);

  X86Assembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  uint32_t value_index = invoke->GetNumberOfArguments() - 1;
  DataType::Type type = GetDataTypeFromShorty(invoke, value_index);
  DCHECK_EQ(type, invoke->GetType());
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
  codegen->AddSlowPath(slow_path);

  GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler);

  Register offset = locations->GetTemp(1).AsRegister<Register>();
  size_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
  // For static fields, we need another temporary because the first one contains the declaring
  // class.
  Register reference =
      (expected_coordinates_count == 1u) ? temp : locations->GetTemp(2).AsRegister<Register>();
  // Get the field referred to by the VarHandle. The returned register contains the object
  // reference or the declaring class. The field offset will be placed in 'offset'. For static
  // fields, the declaring class will be placed in the 'reference' register.
  reference = GenerateVarHandleFieldReference(invoke, codegen, reference, offset);
  DCHECK_NE(temp, reference);
  Address field_addr(reference, offset, TIMES_1, 0);

  Register out = locations->Out().AsRegister<Register>();
  DCHECK_EQ(out, EAX);

  if (invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseOrRelease ||
      invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseXorRelease ||
      invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseAndRelease) {
    codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }
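
  // A plain `lock or/xor/and` would update memory atomically but discard the old value, so
  // emit a `lock cmpxchg` retry loop instead; a rough sketch in pseudo-code (OR case):
  //   do {
  //     old = [field];                   // loaded into EAX
  //     new = old | value;
  //   } while (!cas(field, old, new));   // lock cmpxchg; ZF set on success
  //   return old;                        // already in EAX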

  NearLabel try_again;
  __ Bind(&try_again);
  // Place the expected value in EAX for cmpxchg.
  codegen->LoadFromMemoryNoBarrier(type, locations->Out(), field_addr);
  codegen->Move32(locations->GetTemp(0), locations->InAt(value_index));
  GenerateBitwiseOp(invoke, codegen, temp, out);
  GenPrimitiveLockedCmpxchg(type,
                            codegen,
                            /* expected_value= */ locations->Out(),
                            /* new_value= */ locations->GetTemp(0),
                            reference,
                            offset);
  // If the cmpxchg failed, another thread changed the value, so try again.
  __ j(kNotZero, &try_again);

  // The old value is present in EAX.

  if (invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseOrAcquire ||
      invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseXorAcquire ||
      invoke->GetIntrinsic() == Intrinsics::kVarHandleGetAndBitwiseAndAcquire) {
    codegen->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseOr(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseOr(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseOrAcquire(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseOrAcquire(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseOrRelease(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseOrRelease(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseXor(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseXor(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseXorAcquire(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseXorAcquire(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseXorRelease(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseXorRelease(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseAnd(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseAnd(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseAndAcquire(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseAndAcquire(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitVarHandleGetAndBitwiseAndRelease(HInvoke* invoke) {
  CreateVarHandleGetAndBitwiseOpLocations(invoke);
}

void IntrinsicCodeGeneratorX86::VisitVarHandleGetAndBitwiseAndRelease(HInvoke* invoke) {
  GenerateVarHandleGetAndBitwiseOp(invoke, codegen_);
}

static void GenerateMathFma(HInvoke* invoke, CodeGeneratorX86* codegen) {
  DCHECK(DataType::IsFloatingPointType(invoke->GetType()));
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->InAt(0).Equals(locations->Out()));
  X86Assembler* assembler = codegen->GetAssembler();
  XmmRegister left = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister right = locations->InAt(1).AsFpuRegister<XmmRegister>();
  XmmRegister accumulator = locations->InAt(2).AsFpuRegister<XmmRegister>();
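  // The 213 form of FMA multiplies the first two operands and adds the third, leaving the
  // result in the first operand: left = left * right + accumulator. This is why the output
  // location must alias InAt(0), as the DCHECK above verifies.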
  if (invoke->GetType() == DataType::Type::kFloat32) {
    __ vfmadd213ss(left, right, accumulator);
  } else {
    DCHECK_EQ(invoke->GetType(), DataType::Type::kFloat64);
    __ vfmadd213sd(left, right, accumulator);
  }
}

void IntrinsicCodeGeneratorX86::VisitMathFmaDouble(HInvoke* invoke) {
  DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
  GenerateMathFma(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathFmaDouble(HInvoke* invoke) {
  if (codegen_->GetInstructionSetFeatures().HasAVX2()) {
    CreateFPFPFPToFPCallLocations(allocator_, invoke);
  }
}

void IntrinsicCodeGeneratorX86::VisitMathFmaFloat(HInvoke* invoke) {
  DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
  GenerateMathFma(invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathFmaFloat(HInvoke* invoke) {
  if (codegen_->GetInstructionSetFeatures().HasAVX2()) {
    CreateFPFPFPToFPCallLocations(allocator_, invoke);
  }
}

UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongDivideUnsigned)
UNIMPLEMENTED_INTRINSIC(X86, CRC32Update)
UNIMPLEMENTED_INTRINSIC(X86, CRC32UpdateBytes)
UNIMPLEMENTED_INTRINSIC(X86, CRC32UpdateByteBuffer)
UNIMPLEMENTED_INTRINSIC(X86, FP16ToFloat)
UNIMPLEMENTED_INTRINSIC(X86, FP16ToHalf)
UNIMPLEMENTED_INTRINSIC(X86, FP16Floor)
UNIMPLEMENTED_INTRINSIC(X86, FP16Ceil)
UNIMPLEMENTED_INTRINSIC(X86, FP16Rint)
UNIMPLEMENTED_INTRINSIC(X86, FP16Greater)
UNIMPLEMENTED_INTRINSIC(X86, FP16GreaterEquals)
UNIMPLEMENTED_INTRINSIC(X86, FP16Less)
UNIMPLEMENTED_INTRINSIC(X86, FP16LessEquals)
UNIMPLEMENTED_INTRINSIC(X86, FP16Compare)
UNIMPLEMENTED_INTRINSIC(X86, FP16Min)
UNIMPLEMENTED_INTRINSIC(X86, FP16Max)
UNIMPLEMENTED_INTRINSIC(X86, MathMultiplyHigh)

UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendObject)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendString)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendCharSequence)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendCharArray)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendBoolean)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendChar)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendInt)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendLong)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendFloat)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppendDouble)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)

UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvokeExact)
UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvoke)

// OpenJDK 11
UNIMPLEMENTED_INTRINSIC(X86, JdkUnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(X86, JdkUnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(X86, JdkUnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(X86, JdkUnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(X86, JdkUnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(X86)

#undef __

}  // namespace x86
}  // namespace art