/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type)) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
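      // The result pair arrives in R0 (low) and R1 (high). Order the moves so that a return
      // register is never clobbered before its value has been copied out: only when trg_reg_lo
      // is R1 does the high word have to be moved out of R1 first.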
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM* codegen) {
  if (invoke->InputCount() == 0) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow path implementing the fallback for an intrinsified call: it invokes the managed-code
// implementation of the intrinsic, after copying the arguments into the positions required by
// a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();
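
  // Branchless abs: with mask = in >> 31 (arithmetic shift), abs(in) = (in + mask) ^ mask.
  // In the 64-bit case the adds/adc pair propagates the carry of the low add into the high word.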
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
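
  // Compare once, then use a Thumb-2 IT block so both conditional moves encode without a
  // branch: the then-mov executes on LT (min) / GT (max), the else-mov on the inverse condition.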
  __ cmp(op1, ShifterOperand(op2));

  __ it(is_min ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Even when unaligned accesses are allowed (SCTLR.A = 0), ldrd still requires a word-aligned
  // address, and addr may be unaligned; load the two words with separate ldr instructions.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Even when unaligned accesses are allowed (SCTLR.A = 0), strd still requires a word-aligned
  // address, and addr may be unaligned; store the two words with separate str instructions.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
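    // On cores without single-copy atomic ldrd/strd, a volatile 64-bit load uses ldrexd, which
    // reads the doubleword atomically; the exclusive monitor state is simply discarded, since a
    // load needs no matching store-exclusive.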
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }
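
  // For volatile gets, a DMB after the load gives it acquire semantics.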
  if (is_volatile) {
    __ dmb(ISH);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;
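
  // A DMB before the store orders it after all prior accesses (release semantics). Both
  // volatile and ordered (lazySet) puts need this barrier; only volatile puts also get the
  // trailing barrier after the store below.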
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();
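
      // Without atomic ldrd/strd, make the 64-bit store atomic with an ldrexd/strexd loop:
      // strexd writes 0 to temp_lo on success, so retry while it reports failure (non-zero).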
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    codegen->MarkGCCard(temp, card, base, value);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark the card for the object, assuming the new value is stored. In the worst case we
    // mark an unchanged object and the GC needlessly scans it at the next collection.
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);
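
  // Materialize the boolean result: tmp_lo is zero iff the CAS succeeded. out = 1 - tmp_lo
  // yields 1 on success and 0 when tmp_lo was 1; any other non-zero difference makes the
  // subtraction borrow (carry clear), and the conditional mov then clamps out to 0.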
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of the reference to the data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of the count.
  const MemberOffset count_offset = mirror::String::CountOffset();
  // Starting offset within the data array.
  const MemberOffset offset_offset = mirror::String::OffsetOffset();
  // Start of the char data within array_.
  const MemberOffset data_offset = mirror::Array::DataOffset(sizeof(uint16_t));

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, it is probably not
  //       worth the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so unlike Quick we
  //       do not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));        // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  // Index computation.
  __ ldr(temp, Address(obj, offset_offset.Int32Value()));       // temp := str.offset.
  __ ldr(array_temp, Address(obj, value_offset.Int32Value()));  // array_temp := str.value.
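  // The target halfword lives at value + data_offset + (offset + idx) * 2. The ldrh below
  // scales temp by two (LSL #1), so accumulate offset + idx + data_offset / 2 in temp.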
  __ add(temp, temp, ShifterOperand(idx));
  DCHECK_EQ(data_offset.Int32Value() % 2, 0);  // We'll compensate by shifting.
  __ add(temp, temp, ShifterOperand(data_offset.Int32Value() / 2));

  // Load the value.
  __ ldrh(out, Address(array_temp, temp, LSL, 1));  // out := array_temp[temp].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // Set up the inputs in the runtime calling convention; the result is returned in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheck());

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                     \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)           // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)     // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)      // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)

}  // namespace arm
}  // namespace art