Diffstat (limited to 'compiler/optimizing/intrinsics_x86.cc')
-rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 156
1 file changed, 0 insertions, 156 deletions
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index f71689230d..c1ec8bda82 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1731,74 +1731,6 @@ static void GenUnsafeGet(HInvoke* invoke,
   }
 }
 
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
-                                 DataType::Type type,
-                                 bool is_volatile,
-                                 CodeGeneratorX86* codegen) {
-  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
-  LocationSummary* locations = invoke->GetLocations();
-  Register address = locations->InAt(1).AsRegisterPairLow<Register>();
-  Address address_offset(address, 0);
-  Location output_loc = locations->Out();
-
-  switch (type) {
-    case DataType::Type::kInt8: {
-      Register output = output_loc.AsRegister<Register>();
-      __ movsxb(output, address_offset);
-      break;
-    }
-
-    case DataType::Type::kInt32: {
-      Register output = output_loc.AsRegister<Register>();
-      __ movl(output, address_offset);
-      break;
-    }
-
-    case DataType::Type::kInt64: {
-        Register output_lo = output_loc.AsRegisterPairLow<Register>();
-        Register output_hi = output_loc.AsRegisterPairHigh<Register>();
-        if (is_volatile) {
-          // Need to use a XMM to read atomically.
-          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
-          __ movsd(temp, address_offset);
-          __ movd(output_lo, temp);
-          __ psrlq(temp, Immediate(32));
-          __ movd(output_hi, temp);
-        } else {
-          Address address_hi(address, 4);
-          __ movl(output_lo, address_offset);
-          __ movl(output_hi, address_hi);
-        }
-      }
-      break;
-
-    default:
-      LOG(FATAL) << "Unsupported op size " << type;
-      UNREACHABLE();
-  }
-}
-
-static void CreateIntIntToIntLocations(ArenaAllocator* allocator,
-                                       HInvoke* invoke,
-                                       DataType::Type type,
-                                       bool is_volatile) {
-  LocationSummary* locations =
-      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
-  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
-  locations->SetInAt(1, Location::RequiresRegister());
-  if (type == DataType::Type::kInt64) {
-    if (is_volatile) {
-      // Need to use XMM to read volatile.
-      locations->AddTemp(Location::RequiresFpuRegister());
-      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-    } else {
-      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
-    }
-  } else {
-    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-  }
-}
-
 static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                           HInvoke* invoke,
                                           CodeGeneratorX86* codegen,
@@ -1834,9 +1766,6 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
 void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
   VisitJdkUnsafeGet(invoke);
 }
-void IntrinsicLocationsBuilderX86::VisitUnsafeGetAbsolute(HInvoke* invoke) {
-  VisitJdkUnsafeGetAbsolute(invoke);
-}
 void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
   VisitJdkUnsafeGetVolatile(invoke);
 }
@@ -1859,9 +1788,6 @@ void IntrinsicLocationsBuilderX86::VisitUnsafeGetByte(HInvoke* invoke) {
 void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
   VisitJdkUnsafeGet(invoke);
 }
-void IntrinsicCodeGeneratorX86::VisitUnsafeGetAbsolute(HInvoke* invoke) {
-  VisitJdkUnsafeGetAbsolute(invoke);
-}
 void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
   VisitJdkUnsafeGetVolatile(invoke);
 }
@@ -1885,9 +1811,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGet(HInvoke* invoke) {
   CreateIntIntIntToIntLocations(
       allocator_, invoke, codegen_, DataType::Type::kInt32, /*is_volatile=*/ false);
 }
-void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
-  CreateIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32, /*is_volatile=*/false);
-}
 void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
   CreateIntIntIntToIntLocations(
       allocator_, invoke, codegen_, DataType::Type::kInt32, /*is_volatile=*/ true);
@@ -1928,9 +1851,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetByte(HInvoke* invoke) {
 void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGet(HInvoke* invoke) {
   GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
 }
-void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
-  GenUnsafeGetAbsolute(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
-}
 void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
   GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_);
 }
@@ -1959,26 +1879,6 @@ void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetByte(HInvoke* invoke) {
   GenUnsafeGet(invoke, DataType::Type::kInt8, /*is_volatile=*/ false, codegen_);
 }
 
-static void CreateIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
-                                                    DataType::Type type,
-                                                    HInvoke* invoke,
-                                                    bool is_volatile) {
-  LocationSummary* locations =
-      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
-  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
-  locations->SetInAt(1, Location::RequiresRegister());
-  if (type == DataType::Type::kInt8 || type == DataType::Type::kUint8) {
-    // Ensure the value is in a byte register
-    locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, invoke->InputAt(3)));
-  } else {
-    locations->SetInAt(2, Location::RequiresRegister());
-  }
-  if (type == DataType::Type::kInt64 && is_volatile) {
-    locations->AddTemp(Location::RequiresFpuRegister());
-    locations->AddTemp(Location::RequiresFpuRegister());
-  }
-}
-
 static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
                                                        DataType::Type type,
                                                        HInvoke* invoke,
@@ -2008,9 +1908,6 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator
 void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
   VisitJdkUnsafePut(invoke);
 }
-void IntrinsicLocationsBuilderX86::VisitUnsafePutAbsolute(HInvoke* invoke) {
-  VisitJdkUnsafePutAbsolute(invoke);
-}
 void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
   VisitJdkUnsafePutOrdered(invoke);
 }
@@ -2043,10 +1940,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafePut(HInvoke* invoke) {
   CreateIntIntIntIntToVoidPlusTempsLocations(
       allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false);
 }
-void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
-  CreateIntIntIntToVoidPlusTempsLocations(
-      allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ false);
-}
 void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
   CreateIntIntIntIntToVoidPlusTempsLocations(
       allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false);
@@ -2152,54 +2045,9 @@ static void GenUnsafePut(LocationSummary* locations,
   }
 }
 
-// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
-// memory model.
-static void GenUnsafePutAbsolute(LocationSummary* locations,
-                                 DataType::Type type,
-                                 bool is_volatile,
-                                 CodeGeneratorX86* codegen) {
-  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
-  Register address = locations->InAt(1).AsRegisterPairLow<Register>();
-  Address address_offset(address, 0);
-  Location value_loc = locations->InAt(2);
-
-  if (type == DataType::Type::kInt64) {
-    Register value_lo = value_loc.AsRegisterPairLow<Register>();
-    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
-    if (is_volatile) {
-      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
-      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
-      __ movd(temp1, value_lo);
-      __ movd(temp2, value_hi);
-      __ punpckldq(temp1, temp2);
-      __ movsd(address_offset, temp1);
-    } else {
-      __ movl(address_offset, value_lo);
-      __ movl(Address(address, 4), value_hi);
-    }
-  } else if (type == DataType::Type::kInt32) {
-    __ movl(address_offset, value_loc.AsRegister<Register>());
-  } else {
-    CHECK_EQ(type, DataType::Type::kInt8) << "Unimplemented GenUnsafePut data type";
-    if (value_loc.IsRegister()) {
-      __ movb(address_offset, value_loc.AsRegister<ByteRegister>());
-    } else {
-      __ movb(address_offset,
-              Immediate(CodeGenerator::GetInt8ValueOf(value_loc.GetConstant())));
-    }
-  }
-
-  if (is_volatile) {
-    codegen->MemoryFence();
-  }
-}
-
 void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
   VisitJdkUnsafePut(invoke);
 }
-void IntrinsicCodeGeneratorX86::VisitUnsafePutAbsolute(HInvoke* invoke) {
-  VisitJdkUnsafePutAbsolute(invoke);
-}
 void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
   VisitJdkUnsafePutOrdered(invoke);
 }
@@ -2231,10 +2079,6 @@ void IntrinsicCodeGeneratorX86::VisitUnsafePutByte(HInvoke* invoke) {
 void IntrinsicCodeGeneratorX86::VisitJdkUnsafePut(HInvoke* invoke) {
   GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
 }
-void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
-  GenUnsafePutAbsolute(
-      invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/false, codegen_);
-}
 void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
   GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
 }
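Note on the removed read path: on 32-bit x86 a plain 64-bit load is split into two 32-bit movl instructions, so the deleted GenUnsafeGetAbsolute read volatile Int64 values through an XMM register (movsd, then movd/psrlq/movd to extract the two halves) to keep the access atomic. The sketch below restates that technique with SSE2 compiler intrinsics; it is an illustration only, not ART code, and the name AtomicLoad64ViaXmm is hypothetical.

#include <emmintrin.h>  // SSE2 intrinsics
#include <stdint.h>

// Hypothetical sketch: a 64-bit load kept atomic by going through an XMM
// register, mirroring the removed movsd/movd/psrlq/movd sequence.
static inline int64_t AtomicLoad64ViaXmm(const int64_t* addr) {
  __m128i v = _mm_loadl_epi64(reinterpret_cast<const __m128i*>(addr));  // single 8-byte load
  uint32_t lo = static_cast<uint32_t>(_mm_cvtsi128_si32(v));                        // movd: low half
  uint32_t hi = static_cast<uint32_t>(_mm_cvtsi128_si32(_mm_srli_epi64(v, 32)));    // psrlq $32 + movd
  return static_cast<int64_t>((static_cast<uint64_t>(hi) << 32) | lo);
}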
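Note on the removed write path: the deleted GenUnsafePutAbsolute packed the two 32-bit halves into one XMM register (movd, movd, punpckldq) and stored them with a single movsd, then called codegen->MemoryFence() after a volatile store; as the removed comment explains, ordered stores need no extra barrier because the x86 memory model already provides the AnyStore ordering. A hedged SSE2-intrinsics restatement follows; AtomicStore64ViaXmm is a hypothetical name and _mm_mfence() merely stands in for ART's MemoryFence().

#include <emmintrin.h>  // SSE2 intrinsics
#include <stdint.h>

// Hypothetical sketch: a 64-bit store kept atomic by packing both halves into
// one XMM register, mirroring the removed movd/movd/punpckldq/movsd sequence.
static inline void AtomicStore64ViaXmm(int64_t* addr, int64_t value, bool is_volatile) {
  __m128i lo = _mm_cvtsi32_si128(static_cast<int32_t>(value));                                // movd: low half
  __m128i hi = _mm_cvtsi32_si128(static_cast<int32_t>(static_cast<uint64_t>(value) >> 32));   // movd: high half
  _mm_storel_epi64(reinterpret_cast<__m128i*>(addr), _mm_unpacklo_epi32(lo, hi));  // punpckldq + single 8-byte store
  if (is_volatile) {
    _mm_mfence();  // full fence after a volatile store (stand-in for MemoryFence())
  }
}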