Diffstat (limited to 'compiler/optimizing/intrinsics_x86.cc')
-rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 52
1 file changed, 51 insertions(+), 1 deletion(-)
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index e13a965dae..7bd6b04ba5 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -31,6 +31,7 @@
 #include "mirror/object_array-inl.h"
 #include "mirror/reference.h"
 #include "mirror/string.h"
+#include "mirror/var_handle.h"
 #include "scoped_thread_state_change-inl.h"
 #include "thread-current-inl.h"
 #include "utils/x86/assembler_x86.h"
@@ -3064,6 +3065,56 @@ void IntrinsicCodeGeneratorX86::VisitIntegerDivideUnsigned(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+static void CreateVarHandleLocationSummary(HInvoke* invoke, ArenaAllocator* allocator) {
+  InvokeDexCallingConventionVisitorX86 visitor;
+  LocationSummary* locations =
+      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
+
+  for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
+    HInstruction* input = invoke->InputAt(i);
+    locations->SetInAt(i, visitor.GetNextLocation(input->GetType()));
+  }
+
+  locations->SetOut(visitor.GetReturnLocation(invoke->GetType()));
+}
+
+#define INTRINSIC_VARHANDLE_LOCATIONS_BUILDER(Name)                 \
+void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke) { \
+  CreateVarHandleLocationSummary(invoke, allocator_);               \
+}
+
+INTRINSIC_VARHANDLE_LOCATIONS_BUILDER(VarHandleGet)
+
+static void GenerateVarHandleCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
+  X86Assembler* assembler = codegen->GetAssembler();
+  Register varhandle_object = invoke->GetLocations()->InAt(0).AsRegister<Register>();
+  const uint32_t access_modes_bitmask_offset =
+      mirror::VarHandle::AccessModesBitMaskOffset().Uint32Value();
+  mirror::VarHandle::AccessMode access_mode =
+      mirror::VarHandle::GetAccessModeByIntrinsic(invoke->GetIntrinsic());
+  const uint32_t access_mode_bit = 1u << static_cast<uint32_t>(access_mode);
+
+  // If the access mode is not supported, bail to runtime implementation to handle
+  __ testl(Address(varhandle_object, access_modes_bitmask_offset), Immediate(access_mode_bit));
+  SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
+  codegen->AddSlowPath(slow_path);
+  __ j(kZero, slow_path->GetEntryLabel());
+
+  // For now, none of the access modes are compiled. The runtime handles them on
+  // both slow path and main path.
+  // TODO: replace calling the runtime with actual assembly code
+  codegen->GenerateInvokePolymorphicCall(invoke->AsInvokePolymorphic());
+
+  __ Bind(slow_path->GetExitLabel());
+}
+
+#define INTRINSIC_VARHANDLE_CODE_GENERATOR(Name)                 \
+void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke) { \
+  GenerateVarHandleCode(invoke, codegen_);                       \
+}
+
+INTRINSIC_VARHANDLE_CODE_GENERATOR(VarHandleGet)
+
 UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
 UNIMPLEMENTED_INTRINSIC(X86, ReferenceGetReferent)
 UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
@@ -3119,7 +3170,6 @@ UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchange)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeAcquire)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeRelease)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndSet)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleGet)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAcquire)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAdd)
 UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAddAcquire)
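The central idea in the added GenerateVarHandleCode is the cached access-modes bitmask check: mirror::VarHandle stores one bit per AccessMode, and the intrinsic emits a single testl of that field against Immediate(1u << access_mode), jumping to the slow path when the bit is clear. The standalone C++ sketch below models only that check; it is not ART code, and the AccessMode values and the IsAccessModeSupported helper are illustrative assumptions.

// Standalone sketch (not ART code) of the access-mode bitmask test that the
// generated testl performs. Each VarHandle caches a bitmask with one bit per
// access mode; a clear bit means the intrinsic must take the slow path.
#include <cstdint>
#include <iostream>

// Illustrative access-mode indices; the real enum lives in mirror::VarHandle.
enum class AccessMode : uint32_t { kGet = 0, kSet = 1, kCompareAndSet = 2 };

// Hypothetical helper: returns true when the mode's bit is set in the bitmask
// (fast path allowed), false when the caller should fall back to the runtime.
bool IsAccessModeSupported(uint32_t access_modes_bitmask, AccessMode mode) {
  const uint32_t access_mode_bit = 1u << static_cast<uint32_t>(mode);
  return (access_modes_bitmask & access_mode_bit) != 0u;  // testl + jz equivalent
}

int main() {
  // A VarHandle that supports kGet and kSet but not kCompareAndSet.
  const uint32_t bitmask = (1u << 0) | (1u << 1);
  std::cout << IsAccessModeSupported(bitmask, AccessMode::kGet) << "\n";            // prints 1
  std::cout << IsAccessModeSupported(bitmask, AccessMode::kCompareAndSet) << "\n";  // prints 0
  return 0;
}

Because the bitmask fits in a 32-bit field, the supported-mode check costs one testl against an immediate plus a conditional jump on the compiled path; per the TODO in the patch, both the main path and the slow path currently still call back into the runtime through GenerateInvokePolymorphicCall.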