Diffstat (limited to 'compiler')
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc     |  8
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc  | 34
-rw-r--r--  compiler/optimizing/code_generator_x86.cc       |  7
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc    |  7
-rw-r--r--  compiler/optimizing/instruction_builder.cc      | 21
-rw-r--r--  compiler/optimizing/intrinsics.h                | 40
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc         | 39
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc      | 39
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc           | 39
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc        | 39
-rw-r--r--  compiler/optimizing/nodes.cc                    |  6
-rw-r--r--  compiler/optimizing/nodes.h                     |  5
12 files changed, 222 insertions, 62 deletions
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 7d1af05f12..8a8530f63c 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -4605,10 +4605,18 @@ void CodeGeneratorARM64::MoveFromReturnRegister(Location trg, DataType::Type typ
 }
 
 void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
+  if (intrinsic.TryDispatch(invoke)) {
+    return;
+  }
   HandleInvoke(invoke);
 }
 
 void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
+    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
+    return;
+  }
   codegen_->GenerateInvokePolymorphicCall(invoke);
   codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
 }
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 55e69d0f3c..0f47a8bd21 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -3531,12 +3531,20 @@ void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* inv
 }
 
 void LocationsBuilderARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  IntrinsicLocationsBuilderARMVIXL intrinsic(codegen_);
+  if (intrinsic.TryDispatch(invoke)) {
+    return;
+  }
   HandleInvoke(invoke);
 }
 
 void InstructionCodeGeneratorARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
+    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 9);
+    return;
+  }
   codegen_->GenerateInvokePolymorphicCall(invoke);
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 9);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 10);
 }
 
 void LocationsBuilderARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
@@ -3545,7 +3553,7 @@ void LocationsBuilderARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
 
 void InstructionCodeGeneratorARMVIXL::VisitInvokeCustom(HInvokeCustom* invoke) {
   codegen_->GenerateInvokeCustomCall(invoke);
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 10);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 11);
 }
 
 void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
@@ -5376,7 +5384,7 @@ void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
 void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction) {
   codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
   CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 11);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 12);
 }
 
 void LocationsBuilderARMVIXL::VisitNewArray(HNewArray* instruction) {
@@ -5394,7 +5402,7 @@ void InstructionCodeGeneratorARMVIXL::VisitNewArray(HNewArray* instruction) {
   codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
   CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
   DCHECK(!codegen_->IsLeafMethod());
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 12);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 13);
 }
 
 void LocationsBuilderARMVIXL::VisitParameterValue(HParameterValue* instruction) {
@@ -6993,7 +7001,7 @@ void InstructionCodeGeneratorARMVIXL::VisitSuspendCheck(HSuspendCheck* instructi
     return;
   }
   GenerateSuspendCheck(instruction, nullptr);
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 13);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 14);
 }
 
 void InstructionCodeGeneratorARMVIXL::GenerateSuspendCheck(HSuspendCheck* instruction,
@@ -7341,7 +7349,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_
   HLoadClass::LoadKind load_kind = cls->GetLoadKind();
   if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
     codegen_->GenerateLoadClassRuntimeCall(cls);
-    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 14);
+    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 15);
     return;
   }
   DCHECK(!cls->NeedsAccessCheck());
@@ -7427,7 +7435,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_
     } else {
       __ Bind(slow_path->GetExitLabel());
     }
-    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 15);
+    codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 16);
   }
 }
 
@@ -7620,7 +7628,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
       codegen_->AddSlowPath(slow_path);
       __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
       __ Bind(slow_path->GetExitLabel());
-      codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 16);
+      codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 17);
       return;
     }
     case HLoadString::LoadKind::kJitBootImageAddress: {
@@ -7648,7 +7656,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
   __ Mov(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
   codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
   CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 17);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 18);
 }
 
 static int32_t GetExceptionTlsOffset() {
@@ -8278,7 +8286,7 @@ void InstructionCodeGeneratorARMVIXL::VisitMonitorOperation(HMonitorOperation* i
   } else {
     CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
   }
-  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 18);
+  codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ 19);
 }
 
 void LocationsBuilderARMVIXL::VisitAnd(HAnd* instruction) {
@@ -8749,7 +8757,7 @@ void CodeGeneratorARMVIXL::GenerateGcRootFieldLoad(
     // Note that GC roots are not affected by heap poisoning, thus we
     // do not have to unpoison `root_reg` here.
   }
-  MaybeGenerateMarkingRegisterCheck(/* code= */ 19);
+  MaybeGenerateMarkingRegisterCheck(/* code= */ 20);
 }
 
 void CodeGeneratorARMVIXL::GenerateUnsafeCasOldValueAddWithBakerReadBarrier(
@@ -8849,7 +8857,7 @@ void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* i
               narrow ? BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET
                      : BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET);
   }
-  MaybeGenerateMarkingRegisterCheck(/* code= */ 20, /* temp_loc= */ LocationFrom(ip));
+  MaybeGenerateMarkingRegisterCheck(/* code= */ 21, /* temp_loc= */ LocationFrom(ip));
 }
 
 void CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -8937,7 +8945,7 @@ void CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier(Location ref,
     DCHECK_EQ(old_offset - GetVIXLAssembler()->GetBuffer()->GetCursorOffset(),
               BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
   }
-  MaybeGenerateMarkingRegisterCheck(/* code= */ 21, /* temp_loc= */ LocationFrom(ip));
+  MaybeGenerateMarkingRegisterCheck(/* code= */ 22, /* temp_loc= */ LocationFrom(ip));
 }
 
 void CodeGeneratorARMVIXL::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 934eebbb3d..04123cf70e 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -2483,10 +2483,17 @@ void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke)
 }
 
 void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  IntrinsicLocationsBuilderX86 intrinsic(codegen_);
+  if (intrinsic.TryDispatch(invoke)) {
+    return;
+  }
   HandleInvoke(invoke);
 }
 
 void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
+    return;
+  }
   codegen_->GenerateInvokePolymorphicCall(invoke);
 }
 
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 2b918f6742..b4210794e4 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -2757,10 +2757,17 @@ void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invo
 }
 
 void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
+  if (intrinsic.TryDispatch(invoke)) {
+    return;
+  }
   HandleInvoke(invoke);
 }
 
 void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
+    return;
+  }
   codegen_->GenerateInvokePolymorphicCall(invoke);
 }
 
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 60e1279685..ac714abd59 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -931,10 +931,13 @@ static ArtMethod* ResolveMethod(uint16_t method_idx,
   } else if (*invoke_type == kVirtual) {
     // For HInvokeVirtual we need the vtable index.
     *target_method = MethodReference(/*file=*/ nullptr, resolved_method->GetVtableIndex());
-  } else {
-    DCHECK_EQ(*invoke_type, kInterface);
+  } else if (*invoke_type == kInterface) {
     // For HInvokeInterface we need the IMT index.
     *target_method = MethodReference(/*file=*/ nullptr, ImTable::GetImtIndex(resolved_method));
+  } else {
+    // For HInvokePolymorphic we don't need the target method yet
+    DCHECK_EQ(*invoke_type, kPolymorphic);
+    DCHECK(target_method == nullptr);
   }
 
   *is_string_constructor =
@@ -1082,11 +1085,23 @@ bool HInstructionBuilder::BuildInvokePolymorphic(uint32_t dex_pc,
   DCHECK_EQ(1 + ArtMethod::NumArgRegisters(shorty), operands.GetNumberOfOperands());
   DataType::Type return_type = DataType::FromShorty(shorty[0]);
   size_t number_of_arguments = strlen(shorty);
+  // We use ResolveMethod which is also used in BuildInvoke in order to
+  // not duplicate code. As such, we need to provide is_string_constructor
+  // even if we don't need it afterwards.
+  InvokeType invoke_type = InvokeType::kPolymorphic;
+  bool is_string_constructor = false;
+  ArtMethod* resolved_method = ResolveMethod(method_idx,
+                                             graph_->GetArtMethod(),
+                                             *dex_compilation_unit_,
+                                             &invoke_type,
+                                             /* target_method= */ nullptr,
+                                             &is_string_constructor);
 
   HInvoke* invoke = new (allocator_) HInvokePolymorphic(allocator_,
                                                         number_of_arguments,
                                                         return_type,
                                                         dex_pc,
-                                                        method_idx);
+                                                        method_idx,
+                                                        resolved_method);
   return HandleInvoke(invoke, operands, shorty, /* is_unresolved= */ false);
 }
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index ab68cce304..f3c6d8b4c2 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -285,45 +285,7 @@ UNREACHABLE_INTRINSIC(Arch, StringIsEmpty) \
 UNREACHABLE_INTRINSIC(Arch, StringLength) \
 UNREACHABLE_INTRINSIC(Arch, UnsafeLoadFence) \
 UNREACHABLE_INTRINSIC(Arch, UnsafeStoreFence) \
-UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleFullFence) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleAcquireFence) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleReleaseFence) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleLoadLoadFence) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleStoreStoreFence) \
-UNREACHABLE_INTRINSIC(Arch, MethodHandleInvokeExact) \
-UNREACHABLE_INTRINSIC(Arch, MethodHandleInvoke) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchange) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndExchangeRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleCompareAndSet) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGet) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAdd) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndAddRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAnd) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseAndRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOr) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseOrRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXor) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndBitwiseXorRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSet) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetAndSetRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetOpaque) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleGetVolatile) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleSet) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleSetOpaque) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleSetRelease) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleSetVolatile) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSet) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetAcquire) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetPlain) \
-UNREACHABLE_INTRINSIC(Arch, VarHandleWeakCompareAndSetRelease)
+UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence)
 
 template <typename IntrinsicLocationsBuilder, typename Codegenerator>
 bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 4b495e7fc0..aa4452fcdd 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3355,6 +3355,45 @@ UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt)
 UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong)
 UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject)
 
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleFullFence)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleAcquireFence)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleReleaseFence)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleLoadLoadFence)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleStoreStoreFence)
+UNIMPLEMENTED_INTRINSIC(ARM64, MethodHandleInvokeExact)
+UNIMPLEMENTED_INTRINSIC(ARM64, MethodHandleInvoke)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleCompareAndExchange)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleCompareAndExchangeAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleCompareAndExchangeRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGet)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndAdd)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndAddAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndAddRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseAnd)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseAndAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseAndRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseOr)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseOrAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseOrRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseXor)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseXorAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndBitwiseXorRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndSet)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetAndSetRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetOpaque)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleGetVolatile)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleSet)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleSetOpaque)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleSetRelease)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleSetVolatile)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleWeakCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleWeakCompareAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleWeakCompareAndSetPlain)
+UNIMPLEMENTED_INTRINSIC(ARM64, VarHandleWeakCompareAndSetRelease)
+
 UNREACHABLE_INTRINSICS(ARM64)
 
 #undef __
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index c5284c5978..b8df06093b 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -3056,6 +3056,45 @@ UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetInt)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetLong)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetObject)
 
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleFullFence)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleAcquireFence)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleReleaseFence)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleLoadLoadFence)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleStoreStoreFence)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, MethodHandleInvokeExact)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, MethodHandleInvoke)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleCompareAndExchange)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleCompareAndExchangeAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleCompareAndExchangeRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGet)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndAdd)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndAddAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndAddRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseAnd)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseAndAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseAndRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseOr)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseOrAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseOrRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseXor)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseXorAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndBitwiseXorRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndSet)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetAndSetRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetOpaque)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleGetVolatile)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleSet)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleSetOpaque)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleSetRelease)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleSetVolatile)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleWeakCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleWeakCompareAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleWeakCompareAndSetPlain)
+UNIMPLEMENTED_INTRINSIC(ARMVIXL, VarHandleWeakCompareAndSetRelease)
+
 UNREACHABLE_INTRINSICS(ARMVIXL)
 
 #undef __
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 2bad0b5265..e13a965dae 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3108,6 +3108,45 @@ UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
 UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
 UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)
 
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleFullFence)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleAcquireFence)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleReleaseFence)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleLoadLoadFence)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleStoreStoreFence)
+UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvokeExact)
+UNIMPLEMENTED_INTRINSIC(X86, MethodHandleInvoke)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchange)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndExchangeRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGet)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAdd)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAddAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndAddRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAnd)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAndAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseAndRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOr)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOrAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseOrRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXor)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXorAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndBitwiseXorRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSet)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetAndSetRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetOpaque)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetVolatile)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleSet)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetOpaque)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetRelease)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetVolatile)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetPlain)
+UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetRelease)
+
 UNREACHABLE_INTRINSICS(X86)
 
 #undef __
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 7dc43876ea..eaa3abe380 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -2761,6 +2761,45 @@ UNIMPLEMENTED_INTRINSIC(X86_64, UnsafeGetAndSetInt)
 UNIMPLEMENTED_INTRINSIC(X86_64, UnsafeGetAndSetLong)
 UNIMPLEMENTED_INTRINSIC(X86_64, UnsafeGetAndSetObject)
 
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleFullFence)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleAcquireFence)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleReleaseFence)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleLoadLoadFence)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleStoreStoreFence)
+UNIMPLEMENTED_INTRINSIC(X86_64, MethodHandleInvokeExact)
+UNIMPLEMENTED_INTRINSIC(X86_64, MethodHandleInvoke)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleCompareAndExchange)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleCompareAndExchangeAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleCompareAndExchangeRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGet)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndAdd)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndAddAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndAddRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseAnd)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseAndAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseAndRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseOr)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseOrAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseOrRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseXor)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseXorAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndBitwiseXorRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndSet)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetAndSetRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetOpaque)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleGetVolatile)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleSet)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleSetOpaque)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleSetRelease)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleSetVolatile)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleWeakCompareAndSet)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleWeakCompareAndSetAcquire)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleWeakCompareAndSetPlain)
+UNIMPLEMENTED_INTRINSIC(X86_64, VarHandleWeakCompareAndSetRelease)
+
 UNREACHABLE_INTRINSICS(X86_64)
 
 #undef __
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 64e62fd01e..22e165767d 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -3159,11 +3159,7 @@ static inline IntrinsicExceptions GetExceptionsIntrinsic(Intrinsics i) {
 }
 
 void HInvoke::SetResolvedMethod(ArtMethod* method) {
-  // TODO: b/65872996 The intent is that polymorphic signature methods should
-  // be compiler intrinsics. At present, they are only interpreter intrinsics.
-  if (method != nullptr &&
-      method->IsIntrinsic() &&
-      !method->IsPolymorphicSignature()) {
+  if (method != nullptr && method->IsIntrinsic()) {
     Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
     SetIntrinsic(intrinsic,
                  NeedsEnvironmentOrCacheIntrinsic(intrinsic),
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 214c7baad0..e6e60b7837 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -4507,7 +4507,8 @@ class HInvokePolymorphic final : public HInvoke {
                      uint32_t number_of_arguments,
                      DataType::Type return_type,
                      uint32_t dex_pc,
-                     uint32_t dex_method_index)
+                     uint32_t dex_method_index,
+                     ArtMethod* resolved_method)
       : HInvoke(kInvokePolymorphic,
                 allocator,
                 number_of_arguments,
@@ -4515,7 +4516,7 @@ class HInvokePolymorphic final : public HInvoke {
                 return_type,
                 dex_pc,
                 dex_method_index,
-                nullptr,
+                resolved_method,
                 kVirtual) {
   }
 
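
The per-backend pattern introduced above is the same two-step scheme the optimizing compiler already uses for regular invokes: the locations builder first lets the intrinsic framework claim the invoke-polymorphic (TryDispatch), and code generation emits the intrinsic fast path when one was set up (TryGenerateIntrinsicCode), otherwise falling back to the generic runtime call. The following is a minimal stand-alone sketch of that control flow only; the types and names (Invoke, intrinsic_codegen, VisitInvokePolymorphic) are placeholders for illustration, not ART's actual classes or APIs.

// Hypothetical, simplified sketch of the dispatch pattern added in the
// codegen files above; not ART code.
#include <functional>
#include <iostream>
#include <optional>

struct Invoke {
  // Set during the locations-building step when the resolved method maps
  // to a recognized intrinsic; left empty otherwise.
  std::optional<std::function<void()>> intrinsic_codegen;
};

// Mirrors the shape of InstructionCodeGenerator*::VisitInvokePolymorphic
// after the change: try the intrinsic first, fall back to the generic call.
void VisitInvokePolymorphic(const Invoke& invoke) {
  if (invoke.intrinsic_codegen) {
    (*invoke.intrinsic_codegen)();  // intrinsic fast path emitted inline
    return;
  }
  std::cout << "generic invoke-polymorphic runtime call\n";  // fallback path
}

int main() {
  Invoke plain;  // no intrinsic recognized for this call site
  Invoke fence;
  fence.intrinsic_codegen = [] { std::cout << "inline VarHandle fence\n"; };
  VisitInvokePolymorphic(plain);  // takes the fallback path
  VisitInvokePolymorphic(fence);  // takes the intrinsic path
  return 0;
}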