-rw-r--r--  compiler/optimizing/code_generator_arm64.cc     | 28
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc  | 25
-rw-r--r--  compiler/optimizing/code_generator_mips.cc      | 20
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc    | 20
-rw-r--r--  compiler/optimizing/code_generator_x86.cc       | 21
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc    | 19
-rw-r--r--  compiler/optimizing/scheduler_arm.cc            |  2
-rw-r--r--  runtime/art_method-inl.h                        | 40
8 files changed, 115 insertions(+), 60 deletions(-)
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 00bf2f1c51..3446dd63be 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -164,6 +164,16 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return
   return ARM64ReturnLocation(return_type);
 }
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  InvokeRuntimeCallingConvention calling_convention;
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
+  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
+            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
+                         DataType::Type::kReference).GetCode());
+  return caller_saves;
+}
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()
@@ -3178,6 +3188,8 @@ void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
@@ -5053,13 +5065,7 @@ void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
   if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution or initialization and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
-      DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
-                RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
-                             DataType::Type::kReference).GetCode());
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
@@ -5257,13 +5263,7 @@ void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
   if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
-      DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
-                RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
-                             DataType::Type::kReference).GetCode());
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index d1b5bcb66e..4e70d8b6c6 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -150,6 +150,15 @@ class EmitAdrCode {
   int32_t adr_location_;
 };
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  InvokeRuntimeCallingConventionARMVIXL calling_convention;
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
+  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
+  // that the kPrimNot result register is the same as the first argument register.
+  return caller_saves;
+}
+
 // SaveLiveRegisters and RestoreLiveRegisters from SlowPathCodeARM operate on sets of S registers,
 // for each live D registers they treat two corresponding S registers as live ones.
 //
@@ -7416,12 +7425,7 @@ void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
   if (load_kind == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution or initialization and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConventionARMVIXL calling_convention;
-      caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
-      // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
-      // that the the kPrimNot result register is the same as the first argument register.
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
@@ -7549,6 +7553,8 @@ void LocationsBuilderARMVIXL::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void InstructionCodeGeneratorARMVIXL::VisitClinitCheck(HClinitCheck* check) {
@@ -7668,12 +7674,7 @@ void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
   if (load_kind == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString and marking to save everything we need, including temps.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConventionARMVIXL calling_convention;
-      caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
-      // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
-      // that the the kPrimNot result register is the same as the first argument register.
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 60bbf4c9f0..8b7479a861 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -160,6 +160,14 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type)
   return MipsReturnLocation(type);
 }
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  InvokeRuntimeCallingConvention calling_convention;
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+  // The reference is returned in the same register. This differs from the standard return location.
+  return caller_saves;
+}
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
@@ -3594,6 +3602,8 @@ void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
@@ -8134,10 +8144,7 @@ void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
   if (load_kind == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution or initialization and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barriers we have a temp-clobbering call.
     }
@@ -8368,10 +8375,7 @@ void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
   if (load_kind == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barriers we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 81d86a9a3f..1d93fc809f 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -112,6 +112,14 @@ Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type)
   return Mips64ReturnLocation(type);
 }
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  InvokeRuntimeCallingConvention calling_convention;
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+  // The reference is returned in the same register. This differs from the standard return location.
+  return caller_saves;
+}
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
@@ -3149,6 +3157,8 @@ void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
@@ -6206,10 +6216,7 @@ void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
   if (load_kind == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution or initialization and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barriers we have a temp-clobbering call.
     }
@@ -6381,10 +6388,7 @@ void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
   if (load_kind == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString and marking to save everything we need.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barriers we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 83ce734797..46bd479c2c 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -55,6 +55,15 @@ static constexpr int kFakeReturnRegister = Register(8);
 static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
 static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  InvokeRuntimeCallingConvention calling_convention;
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+  // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
+  // that the kPrimNot result register is the same as the first argument register.
+  return caller_saves;
+}
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()
@@ -6489,10 +6498,7 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
   if (load_kind == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution and/or initialization to save everything.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
@@ -6631,6 +6637,8 @@ void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
@@ -6710,10 +6718,7 @@ void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
   if (load_kind == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString to save everything.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      InvokeRuntimeCallingConvention calling_convention;
-      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index aabf2e0be4..7e3fdedce0 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -56,6 +56,13 @@ static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15
 
 static constexpr int kC2ConditionMask = 0x400;
 
+static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
+  // Custom calling convention: RAX serves as both input and output.
+  RegisterSet caller_saves = RegisterSet::Empty();
+  caller_saves.Add(Location::RegisterLocation(RAX));
+  return caller_saves;
+}
+
 // NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
 #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
@@ -5832,10 +5839,7 @@ void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
   if (load_kind == HLoadClass::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the type resolution and/or initialization to save everything.
-      // Custom calling convention: RAX serves as both input and output.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      caller_saves.Add(Location::RegisterLocation(RAX));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
@@ -5950,6 +5954,8 @@ void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
   if (check->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
+  // Rely on the type initialization to save everything we need.
+  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
 }
 
 void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
@@ -6009,10 +6015,7 @@ void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
   if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
     if (!kUseReadBarrier || kUseBakerReadBarrier) {
       // Rely on the pResolveString to save everything.
-      // Custom calling convention: RAX serves as both input and output.
-      RegisterSet caller_saves = RegisterSet::Empty();
-      caller_saves.Add(Location::RegisterLocation(RAX));
-      locations->SetCustomSlowPathCallerSaves(caller_saves);
+      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
     } else {
       // For non-Baker read barrier we have a temp-clobbering call.
     }
diff --git a/compiler/optimizing/scheduler_arm.cc b/compiler/optimizing/scheduler_arm.cc
index 8dcadaad2e..d89d1171a1 100644
--- a/compiler/optimizing/scheduler_arm.cc
+++ b/compiler/optimizing/scheduler_arm.cc
@@ -679,7 +679,7 @@ void SchedulingLatencyVisitorARM::VisitArrayGet(HArrayGet* instruction) {
       } else {
         last_visited_internal_latency_ += kArmIntegerOpLatency;
       }
-      last_visited_internal_latency_ = kArmMemoryLoadLatency;
+      last_visited_latency_ = kArmMemoryLoadLatency;
     }
   }
   break;
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index ac22f07a34..f693524a6c 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -374,12 +374,50 @@ inline HiddenApiAccessFlags::ApiList ArtMethod::GetHiddenApiAccessFlags()
     case Intrinsics::kSystemArrayCopyChar:
     case Intrinsics::kStringGetCharsNoCheck:
     case Intrinsics::kReferenceGetReferent:
+    case Intrinsics::kMemoryPeekByte:
+    case Intrinsics::kMemoryPokeByte:
+    case Intrinsics::kUnsafeCASInt:
+    case Intrinsics::kUnsafeCASLong:
+    case Intrinsics::kUnsafeCASObject:
+    case Intrinsics::kUnsafeGet:
+    case Intrinsics::kUnsafeGetAndAddInt:
+    case Intrinsics::kUnsafeGetAndAddLong:
+    case Intrinsics::kUnsafeGetAndSetInt:
+    case Intrinsics::kUnsafeGetAndSetLong:
+    case Intrinsics::kUnsafeGetAndSetObject:
+    case Intrinsics::kUnsafeGetLong:
+    case Intrinsics::kUnsafeGetLongVolatile:
+    case Intrinsics::kUnsafeGetObject:
+    case Intrinsics::kUnsafeGetObjectVolatile:
+    case Intrinsics::kUnsafeGetVolatile:
+    case Intrinsics::kUnsafePut:
+    case Intrinsics::kUnsafePutLong:
+    case Intrinsics::kUnsafePutLongOrdered:
+    case Intrinsics::kUnsafePutLongVolatile:
+    case Intrinsics::kUnsafePutObject:
+    case Intrinsics::kUnsafePutObjectOrdered:
+    case Intrinsics::kUnsafePutObjectVolatile:
+    case Intrinsics::kUnsafePutOrdered:
+    case Intrinsics::kUnsafePutVolatile:
+    case Intrinsics::kUnsafeLoadFence:
+    case Intrinsics::kUnsafeStoreFence:
+    case Intrinsics::kUnsafeFullFence:
       // These intrinsics are on the light greylist and will fail a DCHECK in
       // SetIntrinsic() if their flags change on the respective dex methods.
       // Note that the DCHECK currently won't fail if the dex methods are
       // whitelisted, e.g. in the core image (b/77733081). As a result, we
       // might print warnings but we won't change the semantics.
       return HiddenApiAccessFlags::kLightGreylist;
+    case Intrinsics::kStringNewStringFromBytes:
+    case Intrinsics::kStringNewStringFromChars:
+    case Intrinsics::kStringNewStringFromString:
+    case Intrinsics::kMemoryPeekIntNative:
+    case Intrinsics::kMemoryPeekLongNative:
+    case Intrinsics::kMemoryPeekShortNative:
+    case Intrinsics::kMemoryPokeIntNative:
+    case Intrinsics::kMemoryPokeLongNative:
+    case Intrinsics::kMemoryPokeShortNative:
+      return HiddenApiAccessFlags::kDarkGreylist;
     case Intrinsics::kVarHandleFullFence:
     case Intrinsics::kVarHandleAcquireFence:
     case Intrinsics::kVarHandleReleaseFence:
@@ -475,7 +513,7 @@ inline void ArtMethod::SetIntrinsic(uint32_t intrinsic) {
       // (b) only VarHandle intrinsics are blacklisted at the moment and they
       //     should not be used outside tests with disabled API checks.
       if (hidden_api_flags != HiddenApiAccessFlags::kWhitelist) {
-        DCHECK_EQ(hidden_api_flags, GetHiddenApiAccessFlags());
+        DCHECK_EQ(hidden_api_flags, GetHiddenApiAccessFlags()) << PrettyMethod();
       }
     } else {
       SetAccessFlags(new_value);
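
Note: the codegen changes above all apply one refactoring pattern: an open-coded caller-save RegisterSet is replaced by the shared OneRegInReferenceOutSaveEverythingCallerSaves() helper, which is then also reused for HClinitCheck. A minimal standalone sketch of that pattern follows; the RegisterSet and LocationSummary classes here are simplified stand-ins for illustration only (the real types live in compiler/optimizing/locations.h and carry much more state).

#include <cstdint>

// Stand-in for art::RegisterSet: a bit mask of caller-save core registers.
class RegisterSet {
 public:
  static RegisterSet Empty() { return RegisterSet(); }
  void Add(uint32_t reg) { core_registers_ |= 1u << reg; }
  uint32_t GetCoreRegisters() const { return core_registers_; }

 private:
  uint32_t core_registers_ = 0;
};

// Stand-in for art::LocationSummary: records which registers a slow path
// must preserve.
class LocationSummary {
 public:
  void SetCustomSlowPathCallerSaves(const RegisterSet& saves) {
    custom_slow_path_caller_saves_ = saves;
  }

 private:
  RegisterSet custom_slow_path_caller_saves_;
};

// The extracted helper: the single-register caller-save set is built in one
// place instead of being repeated in every VisitLoadClass/VisitLoadString/
// VisitClinitCheck method. The register index is a placeholder for the
// per-architecture first argument register (x0, r0, A0, EAX, or RAX).
static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  constexpr uint32_t kFirstArgumentRegister = 0;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(kFirstArgumentRegister);
  return caller_saves;
}

int main() {
  LocationSummary locations;
  // Every call site in the patch now reduces to this single line.
  locations.SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
  return 0;
}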