author     2021-02-01 09:31:02 +0000
committer  2021-02-05 10:48:17 +0000
commit     ac27ac01490f53f9e2413dc9b66fbb2880904c96 (patch)
tree       271018e1ef33667bee8d57c40ffa3f4d9f8cf930
parent     26bf47a60064fcc42e1b5e7b4b41deb8312d7330 (diff)
Implement Reference.refersTo() intrinsic.
Test: Added tests to 122-npe and 160-read-barrier-stress
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: run-gtests.sh
Test: testrunner.py --target --optimizing
Bug: 172573708
Change-Id: I8342510565289058df218d3249ffac1eb993ca4f
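
For readers skimming the change, the Java-level contract being intrinsified is small: Reference.refersTo(obj) reports whether the reference's referent is obj, without creating a strong reference to the referent the way get() == obj would. The following usage sketch is not part of this change; it assumes a runtime whose java.lang.ref.Reference already exposes refersTo() (JDK 16+ or an ART build containing this commit):

import java.lang.ref.WeakReference;

// Minimal sketch of the API whose implementation is intrinsified below.
public class RefersToDemo {
    public static void main(String[] args) {
        Object referent = new Object();
        WeakReference<Object> ref = new WeakReference<>(referent);

        // refersTo() compares the referent with the argument without strengthening it,
        // unlike `ref.get() == referent`, which briefly creates a strong reference.
        System.out.println(ref.refersTo(referent));     // true
        System.out.println(ref.refersTo(new Object())); // false
        System.out.println(ref.refersTo(null));         // false while the referent is alive
    }
}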
-rw-r--r--  compiler/optimizing/intrinsics.cc              |  14
-rw-r--r--  compiler/optimizing/intrinsics.h               |   1
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc        |  55
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc     |  62
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc          |  60
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc       |  54
-rw-r--r--  compiler/optimizing/nodes.cc                   |  13
-rw-r--r--  compiler/optimizing/nodes.h                    |   5
-rw-r--r--  runtime/hidden_api.h                           |   1
-rw-r--r--  runtime/image.cc                               |   4
-rw-r--r--  runtime/interpreter/interpreter_intrinsics.cc  |   1
-rw-r--r--  runtime/intrinsics_list.h                      |   1
-rw-r--r--  test/122-npe/src/Main.java                     |  16
-rw-r--r--  test/160-read-barrier-stress/src/Main.java     |  29
-rw-r--r--  test/knownfailures.json                        |   5

15 files changed, 312 insertions(+), 9 deletions(-)
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 10d0b8992c..16c3029770 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -392,6 +392,20 @@ void IntrinsicVisitor::CreateReferenceGetReferentLocations(HInvoke* invoke,
   locations->SetOut(Location::RequiresRegister());
 }
 
+void IntrinsicVisitor::CreateReferenceRefersToLocations(HInvoke* invoke) {
+  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
+    // Unimplemented for non-Baker read barrier.
+    return;
+  }
+
+  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+  LocationSummary* locations =
+      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetInAt(1, Location::RequiresRegister());
+  locations->SetOut(Location::RequiresRegister());
+}
+
 void IntrinsicVisitor::AssertNonMovableStringClass() {
   if (kIsDebugBuild) {
     ScopedObjectAccess soa(Thread::Current());
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index 48a103530e..62b5faa7d0 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -139,6 +139,7 @@ class IntrinsicVisitor : public ValueObject {
   static MemberOffset GetReferenceDisableIntrinsicOffset();
   static MemberOffset GetReferenceSlowPathEnabledOffset();
   static void CreateReferenceGetReferentLocations(HInvoke* invoke, CodeGenerator* codegen);
+  static void CreateReferenceRefersToLocations(HInvoke* invoke);
 
  protected:
   IntrinsicVisitor() {}
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 252865f230..6774a2600f 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3292,6 +3292,61 @@ void IntrinsicCodeGeneratorARM64::VisitReferenceGetReferent(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+void IntrinsicLocationsBuilderARM64::VisitReferenceRefersTo(HInvoke* invoke) {
+  IntrinsicVisitor::CreateReferenceRefersToLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorARM64::VisitReferenceRefersTo(HInvoke* invoke) {
+  LocationSummary* locations = invoke->GetLocations();
+  MacroAssembler* masm = codegen_->GetVIXLAssembler();
+  UseScratchRegisterScope temps(masm);
+
+  Register obj = WRegisterFrom(locations->InAt(0));
+  Register other = WRegisterFrom(locations->InAt(1));
+  Register out = WRegisterFrom(locations->Out());
+  Register tmp = temps.AcquireW();
+
+  uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value();
+  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
+
+  MemOperand field = HeapOperand(obj, referent_offset);
+  codegen_->LoadAcquire(invoke, DataType::Type::kReference, tmp, field, /*needs_null_check=*/ true);
+
+  __ Cmp(tmp, other);
+
+  if (kEmitCompilerReadBarrier) {
+    DCHECK(kUseBakerReadBarrier);
+
+    vixl::aarch64::Label calculate_result;
+
+    // If the GC is not marking, the comparison result is final.
+    __ Cbz(mr, &calculate_result);
+
+    __ B(&calculate_result, eq);  // ZF set if taken.
+
+    // Check if the loaded reference is null.
+    __ Cbz(tmp, &calculate_result);  // ZF clear if taken.
+
+    // For correct memory visibility, we need a barrier before loading the lock word.
+    codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
+
+    // Load the lockword and check if it is a forwarding address.
+    static_assert(LockWord::kStateShift == 30u);
+    static_assert(LockWord::kStateForwardingAddress == 3u);
+    __ Ldr(tmp, HeapOperand(tmp, monitor_offset));
+    __ Cmp(tmp, Operand(0xc0000000));
+    __ B(&calculate_result, lo);  // ZF clear if taken.
+
+    // Extract the forwarding address and compare with `other`.
+    __ Cmp(other, Operand(tmp, LSL, LockWord::kForwardingAddressShift));
+
+    __ Bind(&calculate_result);
+  }
+
+  // Convert ZF into the Boolean result.
+  __ Cset(out, eq);
+}
+
 void IntrinsicLocationsBuilderARM64::VisitThreadInterrupted(HInvoke* invoke) {
   LocationSummary* locations =
       new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 3766f35498..b652234289 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2559,6 +2559,68 @@ void IntrinsicCodeGeneratorARMVIXL::VisitReferenceGetReferent(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+void IntrinsicLocationsBuilderARMVIXL::VisitReferenceRefersTo(HInvoke* invoke) {
+  IntrinsicVisitor::CreateReferenceRefersToLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorARMVIXL::VisitReferenceRefersTo(HInvoke* invoke) {
+  LocationSummary* locations = invoke->GetLocations();
+  ArmVIXLAssembler* assembler = GetAssembler();
+  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+
+  vixl32::Register obj = RegisterFrom(locations->InAt(0));
+  vixl32::Register other = RegisterFrom(locations->InAt(1));
+  vixl32::Register out = RegisterFrom(locations->Out());
+  vixl32::Register tmp = temps.Acquire();
+
+  uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value();
+  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
+
+  {
+    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+    // Loading scratch register always uses 32-bit encoding.
+    vixl::ExactAssemblyScope eas(assembler->GetVIXLAssembler(),
+                                 vixl32::k32BitT32InstructionSizeInBytes);
+    __ ldr(tmp, MemOperand(obj, referent_offset));
+    codegen_->MaybeRecordImplicitNullCheck(invoke);
+  }
+  codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // `referent` is volatile.
+
+  if (kEmitCompilerReadBarrier) {
+    DCHECK(kUseBakerReadBarrier);
+
+    vixl32::Label calculate_result;
+    __ Subs(out, tmp, other);
+    __ B(eq, &calculate_result);  // `out` is 0 if taken.
+
+    // Check if the loaded reference is null.
+    __ Cmp(tmp, 0);
+    __ B(eq, &calculate_result);  // `out` is not 0 if taken.
+
+    // For correct memory visibility, we need a barrier before loading the lock word
+    // but we already have the barrier emitted for volatile load above which is sufficient.
+
+    // Load the lockword and check if it is a forwarding address.
+    static_assert(LockWord::kStateShift == 30u);
+    static_assert(LockWord::kStateForwardingAddress == 3u);
+    __ Ldr(tmp, MemOperand(tmp, monitor_offset));
+    __ Cmp(tmp, Operand(0xc0000000));
+    __ B(lo, &calculate_result);  // `out` is not 0 if taken.
+
+    // Extract the forwarding address and subtract from `other`.
+    __ Sub(out, other, Operand(tmp, LSL, LockWord::kForwardingAddressShift));
+
+    __ Bind(&calculate_result);
+  } else {
+    DCHECK(!kEmitCompilerReadBarrier);
+    __ Sub(out, tmp, other);
+  }
+
+  // Convert 0 to 1 and non-zero to 0 for the Boolean result (`out = (out == 0)`).
+  __ Clz(out, out);
+  __ Lsr(out, out, WhichPowerOf2(out.GetSizeInBits()));
+}
+
 void IntrinsicLocationsBuilderARMVIXL::VisitThreadInterrupted(HInvoke* invoke) {
   LocationSummary* locations =
       new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 83a2e39e5f..cda3500d7f 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3167,6 +3167,66 @@ void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+void IntrinsicLocationsBuilderX86::VisitReferenceRefersTo(HInvoke* invoke) {
+  IntrinsicVisitor::CreateReferenceRefersToLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitReferenceRefersTo(HInvoke* invoke) {
+  X86Assembler* assembler = GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+
+  Register obj = locations->InAt(0).AsRegister<Register>();
+  Register other = locations->InAt(1).AsRegister<Register>();
+  Register out = locations->Out().AsRegister<Register>();
+
+  uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value();
+  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
+
+  __ movl(out, Address(obj, referent_offset));
+  codegen_->MaybeRecordImplicitNullCheck(invoke);
+  // Note that the fence is a no-op, thanks to the x86 memory model.
+  codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // `referent` is volatile.
+
+  NearLabel end, return_true, return_false;
+  __ cmpl(out, other);
+
+  if (kEmitCompilerReadBarrier) {
+    DCHECK(kUseBakerReadBarrier);
+
+    __ j(kEqual, &return_true);
+
+    // Check if the loaded reference is null.
+    __ testl(out, out);
+    __ j(kZero, &return_false);
+
+    // For correct memory visibility, we need a barrier before loading the lock word
+    // but we already have the barrier emitted for volatile load above which is sufficient.
+
+    // Load the lockword and check if it is a forwarding address.
+    static_assert(LockWord::kStateShift == 30u);
+    static_assert(LockWord::kStateForwardingAddress == 3u);
+    __ movl(out, Address(out, monitor_offset));
+    __ cmpl(out, Immediate(static_cast<int32_t>(0xc0000000)));
+    __ j(kBelow, &return_false);
+
+    // Extract the forwarding address and compare with `other`.
+    __ shll(out, Immediate(LockWord::kForwardingAddressShift));
+    __ cmpl(out, other);
+  }
+
+  __ j(kNotEqual, &return_false);
+
+  // Return true and exit the function.
+  __ Bind(&return_true);
+  __ movl(out, Immediate(1));
+  __ jmp(&end);
+
+  // Return false and exit the function.
+  __ Bind(&return_false);
+  __ xorl(out, out);
+  __ Bind(&end);
+}
+
 void IntrinsicLocationsBuilderX86::VisitThreadInterrupted(HInvoke* invoke) {
   LocationSummary* locations =
       new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 73010afc69..5f05c4333d 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -2712,6 +2712,60 @@ void IntrinsicCodeGeneratorX86_64::VisitReferenceGetReferent(HInvoke* invoke) {
   __ Bind(slow_path->GetExitLabel());
 }
 
+void IntrinsicLocationsBuilderX86_64::VisitReferenceRefersTo(HInvoke* invoke) {
+  IntrinsicVisitor::CreateReferenceRefersToLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitReferenceRefersTo(HInvoke* invoke) {
+  X86_64Assembler* assembler = GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+
+  CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
+  CpuRegister other = locations->InAt(1).AsRegister<CpuRegister>();
+  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
+
+  uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value();
+  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
+
+  __ movl(out, Address(obj, referent_offset));
+  codegen_->MaybeRecordImplicitNullCheck(invoke);
+  // Note that the fence is a no-op, thanks to the x86-64 memory model.
+  codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // `referent` is volatile.
+
+  __ cmpl(out, other);
+
+  if (kEmitCompilerReadBarrier) {
+    DCHECK(kUseBakerReadBarrier);
+
+    NearLabel calculate_result;
+    __ j(kEqual, &calculate_result);  // ZF set if taken.
+
+    // Check if the loaded reference is null in a way that leaves ZF clear for null.
+    __ cmpl(out, Immediate(1));
+    __ j(kBelow, &calculate_result);  // ZF clear if taken.
+
+    // For correct memory visibility, we need a barrier before loading the lock word
+    // but we already have the barrier emitted for volatile load above which is sufficient.
+
+    // Load the lockword and check if it is a forwarding address.
+    static_assert(LockWord::kStateShift == 30u);
+    static_assert(LockWord::kStateForwardingAddress == 3u);
+    __ movl(out, Address(out, monitor_offset));
+    __ cmpl(out, Immediate(static_cast<int32_t>(0xc0000000)));
+    __ j(kBelow, &calculate_result);  // ZF clear if taken.
+
+    // Extract the forwarding address and compare with `other`.
+    __ shll(out, Immediate(LockWord::kForwardingAddressShift));
+    __ cmpl(out, other);
+
+    __ Bind(&calculate_result);
+  }
+
+  // Convert ZF into the Boolean result.
+  __ setcc(kEqual, out);
+  __ movzxb(out, out);
+}
+
 void IntrinsicLocationsBuilderX86_64::VisitThreadInterrupted(HInvoke* invoke) {
   LocationSummary* locations =
       new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index e815474b6e..4bb7ffc792 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -3199,6 +3199,19 @@ std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckReq
   }
 }
 
+bool HInvokeVirtual::CanDoImplicitNullCheckOn(HInstruction* obj) const {
+  if (obj != InputAt(0)) {
+    return false;
+  }
+  switch (GetIntrinsic()) {
+    case Intrinsics::kReferenceRefersTo:
+      return true;
+    default:
+      // TODO: Add implicit null checks in more intrinsics.
+      return false;
+  }
+}
+
 bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
   const HLoadClass* other_load_class = other->AsLoadClass();
   // TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 6381b2c356..c47ca3bbb5 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5112,10 +5112,7 @@ class HInvokeVirtual final : public HInvoke {
     }
   }
 
-  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override {
-    // TODO: Add implicit null checks in intrinsics.
-    return (obj == InputAt(0)) && !IsIntrinsic();
-  }
+  bool CanDoImplicitNullCheckOn(HInstruction* obj) const override;
 
   uint32_t GetVTableIndex() const { return vtable_index_; }
 
diff --git a/runtime/hidden_api.h b/runtime/hidden_api.h
index 568906ce73..16c2fe8cf6 100644
--- a/runtime/hidden_api.h
+++ b/runtime/hidden_api.h
@@ -284,6 +284,7 @@ ALWAYS_INLINE inline uint32_t GetRuntimeFlags(ArtMethod* method)
     case Intrinsics::kSystemArrayCopyChar:
     case Intrinsics::kStringGetCharsNoCheck:
     case Intrinsics::kReferenceGetReferent:
+    case Intrinsics::kReferenceRefersTo:
    case Intrinsics::kMemoryPeekByte:
     case Intrinsics::kMemoryPokeByte:
     case Intrinsics::kCRC32Update:
diff --git a/runtime/image.cc b/runtime/image.cc
index 57a2972dac..6c7be3c24d 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -29,8 +29,8 @@ namespace art {
 
 const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
 
-// Last change: Fix clinit with nterp.
-const uint8_t ImageHeader::kImageVersion[] = { '0', '9', '3', '\0' };
+// Last change: Reference.refersTo() intrinsic.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '9', '4', '\0' };
 
 ImageHeader::ImageHeader(uint32_t image_reservation_size,
                          uint32_t component_count,
diff --git a/runtime/interpreter/interpreter_intrinsics.cc b/runtime/interpreter/interpreter_intrinsics.cc
index c174ede962..369615c9ee 100644
--- a/runtime/interpreter/interpreter_intrinsics.cc
+++ b/runtime/interpreter/interpreter_intrinsics.cc
@@ -570,6 +570,7 @@ bool MterpHandleIntrinsic(ShadowFrame* shadow_frame,
     UNIMPLEMENTED_CASE(UnsafeStoreFence /* ()V */)
     UNIMPLEMENTED_CASE(UnsafeFullFence /* ()V */)
     UNIMPLEMENTED_CASE(ReferenceGetReferent /* ()Ljava/lang/Object; */)
+    UNIMPLEMENTED_CASE(ReferenceRefersTo /* (Ljava/lang/Object;)Z */)
     UNIMPLEMENTED_CASE(IntegerValueOf /* (I)Ljava/lang/Integer; */)
     UNIMPLEMENTED_CASE(ThreadInterrupted /* ()Z */)
     UNIMPLEMENTED_CASE(CRC32Update /* (II)I */)
diff --git a/runtime/intrinsics_list.h b/runtime/intrinsics_list.h
index 44463b1cb3..86d35cbc0a 100644
--- a/runtime/intrinsics_list.h
+++ b/runtime/intrinsics_list.h
@@ -231,6 +231,7 @@
   V(UnsafeStoreFence, kVirtual, kNeedsEnvironment, kAllSideEffects, kCanThrow, "Lsun/misc/Unsafe;", "storeFence", "()V") \
   V(UnsafeFullFence, kVirtual, kNeedsEnvironment, kAllSideEffects, kCanThrow, "Lsun/misc/Unsafe;", "fullFence", "()V") \
   V(ReferenceGetReferent, kDirect, kNeedsEnvironment, kAllSideEffects, kCanThrow, "Ljava/lang/ref/Reference;", "getReferent", "()Ljava/lang/Object;") \
+  V(ReferenceRefersTo, kVirtual, kNeedsEnvironment, kAllSideEffects, kCanThrow, "Ljava/lang/ref/Reference;", "refersTo", "(Ljava/lang/Object;)Z") \
   V(IntegerValueOf, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "valueOf", "(I)Ljava/lang/Integer;") \
   V(ThreadInterrupted, kStatic, kNeedsEnvironment, kAllSideEffects, kNoThrow, "Ljava/lang/Thread;", "interrupted", "()Z") \
   V(VarHandleFullFence, kStatic, kNeedsEnvironment, kWriteSideEffects, kNoThrow, "Ljava/lang/invoke/VarHandle;", "fullFence", "()V") \
diff --git a/test/122-npe/src/Main.java b/test/122-npe/src/Main.java
index 8f6820573f..9cd163f64f 100644
--- a/test/122-npe/src/Main.java
+++ b/test/122-npe/src/Main.java
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+import java.lang.ref.WeakReference;
+
 /**
  * Test that null pointer exceptions are thrown by the VM.
  */
@@ -38,7 +40,7 @@ public class Main {
 
     static void methodTwo() {
         NullPointerException npe = null;
-        int thisLine = 41;
+        int thisLine = 43;
 
         new Object().getClass();  // Ensure compiled.
         try {
@@ -547,6 +549,14 @@ public class Main {
            npe = e;
         }
         check(npe, thisLine += 14);
+
+        npe = null;
+        try {
+            useInt(((WeakReference<Object>) null).refersTo(null) ? 1 : 0);
+        } catch (NullPointerException e) {
+            npe = e;
+        }
+        check(npe, thisLine += 8);
     }
 
     static void check(NullPointerException npe, int firstLine) {
@@ -558,8 +568,8 @@ public class Main {
         }
         StackTraceElement[] trace = npe.getStackTrace();
         checkElement(trace[0], "Main", "methodTwo", "Main.java", firstLine);
-        checkElement(trace[1], "Main", "methodOne", "Main.java", 27);
-        checkElement(trace[2], "Main", "main", "Main.java", 23);
+        checkElement(trace[1], "Main", "methodOne", "Main.java", 29);
+        checkElement(trace[2], "Main", "main", "Main.java", 25);
     }
 
     static void checkElement(StackTraceElement element,
diff --git a/test/160-read-barrier-stress/src/Main.java b/test/160-read-barrier-stress/src/Main.java
index 27b7af7fae..ab23358dff 100644
--- a/test/160-read-barrier-stress/src/Main.java
+++ b/test/160-read-barrier-stress/src/Main.java
@@ -16,6 +16,7 @@
 
 import java.lang.invoke.MethodHandles;
 import java.lang.invoke.VarHandle;
+import java.lang.ref.WeakReference;
 import java.lang.reflect.Field;
 import sun.misc.Unsafe;
 
@@ -31,6 +32,7 @@ public class Main {
         testVarHandleCompareAndSet();
         testVarHandleCompareAndExchange();
         testVarHandleGetAndSet();
+        testReferenceRefersTo();
     }
 
     public static void testFieldReads() {
@@ -402,6 +404,33 @@ public class Main {
         }
     }
 
+    public static void testReferenceRefersTo() throws Exception {
+        // Initialize local variables for comparison.
+        manyFields.testField0000 = new Object();
+        manyFields.testField1024 = new Object();
+        manyFields.testField4444 = new Object();
+        manyFields.testField4999 = new Object();
+        WeakReference<Object> f0000 = new WeakReference<Object>(manyFields.testField0000);
+        WeakReference<Object> f1024 = new WeakReference<Object>(manyFields.testField1024);
+        WeakReference<Object> f4444 = new WeakReference<Object>(manyFields.testField4444);
+        WeakReference<Object> f4999 = new WeakReference<Object>(manyFields.testField4999);
+
+        // Continually check reads from `manyFields` while allocating
+        // over 64MiB memory (with heap size limited to 16MiB), ensuring we run GC and stress the
+        // read barrier implementation in Reference.refersTo() if concurrent collector is enabled.
+        for (int i = 0; i != 64 * 1024; ++i) {
+            allocateAtLeast1KiB();
+            ManyFields mf = manyFields;  // Load the volatile `manyFields` once on each iteration.
+            // Test Reference.refersTo() with reference field access.
+            assertEqual(true, f0000.refersTo(mf.testField0000));
+            assertEqual(false, f0000.refersTo(mf.testField0001));
+            assertEqual(true, f1024.refersTo(mf.testField1024));
+            assertEqual(true, f4444.refersTo(mf.testField4444));
+            assertEqual(false, f4999.refersTo(mf.testField4998));
+            assertEqual(true, f4999.refersTo(mf.testField4999));
+        }
+    }
+
     public static int $noinline$foo() { return 42; }
 
     public static void assertDifferentObject(Object lhs, Object rhs) {
diff --git a/test/knownfailures.json b/test/knownfailures.json
index 507ebb8a93..5662327c78 100644
--- a/test/knownfailures.json
+++ b/test/knownfailures.json
@@ -1354,6 +1354,11 @@
         "description": ["Failing on RI. Needs further investigating."]
     },
     {
+        "tests": ["122-npe", "160-read-barrier-stress"],
+        "variant": "jvm",
+        "description": ["Reference.refersTo() not supported on old version of RI."]
+    },
+    {
         "tests": ["2232-write-metrics-to-log"],
         "variant": "jvm",
         "description": ["RI does not support ART metrics."]
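
The four code generators above all emit the same decision procedure; only the instruction selection differs. Below is a rough Java-level model of that fast path, strictly for illustration: the constant names mirror the LockWord static_asserts in the diff, the gcIsMarking early-out corresponds to the marking-register check emitted only on arm64, and the raw int parameters stand in for the 32-bit heap references and lock word that the generated code actually loads. This is not runtime code and no such helper exists in ART.

// Illustrative model of the Baker read-barrier fast path generated for Reference.refersTo().
// Not ART code: the parameters stand for raw 32-bit words loaded by the emitted assembly.
final class RefersToFastPathModel {
    // Mirrors LockWord::kStateShift, kStateForwardingAddress and kForwardingAddressShift,
    // as pinned down by the static_asserts in the code generators above.
    private static final int STATE_SHIFT = 30;
    private static final int STATE_FORWARDING_ADDRESS = 3;
    private static final int FORWARDING_ADDRESS_SHIFT = 3;

    static boolean refersTo(int rawReferent, int other, boolean gcIsMarking, int lockWord) {
        if (rawReferent == other) {
            return true;  // The plain comparison already matched.
        }
        // arm64 short-circuits here via the marking register: if the GC is not marking,
        // no object can have been forwarded, so the result is already final.
        if (!gcIsMarking || rawReferent == 0) {
            return false;
        }
        // The referent may be a from-space object that was already copied. Its lock word
        // then holds a forwarding address, signalled by the top two state bits being 0b11
        // (equivalently, the lock word is unsigned-greater-or-equal to 0xc0000000).
        if ((lockWord >>> STATE_SHIFT) != STATE_FORWARDING_ADDRESS) {
            return false;
        }
        // Recover the to-space reference and compare it with `other`, matching the
        // `lockWord << kForwardingAddressShift` comparison in the generated code.
        return (lockWord << FORWARDING_ADDRESS_SHIFT) == other;
    }
}

The forwarding-address check is needed because the raw referent load bypasses the read barrier; during concurrent copying it may still observe the from-space copy, and comparing the forwarding address keeps the intrinsic correct without taking a read-barrier slow path.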