Implement heap poisoning in ART's Optimizing compiler.
- Instrument ARM, ARM64, x86 and x86-64 code generators.
- Note: To turn heap poisoning on in Optimizing, set the
environment variable `ART_HEAP_POISONING` to "true"
before compiling ART.
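- Note: With poisoning enabled, a heap reference is stored in
  memory as the negation of its 32-bit value and is negated back
  on load; the new assembler helpers implement this with `negl`
  (x86/x86-64), `rsb` (ARM) and `Neg` (ARM64). A minimal C++
  sketch of the value transformation (illustration only):

    #include <cstdint>

    // Two's-complement negation is its own inverse, so the same
    // operation both poisons and unpoisons a 32-bit reference.
    uint32_t PoisonHeapReference(uint32_t ref) { return 0u - ref; }
    uint32_t UnpoisonHeapReference(uint32_t ref) { return 0u - ref; }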
Bug: 12687968
Change-Id: Ib3120b38cf805a8a50207a314b9ccc90c8d93740
diff --git a/compiler/common_compiler_test.h b/compiler/common_compiler_test.h
index b828fcf..d215662 100644
--- a/compiler/common_compiler_test.h
+++ b/compiler/common_compiler_test.h
@@ -108,6 +108,13 @@
std::list<std::vector<uint8_t>> header_code_and_maps_chunks_;
};
+// TODO: When heap reference poisoning works with all compilers in use, get rid of this.
+#define TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK() \
+ if (kPoisonHeapReferences && GetCompilerKind() == Compiler::kQuick) { \
+ printf("WARNING: TEST DISABLED FOR HEAP REFERENCE POISONING WITH QUICK\n"); \
+ return; \
+ }
+
// TODO: When non-PIC works with all compilers in use, get rid of this.
#define TEST_DISABLED_FOR_NON_PIC_COMPILING_WITH_OPTIMIZING() \
if (GetCompilerKind() == Compiler::kOptimizing) { \
diff --git a/compiler/dex/quick/quick_compiler.cc b/compiler/dex/quick/quick_compiler.cc
index 28c485a..39496a4 100644
--- a/compiler/dex/quick/quick_compiler.cc
+++ b/compiler/dex/quick/quick_compiler.cc
@@ -653,6 +653,12 @@
uint32_t method_idx,
jobject class_loader,
const DexFile& dex_file) const {
+ if (kPoisonHeapReferences) {
+ VLOG(compiler) << "Skipping method: " << PrettyMethod(method_idx, dex_file)
+ << " Reason = Quick does not support heap poisoning.";
+ return nullptr;
+ }
+
// TODO: check method fingerprint here to determine appropriate backend type. Until then, use
// build default.
CompilerDriver* driver = GetCompilerDriver();
diff --git a/compiler/driver/compiler_driver_test.cc b/compiler/driver/compiler_driver_test.cc
index ba03f5a..b358f4f 100644
--- a/compiler/driver/compiler_driver_test.cc
+++ b/compiler/driver/compiler_driver_test.cc
@@ -146,7 +146,7 @@
}
TEST_F(CompilerDriverTest, AbstractMethodErrorStub) {
- TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK();
jobject class_loader;
{
ScopedObjectAccess soa(Thread::Current());
@@ -192,6 +192,7 @@
};
TEST_F(CompilerDriverMethodsTest, Selection) {
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK();
Thread* self = Thread::Current();
jobject class_loader;
{
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 85fd696..953dfcb 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -138,7 +138,7 @@
FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
// Check handle scope offset is within frame
CHECK_LT(handle_scope_offset.Uint32Value(), frame_size);
- // Note this LoadRef() doesn't need heap poisoning since its from the ArtMethod.
+ // Note this LoadRef() doesn't need heap unpoisoning since it's from the ArtMethod.
// Note this LoadRef() does not include read barrier. It will be handled below.
__ LoadRef(main_jni_conv->InterproceduralScratchRegister(),
mr_conv->MethodRegister(), ArtMethod::DeclaringClassOffset(), false);
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 39c316f..e3683ef 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -332,8 +332,6 @@
};
#undef __
-
-#undef __
#define __ down_cast<ArmAssembler*>(GetAssembler())->
inline Condition ARMCondition(IfCondition cond) {
@@ -1383,6 +1381,7 @@
DCHECK(receiver.IsRegister());
__ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kArmWordSize).Int32Value();
@@ -1422,6 +1421,7 @@
__ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kArmWordSize).Int32Value();
@@ -2778,6 +2778,8 @@
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
instruction->GetDexPc(),
@@ -2797,6 +2799,8 @@
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
instruction->GetDexPc(),
@@ -3030,10 +3034,12 @@
bool generate_volatile = field_info.IsVolatile()
&& is_wide
&& !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
// Temporary registers for the write barrier.
// TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
- if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
- locations->AddTemp(Location::RequiresRegister());
+ if (needs_write_barrier) {
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
} else if (generate_volatile) {
// Arm encoding have some additional constraints for ldrexd/strexd:
@@ -3066,6 +3072,8 @@
bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Primitive::Type field_type = field_info.GetFieldType();
uint32_t offset = field_info.GetFieldOffset().Uint32Value();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
@@ -3086,7 +3094,18 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot: {
- __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as a null reference does not
+ // need poisoning.
+ DCHECK_EQ(field_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ StoreToOffset(kStoreWord, temp, base, offset);
+ } else {
+ __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
+ }
break;
}
@@ -3265,6 +3284,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<Register>());
+ }
}
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
@@ -3352,8 +3375,9 @@
LocationSummary* locations = instruction->GetLocations();
Register obj = locations->InAt(0).AsRegister<Register>();
Location index = locations->InAt(1);
+ Primitive::Type type = instruction->GetType();
- switch (instruction->GetType()) {
+ switch (type) {
case Primitive::kPrimBoolean: {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Register out = locations->Out().AsRegister<Register>();
@@ -3470,10 +3494,15 @@
}
case Primitive::kPrimVoid:
- LOG(FATAL) << "Unreachable type " << instruction->GetType();
+ LOG(FATAL) << "Unreachable type " << type;
UNREACHABLE();
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ Register out = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
@@ -3501,7 +3530,7 @@
if (needs_write_barrier) {
// Temporary registers for the write barrier.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3552,14 +3581,25 @@
if (!needs_runtime_call) {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Register value = locations->InAt(2).AsRegister<Register>();
+ Register source = value;
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as a null reference does not
+ // need poisoning.
+ DCHECK_EQ(value_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value);
+ __ PoisonHeapReference(temp);
+ source = temp;
+ }
if (index.IsConstant()) {
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
- __ StoreToOffset(kStoreWord, value, obj, offset);
+ __ StoreToOffset(kStoreWord, source, obj, offset);
} else {
DCHECK(index.IsRegister()) << index;
__ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
- __ StoreToOffset(kStoreWord, value, IP, data_offset);
+ __ StoreToOffset(kStoreWord, source, IP, data_offset);
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
if (needs_write_barrier) {
@@ -3570,6 +3610,8 @@
}
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
instruction,
instruction->GetDexPc(),
@@ -3994,6 +4036,7 @@
current_method,
ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
__ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
+ __ MaybeUnpoisonHeapReference(out);
SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
@@ -4053,7 +4096,9 @@
__ LoadFromOffset(
kLoadWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
__ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
+ __ MaybeUnpoisonHeapReference(out);
__ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
+ __ MaybeUnpoisonHeapReference(out);
__ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4111,6 +4156,7 @@
}
// Compare the class of `obj` with `cls`.
__ LoadFromOffset(kLoadWord, out, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(out);
__ cmp(out, ShifterOperand(cls));
if (instruction->IsClassFinal()) {
// Classes must be equal for the instanceof to succeed.
@@ -4164,7 +4210,10 @@
}
// Compare the class of `obj` with `cls`.
__ LoadFromOffset(kLoadWord, temp, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(temp);
__ cmp(temp, ShifterOperand(cls));
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ b(slow_path->GetEntryLabel(), NE);
__ Bind(slow_path->GetExitLabel());
}
@@ -4316,5 +4365,8 @@
LOG(FATAL) << "Unreachable";
}
+#undef __
+#undef QUICK_ENTRY_POINT
+
} // namespace arm
} // namespace art
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 9b7124d..a9a95d3 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1250,6 +1250,7 @@
void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
const FieldInfo& field_info) {
DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
+ Primitive::Type field_type = field_info.GetFieldType();
BlockPoolsScope block_pools(GetVIXLAssembler());
MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
@@ -1260,15 +1261,19 @@
// NB: LoadAcquire will record the pc info if needed.
codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
} else {
- codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
+ codegen_->Load(field_type, OutputCPURegister(instruction), field);
codegen_->MaybeRecordImplicitNullCheck(instruction);
// For IRIW sequential consistency kLoadAny is not sufficient.
GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
}
} else {
- codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
+ codegen_->Load(field_type, OutputCPURegister(instruction), field);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
+ }
}
void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
@@ -1290,23 +1295,38 @@
Register obj = InputRegisterAt(instruction, 0);
CPURegister value = InputCPURegisterAt(instruction, 1);
+ CPURegister source = value;
Offset offset = field_info.GetFieldOffset();
Primitive::Type field_type = field_info.GetFieldType();
bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
- if (field_info.IsVolatile()) {
- if (use_acquire_release) {
- codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- } else {
- GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- codegen_->Store(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
+ {
+ // We use a block to end the scratch scope before the write barrier, thus
+ // freeing the temporary registers so they can be used in `MarkGCCard`.
+ UseScratchRegisterScope temps(GetVIXLAssembler());
+
+ if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp, value.W());
+ GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
}
- } else {
- codegen_->Store(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (field_info.IsVolatile()) {
+ if (use_acquire_release) {
+ codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ } else {
+ GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
+ codegen_->Store(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
+ }
+ } else {
+ codegen_->Store(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
}
if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
@@ -1464,6 +1484,10 @@
codegen_->Load(type, OutputCPURegister(instruction), source);
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
+ }
}
void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
@@ -1506,12 +1530,15 @@
bool needs_runtime_call = locations->WillCall();
if (needs_runtime_call) {
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(
QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
} else {
Register obj = InputRegisterAt(instruction, 0);
CPURegister value = InputCPURegisterAt(instruction, 2);
+ CPURegister source = value;
Location index = locations->InAt(1);
size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
MemOperand destination = HeapOperand(obj);
@@ -1522,6 +1549,14 @@
// freeing the temporary registers so they can be used in `MarkGCCard`.
UseScratchRegisterScope temps(masm);
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp, value.W());
+ GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
+ }
+
if (index.IsConstant()) {
offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
destination = HeapOperand(obj, offset);
@@ -1532,7 +1567,7 @@
destination = HeapOperand(temp, offset);
}
- codegen_->Store(value_type, value, destination);
+ codegen_->Store(value_type, source, destination);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
@@ -1585,7 +1620,10 @@
}
// Compare the class of `obj` with `cls`.
__ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(obj_cls.W());
__ Cmp(obj_cls, cls);
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ B(ne, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -2152,6 +2190,7 @@
// Compare the class of `obj` with `cls`.
__ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Cmp(out, cls);
if (instruction->IsClassFinal()) {
// Classes must be equal for the instanceof to succeed.
@@ -2225,6 +2264,7 @@
__ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
// temp = temp->GetImtEntryAt(method_offset);
__ Ldr(temp, MemOperand(temp, method_offset));
// lr = temp->GetEntryPoint();
@@ -2350,6 +2390,7 @@
DCHECK(receiver.IsRegister());
__ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
// temp = temp->GetMethodAt(method_offset);
__ Ldr(temp, MemOperand(temp, method_offset));
// lr = temp->GetEntryPoint();
@@ -2379,6 +2420,7 @@
DCHECK(cls->CanCallRuntime());
__ Ldr(out, MemOperand(current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
@@ -2428,7 +2470,9 @@
Register current_method = InputRegisterAt(load, 0);
__ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Cbz(out, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -2563,6 +2607,8 @@
Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
DCHECK(type_index.Is(w0));
__ Mov(type_index, instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(
GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
@@ -2586,6 +2632,8 @@
Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
DCHECK(type_index.Is(w0));
__ Mov(type_index, instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(
GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 931d751..262b234 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1315,9 +1315,11 @@
LocationSummary* locations = invoke->GetLocations();
Location receiver = locations->InAt(0);
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ // temp = object->GetClass();
DCHECK(receiver.IsRegister());
__ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
__ movl(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -1354,7 +1356,8 @@
} else {
__ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
}
- codegen_->MaybeRecordImplicitNullCheck(invoke);
+ codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
__ movl(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -3001,6 +3004,8 @@
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(GetThreadOffset<kX86WordSize>(instruction->GetEntrypoint())));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
@@ -3021,6 +3026,8 @@
InvokeRuntimeCallingConvention calling_convention;
__ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(GetThreadOffset<kX86WordSize>(instruction->GetEntrypoint())));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
@@ -3397,6 +3404,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<Register>());
+ }
}
void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
@@ -3420,9 +3431,9 @@
} else {
locations->SetInAt(1, Location::RequiresRegister());
}
- // Temporary registers for the write barrier.
if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
- locations->AddTemp(Location::RequiresRegister());
+ // Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
// Ensure the card is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
} else if (is_volatile && (field_type == Primitive::kPrimLong)) {
@@ -3447,6 +3458,8 @@
bool is_volatile = field_info.IsVolatile();
Primitive::Type field_type = field_info.GetFieldType();
uint32_t offset = field_info.GetFieldOffset().Uint32Value();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
@@ -3467,7 +3480,18 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot: {
- __ movl(Address(base, offset), value.AsRegister<Register>());
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as the reference does not
+ // need poisoning.
+ DCHECK_EQ(field_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset), temp);
+ } else {
+ __ movl(Address(base, offset), value.AsRegister<Register>());
+ }
break;
}
@@ -3508,7 +3532,7 @@
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
- if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
+ if (needs_write_barrier) {
Register temp = locations->GetTemp(0).AsRegister<Register>();
Register card = locations->GetTemp(1).AsRegister<Register>();
codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
@@ -3737,6 +3761,11 @@
if (type != Primitive::kPrimLong) {
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
+
+ if (type == Primitive::kPrimNot) {
+ Register out = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
@@ -3776,9 +3805,9 @@
} else {
locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
}
- // Temporary registers for the write barrier.
if (needs_write_barrier) {
- locations->AddTemp(Location::RequiresRegister());
+ // Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
// Ensure the card is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
}
@@ -3852,21 +3881,43 @@
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
if (value.IsRegister()) {
- __ movl(Address(obj, offset), value.AsRegister<Register>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, offset), temp);
+ } else {
+ __ movl(Address(obj, offset), value.AsRegister<Register>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
- __ movl(Address(obj, offset),
- Immediate(CodeGenerator::GetInt32ValueOf(value.GetConstant())));
+ int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
+ __ movl(Address(obj, offset), Immediate(v));
}
} else {
DCHECK(index.IsRegister()) << index;
if (value.IsRegister()) {
- __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
- value.AsRegister<Register>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset), temp);
+ } else {
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
+ value.AsRegister<Register>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
- __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
- Immediate(CodeGenerator::GetInt32ValueOf(value.GetConstant())));
+ int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset), Immediate(v));
}
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
@@ -3880,6 +3931,8 @@
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
DCHECK(!codegen_->IsLeafMethod());
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
@@ -4343,6 +4396,7 @@
__ movl(out, Address(
current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ __ MaybeUnpoisonHeapReference(out);
SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
@@ -4400,7 +4454,9 @@
Register current_method = locations->InAt(0).AsRegister<Register>();
__ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
+ __ MaybeUnpoisonHeapReference(out);
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ __ MaybeUnpoisonHeapReference(out);
__ testl(out, out);
__ j(kEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
@@ -4455,8 +4511,9 @@
__ testl(obj, obj);
__ j(kEqual, &zero);
}
- __ movl(out, Address(obj, class_offset));
// Compare the class of `obj` with `cls`.
+ __ movl(out, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(out);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<Register>());
} else {
@@ -4514,16 +4571,17 @@
__ testl(obj, obj);
__ j(kEqual, slow_path->GetExitLabel());
}
-
- __ movl(temp, Address(obj, class_offset));
// Compare the class of `obj` with `cls`.
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
if (cls.IsRegister()) {
__ cmpl(temp, cls.AsRegister<Register>());
} else {
DCHECK(cls.IsStackSlot()) << cls;
__ cmpl(temp, Address(ESP, cls.GetStackIndex()));
}
-
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ j(kNotEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4687,5 +4745,7 @@
LOG(FATAL) << "Unreachable";
}
+#undef __
+
} // namespace x86
} // namespace art
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index afffbe2..c9d19c8 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -213,7 +213,7 @@
__ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
__ gs()->call(Address::Absolute((do_clinit_
? QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeStaticStorage)
- : QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeType)) , true));
+ : QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeType)), true));
RecordPcInfo(codegen, at_, dex_pc_);
Location out = locations->Out();
@@ -1429,6 +1429,7 @@
DCHECK(receiver.IsRegister());
__ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
__ movq(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -1466,6 +1467,7 @@
__ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
__ movq(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -3060,6 +3062,8 @@
InvokeRuntimeCallingConvention calling_convention;
codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
__ gs()->call(
Address::Absolute(GetThreadOffset<kX86_64WordSize>(instruction->GetEntrypoint()), true));
@@ -3082,6 +3086,8 @@
codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
__ gs()->call(
Address::Absolute(GetThreadOffset<kX86_64WordSize>(instruction->GetEntrypoint()), true));
@@ -3270,6 +3276,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<CpuRegister>());
+ }
}
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
@@ -3278,8 +3288,9 @@
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
+ Primitive::Type field_type = field_info.GetFieldType();
bool needs_write_barrier =
- CodeGenerator::StoreNeedsWriteBarrier(field_info.GetFieldType(), instruction->InputAt(1));
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
locations->SetInAt(0, Location::RequiresRegister());
if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
@@ -3289,7 +3300,10 @@
}
if (needs_write_barrier) {
// Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
+ } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ // Temporary register for the reference poisoning.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3337,9 +3351,20 @@
case Primitive::kPrimNot: {
if (value.IsConstant()) {
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `field_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
__ movl(Address(base, offset), Immediate(v));
} else {
- __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset), temp);
+ } else {
+ __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
+ }
}
break;
}
@@ -3483,8 +3508,9 @@
LocationSummary* locations = instruction->GetLocations();
CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
Location index = locations->InAt(1);
+ Primitive::Type type = instruction->GetType();
- switch (instruction->GetType()) {
+ switch (type) {
case Primitive::kPrimBoolean: {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
CpuRegister out = locations->Out().AsRegister<CpuRegister>();
@@ -3585,10 +3611,15 @@
}
case Primitive::kPrimVoid:
- LOG(FATAL) << "Unreachable type " << instruction->GetType();
+ LOG(FATAL) << "Unreachable type " << type;
UNREACHABLE();
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ CpuRegister out = locations->Out().AsRegister<CpuRegister>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
@@ -3620,7 +3651,7 @@
if (needs_write_barrier) {
// Temporary registers for the write barrier.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3696,20 +3727,42 @@
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
if (value.IsRegister()) {
- __ movl(Address(obj, offset), value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, offset), temp);
+ } else {
+ __ movl(Address(obj, offset), value.AsRegister<CpuRegister>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
__ movl(Address(obj, offset), Immediate(v));
}
} else {
DCHECK(index.IsRegister()) << index;
if (value.IsRegister()) {
- __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
- value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset), temp);
+ } else {
+ __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
+ value.AsRegister<CpuRegister>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
__ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
Immediate(v));
}
@@ -3724,6 +3777,8 @@
}
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
__ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAputObject),
true));
DCHECK(!codegen_->IsLeafMethod());
@@ -3876,7 +3931,7 @@
Thread::CardTableOffset<kX86_64WordSize>().Int32Value(), true));
__ movq(temp, object);
__ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
- __ movb(Address(temp, card, TIMES_1, 0), card);
+ __ movb(Address(temp, card, TIMES_1, 0), card);
if (value_can_be_null) {
__ Bind(&is_null);
}
@@ -4187,6 +4242,8 @@
__ movl(out, Address(
current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ __ MaybeUnpoisonHeapReference(out);
+
SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
codegen_->AddSlowPath(slow_path);
@@ -4234,7 +4291,9 @@
CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
__ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
+ __ MaybeUnpoisonHeapReference(out);
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ __ MaybeUnpoisonHeapReference(out);
__ testl(out, out);
__ j(kEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
@@ -4293,6 +4352,7 @@
}
// Compare the class of `obj` with `cls`.
__ movl(out, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(out);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<CpuRegister>());
} else {
@@ -4351,13 +4411,15 @@
}
// Compare the class of `obj` with `cls`.
__ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
if (cls.IsRegister()) {
__ cmpl(temp, cls.AsRegister<CpuRegister>());
} else {
DCHECK(cls.IsStackSlot()) << cls;
__ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
}
- // Classes must be equal for the checkcast to succeed.
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ j(kNotEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4576,5 +4638,7 @@
return Address::RIP(fixup);
}
+#undef __
+
} // namespace x86_64
} // namespace art
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 71fadfb..b4dbf75 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -510,6 +510,11 @@
if (is_volatile) {
__ dmb(ISH);
}
+
+ if (type == Primitive::kPrimNot) {
+ Register trg = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(trg);
+ }
}
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -649,8 +654,15 @@
__ strd(value_lo, Address(IP));
}
} else {
- value = locations->InAt(3).AsRegister<Register>();
- __ str(value, Address(base, offset));
+ value = locations->InAt(3).AsRegister<Register>();
+ Register source = value;
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value);
+ __ PoisonHeapReference(temp);
+ source = temp;
+ }
+ __ str(source, Address(base, offset));
}
if (is_volatile) {
@@ -738,6 +750,11 @@
__ add(tmp_ptr, base, ShifterOperand(offset));
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->PoisonHeapReference(expected_lo);
+ codegen->GetAssembler()->PoisonHeapReference(value_lo);
+ }
+
// do {
// tmp = [r_ptr] - expected;
// } while (tmp == 0 && failure([r_ptr] <- r_new_value));
@@ -761,6 +778,11 @@
__ rsbs(out, tmp_lo, ShifterOperand(1));
__ it(CC);
__ mov(out, ShifterOperand(0), CC);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
+ codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
+ }
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1047,5 +1069,9 @@
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace arm
} // namespace art
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 8bcb88b..78ac167 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -683,6 +683,11 @@
} else {
codegen->Load(type, trg, mem_op);
}
+
+ if (type == Primitive::kPrimNot) {
+ DCHECK(trg.IsW());
+ codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
+ }
}
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -781,22 +786,37 @@
Register base = WRegisterFrom(locations->InAt(1)); // Object pointer.
Register offset = XRegisterFrom(locations->InAt(2)); // Long offset.
Register value = RegisterFrom(locations->InAt(3), type);
+ Register source = value;
bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
MemOperand mem_op(base.X(), offset);
- if (is_volatile || is_ordered) {
- if (use_acquire_release) {
- codegen->StoreRelease(type, value, mem_op);
- } else {
- __ Dmb(InnerShareable, BarrierAll);
- codegen->Store(type, value, mem_op);
- if (is_volatile) {
- __ Dmb(InnerShareable, BarrierReads);
- }
+ {
+ // We use a block to end the scratch scope before the write barrier, thus
+ // freeing the temporary registers so they can be used in `MarkGCCard`.
+ UseScratchRegisterScope temps(masm);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp.W(), value.W());
+ codegen->GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
}
- } else {
- codegen->Store(type, value, mem_op);
+
+ if (is_volatile || is_ordered) {
+ if (use_acquire_release) {
+ codegen->StoreRelease(type, source, mem_op);
+ } else {
+ __ Dmb(InnerShareable, BarrierAll);
+ codegen->Store(type, source, mem_op);
+ if (is_volatile) {
+ __ Dmb(InnerShareable, BarrierReads);
+ }
+ }
+ } else {
+ codegen->Store(type, source, mem_op);
+ }
}
if (type == Primitive::kPrimNot) {
@@ -872,6 +892,11 @@
__ Add(tmp_ptr, base.X(), Operand(offset));
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->PoisonHeapReference(expected);
+ codegen->GetAssembler()->PoisonHeapReference(value);
+ }
+
// do {
// tmp_value = [tmp_ptr] - expected;
// } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
@@ -897,6 +922,11 @@
}
__ Bind(&exit_loop);
__ Cset(out, eq);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->UnpoisonHeapReference(value);
+ codegen->GetAssembler()->UnpoisonHeapReference(expected);
+ }
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1173,5 +1203,9 @@
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace arm64
} // namespace art
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index b04cc5c..0d6ca09 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1335,9 +1335,14 @@
switch (type) {
case Primitive::kPrimInt:
- case Primitive::kPrimNot:
- __ movl(output.AsRegister<Register>(), Address(base, offset, ScaleFactor::TIMES_1, 0));
+ case Primitive::kPrimNot: {
+ Register output_reg = output.AsRegister<Register>();
+ __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
+ if (type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(output_reg);
+ }
break;
+ }
case Primitive::kPrimLong: {
Register output_lo = output.AsRegisterPairLow<Register>();
@@ -1436,7 +1441,7 @@
locations->SetInAt(3, Location::RequiresRegister());
if (type == Primitive::kPrimNot) {
// Need temp registers for card-marking.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
// Ensure the value is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
} else if (type == Primitive::kPrimLong && is_volatile) {
@@ -1498,6 +1503,11 @@
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
}
+ } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value_loc.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
} else {
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
}
@@ -1604,7 +1614,8 @@
__ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
} else {
// Integer or object.
- DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
+ Register expected = locations->InAt(3).AsRegister<Register>();
+ DCHECK_EQ(expected, EAX);
Register value = locations->InAt(4).AsRegister<Register>();
if (type == Primitive::kPrimNot) {
// Mark card for object assuming new value is stored.
@@ -1614,6 +1625,11 @@
base,
value,
value_can_be_null);
+
+ if (kPoisonHeapReferences) {
+ __ PoisonHeapReference(expected);
+ __ PoisonHeapReference(value);
+ }
}
__ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
@@ -1625,6 +1641,13 @@
// Convert ZF into the boolean result.
__ setb(kZero, out.AsRegister<Register>());
__ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register value = locations->InAt(4).AsRegister<Register>();
+ __ UnpoisonHeapReference(value);
+ // Do not unpoison the reference contained in register `expected`,
+ // as it is the same as register `out`.
+ }
}
void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1734,5 +1757,9 @@
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace x86
} // namespace art
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 888c7b8..ea342e9 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1251,6 +1251,9 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot:
__ movl(trg, Address(base, offset, ScaleFactor::TIMES_1, 0));
+ if (type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(trg);
+ }
break;
case Primitive::kPrimLong:
@@ -1325,7 +1328,7 @@
locations->SetInAt(3, Location::RequiresRegister());
if (type == Primitive::kPrimNot) {
// Need temp registers for card-marking.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -1369,6 +1372,11 @@
if (type == Primitive::kPrimLong) {
__ movq(Address(base, offset, ScaleFactor::TIMES_1, 0), value);
+ } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value);
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
} else {
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value);
}
@@ -1471,6 +1479,11 @@
base,
value,
value_can_be_null);
+
+ if (kPoisonHeapReferences) {
+ __ PoisonHeapReference(expected);
+ __ PoisonHeapReference(value);
+ }
}
__ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
@@ -1482,6 +1495,11 @@
// Convert ZF into the boolean result.
__ setcc(kZero, out);
__ movzxb(out, out);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ __ UnpoisonHeapReference(value);
+ __ UnpoisonHeapReference(expected);
+ }
}
void IntrinsicCodeGeneratorX86_64::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1598,5 +1616,9 @@
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace x86_64
} // namespace art
diff --git a/compiler/utils/arm/assembler_arm.cc b/compiler/utils/arm/assembler_arm.cc
index 0086fe8..09d2270 100644
--- a/compiler/utils/arm/assembler_arm.cc
+++ b/compiler/utils/arm/assembler_arm.cc
@@ -529,13 +529,13 @@
}
void ArmAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
ArmManagedRegister dst = mdest.AsArm();
CHECK(dst.IsCoreRegister() && dst.IsCoreRegister()) << dst;
LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
base.AsArm().AsCoreRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
- rsb(dst.AsCoreRegister(), dst.AsCoreRegister(), ShifterOperand(0));
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dst.AsCoreRegister());
}
}
diff --git a/compiler/utils/arm/assembler_arm.h b/compiler/utils/arm/assembler_arm.h
index f8ca48e..5d85d11 100644
--- a/compiler/utils/arm/assembler_arm.h
+++ b/compiler/utils/arm/assembler_arm.h
@@ -774,7 +774,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -857,6 +857,27 @@
return r >= R8;
}
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(Register reg) {
+ // reg = -reg.
+ rsb(reg, reg, ShifterOperand(0));
+ }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(Register reg) {
+ // reg = -reg.
+ rsb(reg, reg, ShifterOperand(0));
+ }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
protected:
// Returns whether or not the given register is used for passing parameters.
static int RegisterCompare(const Register* reg1, const Register* reg2) {
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index 077579c..0e17512 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -298,15 +298,15 @@
}
void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
Arm64ManagedRegister dst = m_dst.AsArm64();
Arm64ManagedRegister base = m_base.AsArm64();
CHECK(dst.IsXRegister() && base.IsXRegister());
LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (unpoison_reference) {
WRegister ref_reg = dst.AsOverlappingWRegister();
- ___ Neg(reg_w(ref_reg), vixl::Operand(reg_w(ref_reg)));
+ MaybeUnpoisonHeapReference(reg_w(ref_reg));
}
}
@@ -784,5 +784,25 @@
cfi_.DefCFAOffset(frame_size);
}
+void Arm64Assembler::PoisonHeapReference(vixl::Register reg) {
+ DCHECK(reg.IsW());
+ // reg = -reg.
+ ___ Neg(reg, vixl::Operand(reg));
+}
+
+void Arm64Assembler::UnpoisonHeapReference(vixl::Register reg) {
+ DCHECK(reg.IsW());
+ // reg = -reg.
+ ___ Neg(reg, vixl::Operand(reg));
+}
+
+void Arm64Assembler::MaybeUnpoisonHeapReference(vixl::Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+}
+
+#undef ___
+
} // namespace arm64
} // namespace art
diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h
index db95537..05882a3 100644
--- a/compiler/utils/arm64/assembler_arm64.h
+++ b/compiler/utils/arm64/assembler_arm64.h
@@ -10,7 +10,7 @@
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
* limitations under the License.
*/
@@ -116,7 +116,7 @@
void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;
@@ -182,6 +182,17 @@
// and branch to a ExceptionSlowPath if it is.
void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(vixl::Register reg);
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(vixl::Register reg);
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(vixl::Register reg);
+
private:
static vixl::Register reg_x(int code) {
CHECK(code < kNumberOfXRegisters) << code;
diff --git a/compiler/utils/assembler.h b/compiler/utils/assembler.h
index ee2d594..3097cd5 100644
--- a/compiler/utils/assembler.h
+++ b/compiler/utils/assembler.h
@@ -441,9 +441,9 @@
virtual void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size);
virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
- // If poison_reference is true and kPoisonReference is true, then we negate the read reference.
+ // If unpoison_reference is true and kPoisonHeapReferences is true, then we negate the read reference.
virtual void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) = 0;
+ bool unpoison_reference) = 0;
virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;
diff --git a/compiler/utils/mips/assembler_mips.cc b/compiler/utils/mips/assembler_mips.cc
index e55b461..c09dfcc 100644
--- a/compiler/utils/mips/assembler_mips.cc
+++ b/compiler/utils/mips/assembler_mips.cc
@@ -697,12 +697,12 @@
}
void MipsAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
MipsManagedRegister dest = mdest.AsMips();
CHECK(dest.IsCoreRegister() && dest.IsCoreRegister());
LoadFromOffset(kLoadWord, dest.AsCoreRegister(),
base.AsMips().AsCoreRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (kPoisonHeapReferences && unpoison_reference) {
Subu(dest.AsCoreRegister(), ZERO, dest.AsCoreRegister());
}
}
diff --git a/compiler/utils/mips/assembler_mips.h b/compiler/utils/mips/assembler_mips.h
index 7b0fc39..0d1b82c 100644
--- a/compiler/utils/mips/assembler_mips.h
+++ b/compiler/utils/mips/assembler_mips.h
@@ -192,7 +192,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister mdest, ManagedRegister base, Offset offs) OVERRIDE;
diff --git a/compiler/utils/mips64/assembler_mips64.cc b/compiler/utils/mips64/assembler_mips64.cc
index 3333cd2..24ea9e2 100644
--- a/compiler/utils/mips64/assembler_mips64.cc
+++ b/compiler/utils/mips64/assembler_mips64.cc
@@ -1242,12 +1242,12 @@
}
void Mips64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
Mips64ManagedRegister dest = mdest.AsMips64();
CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(),
base.AsMips64().AsGpuRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (kPoisonHeapReferences && unpoison_reference) {
// TODO: review
// Negate the 32-bit ref
Dsubu(dest.AsGpuRegister(), ZERO, dest.AsGpuRegister());
diff --git a/compiler/utils/mips64/assembler_mips64.h b/compiler/utils/mips64/assembler_mips64.h
index 88cc4bc..47b146a 100644
--- a/compiler/utils/mips64/assembler_mips64.h
+++ b/compiler/utils/mips64/assembler_mips64.h
@@ -265,7 +265,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister mdest, ManagedRegister base, Offset offs) OVERRIDE;
diff --git a/compiler/utils/x86/assembler_x86.cc b/compiler/utils/x86/assembler_x86.cc
index 390d46e..fa85ada 100644
--- a/compiler/utils/x86/assembler_x86.cc
+++ b/compiler/utils/x86/assembler_x86.cc
@@ -1910,12 +1910,12 @@
}
void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
X86ManagedRegister dest = mdest.AsX86();
CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
- if (kPoisonHeapReferences && poison_reference) {
- negl(dest.AsCpuRegister());
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
}
}
diff --git a/compiler/utils/x86/assembler_x86.h b/compiler/utils/x86/assembler_x86.h
index 1c1c023..d1b4e1d 100644
--- a/compiler/utils/x86/assembler_x86.h
+++ b/compiler/utils/x86/assembler_x86.h
@@ -541,7 +541,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -616,6 +616,21 @@
// and branch to a ExceptionSlowPath if it is.
void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(Register reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(Register reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
private:
inline void EmitUint8(uint8_t value);
inline void EmitInt32(int32_t value);
diff --git a/compiler/utils/x86_64/assembler_x86_64.cc b/compiler/utils/x86_64/assembler_x86_64.cc
index ac95c71..f35f51c 100644
--- a/compiler/utils/x86_64/assembler_x86_64.cc
+++ b/compiler/utils/x86_64/assembler_x86_64.cc
@@ -2597,12 +2597,12 @@
}
void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
X86_64ManagedRegister dest = mdest.AsX86_64();
CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
movl(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
- if (kPoisonHeapReferences && poison_reference) {
- negl(dest.AsCpuRegister());
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
}
}
diff --git a/compiler/utils/x86_64/assembler_x86_64.h b/compiler/utils/x86_64/assembler_x86_64.h
index 6b2b65d..61ffeab 100644
--- a/compiler/utils/x86_64/assembler_x86_64.h
+++ b/compiler/utils/x86_64/assembler_x86_64.h
@@ -669,7 +669,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -767,6 +767,21 @@
// Is the constant area empty? Return true if there are no literals in the constant area.
bool IsConstantAreaEmpty() const { return constant_area_.GetSize() == 0; }
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(CpuRegister reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(CpuRegister reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(CpuRegister reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
private:
void EmitUint8(uint8_t value);
void EmitInt32(int32_t value);