Fix uses of MaybeRecordImplicitNullCheck without special scopes
MaybeRecordImplicitNullCheck is a function that uses
CodeGenerator::RecordPcInfo() and requires an exact PC. However, for ARM32/ARM64,
when CodeGenerator::RecordPcInfo() is used without VIXL special scopes (EmissionCheckScope,
ExactAssemblyScope) there is no guarantee of an exact PC: without the special scopes VIXL might
emit veneer/literal pools, affecting the PC.
The ARM32 code generator has uses of MaybeRecordImplicitNullCheck without the
special scopes.
This CL fixes missing special scopes in the ARM32/ARM64 code generators.
It also changes API to prevent such cases:
1. A variant of CodeGenerator::RecordPcInfo which takes native_pc as a
parameter is added. The old variant (which uses Assembler::CodePosition) is
kept, with documentation noting that Assembler::CodePosition is
target-dependent and might be imprecise.
2. CodeGenerator::MaybeRecordImplicitNullCheck is made virtual. Checks
are added to ARM32/ARM64 code generators that
MaybeRecordImplicitNullCheck is invoked within VIXL special scopes.
Test: test.py --host --optimizing --jit --gtest
Test: test.py --target --optimizing --jit
Test: run-gtests.sh
Change-Id: Ic66c16e7bdf4751cbc19a9de05846fba005b7f55
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 4cae497..1c69dd6 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -68,6 +68,7 @@
using helpers::SRegisterFrom;
using helpers::Uint64ConstantFrom;
+using vixl::EmissionCheckScope;
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
@@ -5443,24 +5444,29 @@
case DataType::Type::kUint16:
case DataType::Type::kInt16:
case DataType::Type::kInt32: {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
StoreOperandType operand_type = GetStoreOperandType(field_type);
GetAssembler()->StoreToOffset(operand_type, RegisterFrom(value), base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
case DataType::Type::kReference: {
+ vixl32::Register value_reg = RegisterFrom(value);
if (kPoisonHeapReferences && needs_write_barrier) {
// Note that in the case where `value` is a null reference,
// we do not enter this block, as a null reference does not
// need poisoning.
DCHECK_EQ(field_type, DataType::Type::kReference);
- vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
- __ Mov(temp, RegisterFrom(value));
- GetAssembler()->PoisonHeapReference(temp);
- GetAssembler()->StoreToOffset(kStoreWord, temp, base, offset);
- } else {
- GetAssembler()->StoreToOffset(kStoreWord, RegisterFrom(value), base, offset);
+ value_reg = RegisterFrom(locations->GetTemp(0));
+ __ Mov(value_reg, RegisterFrom(value));
+ GetAssembler()->PoisonHeapReference(value_reg);
}
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
+ GetAssembler()->StoreToOffset(kStoreWord, value_reg, base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
@@ -5474,6 +5480,8 @@
RegisterFrom(locations->GetTemp(1)),
instruction);
} else {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), base, offset);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
@@ -5481,7 +5489,10 @@
}
case DataType::Type::kFloat32: {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->StoreSToOffset(SRegisterFrom(value), base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
@@ -5501,6 +5512,8 @@
RegisterFrom(locations->GetTemp(3)),
instruction);
} else {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->StoreDToOffset(value_reg, base, offset);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
@@ -5514,16 +5527,6 @@
UNREACHABLE();
}
- // Longs and doubles are handled in the switch.
- if (field_type != DataType::Type::kInt64 && field_type != DataType::Type::kFloat64) {
- // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method, we
- // should use a scope and the assembler to emit the store instruction to guarantee that we
- // record the pc at the correct position. But the `Assembler` does not automatically handle
- // unencodable offsets. Practically, everything is fine because the helper and VIXL, at the time
- // of writing, do generate the store instruction last.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- }
-
if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
vixl32::Register card = RegisterFrom(locations->GetTemp(1));
@@ -5686,8 +5689,11 @@
case DataType::Type::kUint16:
case DataType::Type::kInt16:
case DataType::Type::kInt32: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
LoadOperandType operand_type = GetLoadOperandType(load_type);
GetAssembler()->LoadFromOffset(operand_type, RegisterFrom(out), base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
@@ -5703,8 +5709,12 @@
codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
} else {
- GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+ {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
+ GetAssembler()->LoadFromOffset(kLoadWord, RegisterFrom(out), base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
if (is_volatile) {
codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
@@ -5716,26 +5726,34 @@
break;
}
- case DataType::Type::kInt64:
+ case DataType::Type::kInt64: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
if (is_volatile && !atomic_ldrd_strd) {
GenerateWideAtomicLoad(base, offset, LowRegisterFrom(out), HighRegisterFrom(out));
} else {
GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out), base, offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
+ }
- case DataType::Type::kFloat32:
+ case DataType::Type::kFloat32: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->LoadSFromOffset(SRegisterFrom(out), base, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
+ }
case DataType::Type::kFloat64: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
vixl32::DRegister out_dreg = DRegisterFrom(out);
if (is_volatile && !atomic_ldrd_strd) {
vixl32::Register lo = RegisterFrom(locations->GetTemp(0));
vixl32::Register hi = RegisterFrom(locations->GetTemp(1));
GenerateWideAtomicLoad(base, offset, lo, hi);
- // TODO(VIXL): Do we need to be immediately after the ldrexd instruction? If so we need a
- // scope.
codegen_->MaybeRecordImplicitNullCheck(instruction);
__ Vmov(out_dreg, lo, hi);
} else {
@@ -5752,19 +5770,6 @@
UNREACHABLE();
}
- if (load_type == DataType::Type::kReference || load_type == DataType::Type::kFloat64) {
- // Potential implicit null checks, in the case of reference or
- // double fields, are handled in the previous switch statement.
- } else {
- // Address cases other than reference and double that may require an implicit null check.
- // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method, we
- // should use a scope and the assembler to emit the load instruction to guarantee that we
- // record the pc at the correct position. But the `Assembler` does not automatically handle
- // unencodable offsets. Practically, everything is fine because the helper and VIXL, at the time
- // of writing, do generate the store instruction last.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- }
-
if (is_volatile) {
if (load_type == DataType::Type::kReference) {
// Memory barriers, in the case of references, are also handled
@@ -6052,6 +6057,8 @@
if (maybe_compressed_char_at) {
length = RegisterFrom(locations->GetTemp(0));
uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->LoadFromOffset(kLoadWord, length, obj, count_offset);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
@@ -6080,8 +6087,11 @@
} else {
uint32_t full_offset = data_offset + (const_index << DataType::SizeShift(type));
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
LoadOperandType load_type = GetLoadOperandType(type);
GetAssembler()->LoadFromOffset(load_type, RegisterFrom(out_loc), obj, full_offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
}
} else {
UseScratchRegisterScope temps(GetVIXLAssembler());
@@ -6114,7 +6124,10 @@
__ Bind(&done);
}
} else {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
}
}
break;
@@ -6154,15 +6167,13 @@
} else {
vixl32::Register out = OutputRegister(instruction);
if (index.IsConstant()) {
- size_t offset =
- (Int32ConstantFrom(index) << TIMES_4) + data_offset;
- GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
- // TODO(VIXL): Here and for other calls to `MaybeRecordImplicitNullCheck` in this method,
- // we should use a scope and the assembler to emit the load instruction to guarantee that
- // we record the pc at the correct position. But the `Assembler` does not automatically
- // handle unencodable offsets. Practically, everything is fine because the helper and
- // VIXL, at the time of writing, do generate the store instruction last.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+ size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
+ {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
+ GetAssembler()->LoadFromOffset(kLoadWord, out, obj, offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
// If read barriers are enabled, emit read barriers other than
// Baker's using a slow path (and also unpoison the loaded
// reference, if heap poisoning is enabled).
@@ -6183,12 +6194,13 @@
} else {
__ Add(temp, obj, data_offset);
}
- codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
- temps.Close();
- // TODO(VIXL): Use a scope to ensure that we record the pc position immediately after the
- // load instruction. Practically, everything is fine because the helper and VIXL, at the
- // time of writing, do generate the store instruction last.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+ {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
+ codegen_->LoadFromShiftedRegOffset(type, out_loc, temp, RegisterFrom(index));
+ temps.Close();
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
// If read barriers are enabled, emit read barriers other than
// Baker's using a slow path (and also unpoison the loaded
// reference, if heap poisoning is enabled).
@@ -6200,6 +6212,9 @@
}
case DataType::Type::kInt64: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
if (index.IsConstant()) {
size_t offset =
(Int32ConstantFrom(index) << TIMES_8) + data_offset;
@@ -6210,10 +6225,14 @@
__ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
GetAssembler()->LoadFromOffset(kLoadWordPair, LowRegisterFrom(out_loc), temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
case DataType::Type::kFloat32: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
vixl32::SRegister out = SRegisterFrom(out_loc);
if (index.IsConstant()) {
size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
@@ -6224,10 +6243,14 @@
__ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
GetAssembler()->LoadSFromOffset(out, temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
case DataType::Type::kFloat64: {
+ // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
if (index.IsConstant()) {
size_t offset = (Int32ConstantFrom(index) << TIMES_8) + data_offset;
GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), obj, offset);
@@ -6237,6 +6260,7 @@
__ Add(temp, obj, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
GetAssembler()->LoadDFromOffset(DRegisterFrom(out_loc), temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
@@ -6246,15 +6270,6 @@
LOG(FATAL) << "Unreachable type " << type;
UNREACHABLE();
}
-
- if (type == DataType::Type::kReference) {
- // Potential implicit null checks, in the case of reference
- // arrays, are handled in the previous switch statement.
- } else if (!maybe_compressed_char_at) {
- // TODO(VIXL): Use a scope to ensure we record the pc info immediately after
- // the preceding load instruction.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- }
}
void LocationsBuilderARMVIXL::VisitArraySet(HArraySet* instruction) {
@@ -6308,7 +6323,10 @@
uint32_t full_offset =
data_offset + (const_index << DataType::SizeShift(value_type));
StoreOperandType store_type = GetStoreOperandType(value_type);
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
GetAssembler()->StoreToOffset(store_type, RegisterFrom(value_loc), array, full_offset);
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
} else {
UseScratchRegisterScope temps(GetVIXLAssembler());
vixl32::Register temp = temps.Acquire();
@@ -6325,7 +6343,10 @@
} else {
__ Add(temp, array, data_offset);
}
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
}
break;
}
@@ -6337,6 +6358,9 @@
DCHECK(!has_intermediate_address);
if (instruction->InputAt(2)->IsNullConstant()) {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
// Just setting null.
if (index.IsConstant()) {
size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
@@ -6348,8 +6372,6 @@
__ Add(temp, array, data_offset);
codegen_->StoreToShiftedRegOffset(value_type, value_loc, temp, RegisterFrom(index));
}
- // TODO(VIXL): Use a scope to ensure we record the pc info immediately after the preceding
- // store instruction.
codegen_->MaybeRecordImplicitNullCheck(instruction);
DCHECK(!needs_write_barrier);
DCHECK(!needs_type_check);
@@ -6440,25 +6462,28 @@
source = temp1;
}
- if (index.IsConstant()) {
- size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
- GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
- } else {
- DCHECK(index.IsRegister()) << index;
+ {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
+ if (index.IsConstant()) {
+ size_t offset = (Int32ConstantFrom(index) << TIMES_4) + data_offset;
+ GetAssembler()->StoreToOffset(kStoreWord, source, array, offset);
+ } else {
+ DCHECK(index.IsRegister()) << index;
- UseScratchRegisterScope temps(GetVIXLAssembler());
- vixl32::Register temp = temps.Acquire();
- __ Add(temp, array, data_offset);
- codegen_->StoreToShiftedRegOffset(value_type,
- LocationFrom(source),
- temp,
- RegisterFrom(index));
- }
+ UseScratchRegisterScope temps(GetVIXLAssembler());
+ vixl32::Register temp = temps.Acquire();
+ __ Add(temp, array, data_offset);
+ codegen_->StoreToShiftedRegOffset(value_type,
+ LocationFrom(source),
+ temp,
+ RegisterFrom(index));
+ }
- if (can_value_be_null || !needs_type_check) {
- // TODO(VIXL): Ensure we record the pc position immediately after the preceding store
- // instruction.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+ if (can_value_be_null || !needs_type_check) {
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
}
if (slow_path != nullptr) {
@@ -6469,6 +6494,9 @@
}
case DataType::Type::kInt64: {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
Location value = locations->InAt(2);
if (index.IsConstant()) {
size_t offset =
@@ -6480,10 +6508,14 @@
__ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
GetAssembler()->StoreToOffset(kStoreWordPair, LowRegisterFrom(value), temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
case DataType::Type::kFloat32: {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
Location value = locations->InAt(2);
DCHECK(value.IsFpuRegister());
if (index.IsConstant()) {
@@ -6495,10 +6527,14 @@
__ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_4));
GetAssembler()->StoreSToOffset(SRegisterFrom(value), temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
case DataType::Type::kFloat64: {
+ // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
+ // As two macro instructions can be emitted the max size is doubled.
+ EmissionCheckScope guard(GetVIXLAssembler(), 2 * kMaxMacroInstructionSizeInBytes);
Location value = locations->InAt(2);
DCHECK(value.IsFpuRegisterPair());
if (index.IsConstant()) {
@@ -6510,6 +6546,7 @@
__ Add(temp, array, Operand(RegisterFrom(index), vixl32::LSL, TIMES_8));
GetAssembler()->StoreDToOffset(DRegisterFrom(value), temp, data_offset);
}
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
break;
}
@@ -6519,13 +6556,6 @@
LOG(FATAL) << "Unreachable type " << value_type;
UNREACHABLE();
}
-
- // Objects are handled in the switch.
- if (value_type != DataType::Type::kReference) {
- // TODO(VIXL): Ensure we record the pc position immediately after the preceding store
- // instruction.
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- }
}
void LocationsBuilderARMVIXL::VisitArrayLength(HArrayLength* instruction) {