Update compiler/ implications to use (D)CHECK_IMPLIES
Follow-up to aosp/1988868, which added the (D)CHECK_IMPLIES macro.
This CL applies it to the occurrences in compiler/ found by a regex.
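
For reference, (D)CHECK_IMPLIES(a, b) asserts the logical implication
a => b, i.e. it checks the same condition as (D)CHECK(!(a) || (b)) but
states the intent directly and names both operands. A minimal
standalone sketch of such a macro pair (illustrative only, not ART's
actual definition; the SKETCH_* names are invented for this example):

  // Minimal sketch of implication-checking macros layered on a plain
  // CHECK-style assertion. Not ART's real macros.
  #include <cstdlib>
  #include <iostream>

  #define SKETCH_CHECK(cond)                                   \
    do {                                                        \
      if (!(cond)) {                                            \
        std::cerr << __FILE__ << ":" << __LINE__                \
                  << " Check failed: " #cond << std::endl;      \
        std::abort();                                           \
      }                                                         \
    } while (false)

  // a => b is, by definition, !a || b.
  #define SKETCH_CHECK_IMPLIES(a, b) SKETCH_CHECK(!(a) || (b))

  // Debug-only variant, compiled out when NDEBUG is defined.
  #ifdef NDEBUG
  #define SKETCH_DCHECK_IMPLIES(a, b) do {} while (false)
  #else
  #define SKETCH_DCHECK_IMPLIES(a, b) SKETCH_CHECK_IMPLIES(a, b)
  #endif

  int main() {
    constexpr bool kEmitCompilerReadBarrier = true;
    constexpr bool kUseBakerReadBarrier = true;
    // Reads as: "if read barriers are emitted, they are Baker-style".
    SKETCH_DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier);
    return 0;
  }

The rewrite below is purely mechanical: DCHECK(!a || b) becomes
DCHECK_IMPLIES(a, b), and a negated-equality form such as
DCHECK(x != k || cond) becomes DCHECK_IMPLIES(x == k, cond); both
forms check the same boolean condition.
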
Test: art/test/testrunner/testrunner.py --host --64 --optimizing -b
Change-Id: If63aed969bfb8b31d6fbbcb3bca2b04314c894b7
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 1f2ba46..aa4c139 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -1339,7 +1339,7 @@
void IntrinsicCodeGeneratorARMVIXL::VisitSystemArrayCopy(HInvoke* invoke) {
// The only read barrier implementation supporting the
// SystemArrayCopy intrinsic is the Baker-style read barriers.
- DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
+ DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier);
ArmVIXLAssembler* assembler = GetAssembler();
LocationSummary* locations = invoke->GetLocations();
@@ -3797,7 +3797,7 @@
}
void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeCompareAndSetObject(HInvoke* invoke) {
// The only supported read barrier implementation is the Baker-style read barriers (b/173104084).
- DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
+ DCHECK_IMPLIES(kEmitCompilerReadBarrier, kUseBakerReadBarrier);
GenUnsafeCas(invoke, DataType::Type::kReference, codegen_);
}
@@ -4623,7 +4623,8 @@
size_t temp_start = 0u;
if (Use64BitExclusiveLoadStore(atomic, codegen)) {
// Clear `maybe_temp3` which was initialized above for Float64.
- DCHECK(value_type != DataType::Type::kFloat64 || maybe_temp3.Equals(locations->GetTemp(2)));
+ DCHECK_IMPLIES(value_type == DataType::Type::kFloat64,
+ maybe_temp3.Equals(locations->GetTemp(2)));
maybe_temp3 = Location::NoLocation();
temp_start = 2u;
}