Update runtime/ implications to use (D)CHECK_IMPLIES

Follow-up to aosp/1988868 in which we added the (D)CHECK_IMPLIES
macro. This CL uses it on runtime/ occurrences found by a regex.

Test: art/test/testrunner/testrunner.py --host --64 --optimizing -b
Change-Id: Id4ee45b41bad85a1f0d98c1e88af6baa3e34a662
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index c86e248..5d90b9c 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -154,14 +154,14 @@
 inline bool ArtMethod::IsResolutionMethod() {
   bool result = this == Runtime::Current()->GetResolutionMethod();
   // Check that if we do think it is phony it looks like the resolution method.
-  DCHECK(!result || IsRuntimeMethod());
+  DCHECK_IMPLIES(result, IsRuntimeMethod());
   return result;
 }
 
 inline bool ArtMethod::IsImtUnimplementedMethod() {
   bool result = this == Runtime::Current()->GetImtUnimplementedMethod();
   // Check that if we do think it is phony it looks like the imt unimplemented method.
-  DCHECK(!result || IsRuntimeMethod());
+  DCHECK_IMPLIES(result, IsRuntimeMethod());
   return result;
 }
 
@@ -357,8 +357,8 @@
   ArtMethod* interface_method = GetInterfaceMethodForProxyUnchecked(pointer_size);
   // We can check that the proxy class implements the interface only if the proxy class
   // is resolved, otherwise the interface table is not yet initialized.
-  DCHECK(!GetDeclaringClass()->IsResolved() ||
-         interface_method->GetDeclaringClass()->IsAssignableFrom(GetDeclaringClass()));
+  DCHECK_IMPLIES(GetDeclaringClass()->IsResolved(),
+                 interface_method->GetDeclaringClass()->IsAssignableFrom(GetDeclaringClass()));
   return interface_method;
 }
 
diff --git a/runtime/cha.cc b/runtime/cha.cc
index bb96518..d19d4f6 100644
--- a/runtime/cha.cc
+++ b/runtime/cha.cc
@@ -317,7 +317,7 @@
   // even if it overrides, it doesn't invalidate single-implementation
   // assumption.
 
-  DCHECK((virtual_method != method_in_super) || virtual_method->IsAbstract());
+  DCHECK_IMPLIES(virtual_method == method_in_super, virtual_method->IsAbstract());
   DCHECK(method_in_super->GetDeclaringClass()->IsResolved()) << "class isn't resolved";
   // If virtual_method doesn't come from a default interface method, it should
   // be supplied by klass.
diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h
index e51d5a7..02b2778 100644
--- a/runtime/class_linker-inl.h
+++ b/runtime/class_linker-inl.h
@@ -310,7 +310,7 @@
   // Note: The referrer can be a Proxy constructor. In that case, we need to do the
   // lookup in the context of the original method from where it steals the code.
   // However, we delay the GetInterfaceMethodIfProxy() until needed.
-  DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
+  DCHECK_IMPLIES(referrer->IsProxyMethod(), referrer->IsConstructor());
   // We do not need the read barrier for getting the DexCache for the initial resolved method
   // lookup as both from-space and to-space copies point to the same native resolved methods array.
   ArtMethod* resolved_method = referrer->GetDexCache<kWithoutReadBarrier>()->GetResolvedMethod(
@@ -362,7 +362,7 @@
   // Note: The referrer can be a Proxy constructor. In that case, we need to do the
   // lookup in the context of the original method from where it steals the code.
   // However, we delay the GetInterfaceMethodIfProxy() until needed.
-  DCHECK(!referrer->IsProxyMethod() || referrer->IsConstructor());
+  DCHECK_IMPLIES(referrer->IsProxyMethod(), referrer->IsConstructor());
   Thread::PoisonObjectPointersIfDebug();
   // We do not need the read barrier for getting the DexCache for the initial resolved method
   // lookup as both from-space and to-space copies point to the same native resolved methods array.
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 62be39a..6cfd5c8 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1999,8 +1999,8 @@
           // Set image methods' entry point that point to the nterp trampoline to the
           // nterp entry point. This allows taking the fast path when doing a
           // nterp->nterp call.
-          DCHECK(!NeedsClinitCheckBeforeCall(&method) ||
-                 method.GetDeclaringClass()->IsVisiblyInitialized());
+          DCHECK_IMPLIES(NeedsClinitCheckBeforeCall(&method),
+                         method.GetDeclaringClass()->IsVisiblyInitialized());
           method.SetEntryPointFromQuickCompiledCode(interpreter::GetNterpEntryPoint());
         } else {
           method.SetEntryPointFromQuickCompiledCode(GetQuickToInterpreterBridge());
@@ -4148,9 +4148,9 @@
   }
   // Core array classes, i.e. Object[], Class[], String[] and primitive
   // arrays, have special initialization and they should be found above.
-  DCHECK(!component_type->IsObjectClass() ||
-         // Guard from false positives for errors before setting superclass.
-         component_type->IsErroneousUnresolved());
+  DCHECK_IMPLIES(component_type->IsObjectClass(),
+                 // Guard from false positives for errors before setting superclass.
+                 component_type->IsErroneousUnresolved());
   DCHECK(!component_type->IsStringClass());
   DCHECK(!component_type->IsClassClass());
   DCHECK(!component_type->IsPrimitive());
@@ -4566,7 +4566,7 @@
   // If the oat file says the class had an error, re-run the verifier. That way we will either:
   // 1) Be successful at runtime, or
   // 2) Get a precise error message.
-  DCHECK(!mirror::Class::IsErroneous(oat_file_class_status) || !preverified);
+  DCHECK_IMPLIES(mirror::Class::IsErroneous(oat_file_class_status), !preverified);
 
   std::string error_msg;
   verifier::FailureKind verifier_failure = verifier::FailureKind::kNoFailure;
@@ -8884,7 +8884,7 @@
   self->EndAssertNoThreadSuspension(old_no_suspend_cause);
 
   // We lie to the GC about the java.lang.ref.Reference.referent field, so it doesn't scan it.
-  DCHECK(!class_linker->init_done_ || !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
+  DCHECK_IMPLIES(class_linker->init_done_, !klass->DescriptorEquals("Ljava/lang/ref/Reference;"));
   if (!is_static &&
       UNLIKELY(!class_linker->init_done_) &&
       klass->DescriptorEquals("Ljava/lang/ref/Reference;")) {
@@ -9186,8 +9186,9 @@
     // In case of jmvti, the dex file gets verified before being registered, so first
     // check if it's registered before checking class tables.
     const DexFile& dex_file = *dex_cache->GetDexFile();
-    DCHECK(!IsDexFileRegistered(Thread::Current(), dex_file) ||
-           FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
+    DCHECK_IMPLIES(
+        IsDexFileRegistered(Thread::Current(), dex_file),
+        FindClassTable(Thread::Current(), dex_cache) == ClassTableForClassLoader(class_loader))
         << "DexFile referrer: " << dex_file.GetLocation()
         << " ClassLoader: " << DescribeLoaders(class_loader, "");
     // Be a good citizen and update the dex cache to speed subsequent calls.
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index b5bd6ef..0b4398d 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -730,7 +730,8 @@
       VLOG(deopt) << "Forcing deoptimization on return from method " << method->PrettyMethod()
                   << " to " << caller->PrettyMethod()
                   << (force_frame_pop ? " for frame-pop" : "");
-      DCHECK(!force_frame_pop || result.GetJ() == 0) << "Force frame pop should have no result.";
+      DCHECK_IMPLIES(force_frame_pop, result.GetJ() == 0)
+          << "Force frame pop should have no result.";
       if (force_frame_pop && self->GetException() != nullptr) {
         LOG(WARNING) << "Suppressing exception for instruction-retry: "
                      << self->GetException()->Dump();
diff --git a/runtime/gc/allocator/rosalloc.cc b/runtime/gc/allocator/rosalloc.cc
index 2698874..320440d 100644
--- a/runtime/gc/allocator/rosalloc.cc
+++ b/runtime/gc/allocator/rosalloc.cc
@@ -674,7 +674,7 @@
     DCHECK(thread_local_run->IsThreadLocal() || thread_local_run == dedicated_full_run_);
     slot_addr = thread_local_run->AllocSlot();
     // The allocation must fail if the run is invalid.
-    DCHECK(thread_local_run != dedicated_full_run_ || slot_addr == nullptr)
+    DCHECK_IMPLIES(thread_local_run == dedicated_full_run_, slot_addr == nullptr)
         << "allocated from an invalid run";
     if (UNLIKELY(slot_addr == nullptr)) {
       // The run got full. Try to free slots.
diff --git a/runtime/gc/collector/concurrent_copying-inl.h b/runtime/gc/collector/concurrent_copying-inl.h
index 76bc812..5a25166 100644
--- a/runtime/gc/collector/concurrent_copying-inl.h
+++ b/runtime/gc/collector/concurrent_copying-inl.h
@@ -129,7 +129,7 @@
                                                mirror::Object* holder,
                                                MemberOffset offset) {
   // Cannot have `kNoUnEvac` when Generational CC collection is disabled.
-  DCHECK(!kNoUnEvac || use_generational_cc_);
+  DCHECK_IMPLIES(kNoUnEvac, use_generational_cc_);
   if (from_ref == nullptr) {
     return nullptr;
   }
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 34a4089..0de62fe 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -3152,7 +3152,7 @@
   explicit RefFieldsVisitor(ConcurrentCopying* collector, Thread* const thread)
       : collector_(collector), thread_(thread) {
     // Cannot have `kNoUnEvac` when Generational CC collection is disabled.
-    DCHECK(!kNoUnEvac || collector_->use_generational_cc_);
+    DCHECK_IMPLIES(kNoUnEvac, collector_->use_generational_cc_);
   }
 
   void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */)
@@ -3189,7 +3189,7 @@
 template <bool kNoUnEvac>
 inline void ConcurrentCopying::Scan(mirror::Object* to_ref, size_t obj_size) {
   // Cannot have `kNoUnEvac` when Generational CC collection is disabled.
-  DCHECK(!kNoUnEvac || use_generational_cc_);
+  DCHECK_IMPLIES(kNoUnEvac, use_generational_cc_);
   if (kDisallowReadBarrierDuringScan && !Runtime::Current()->IsActiveTransaction()) {
     // Avoid all read barriers during visit references to help performance.
     // Don't do this in transaction mode because we may read the old value of an field which may
@@ -3215,7 +3215,7 @@
 template <bool kNoUnEvac>
 inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset) {
   // Cannot have `kNoUnEvac` when Generational CC collection is disabled.
-  DCHECK(!kNoUnEvac || use_generational_cc_);
+  DCHECK_IMPLIES(kNoUnEvac, use_generational_cc_);
   DCHECK_EQ(Thread::Current(), thread_running_gc_);
   mirror::Object* ref = obj->GetFieldObject<
       mirror::Object, kVerifyNone, kWithoutReadBarrier, false>(offset);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index f9bf02b..100da30 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -486,7 +486,7 @@
     DCHECK_EQ(heap_reservation_size, heap_reservation.IsValid() ? heap_reservation.Size() : 0u);
     DCHECK(!boot_image_spaces.empty());
     request_begin = boot_image_spaces.back()->GetImageHeader().GetOatFileEnd();
-    DCHECK(!heap_reservation.IsValid() || request_begin == heap_reservation.Begin())
+    DCHECK_IMPLIES(heap_reservation.IsValid(), request_begin == heap_reservation.Begin())
         << "request_begin=" << static_cast<const void*>(request_begin)
         << " heap_reservation.Begin()=" << static_cast<const void*>(heap_reservation.Begin());
     for (std::unique_ptr<space::ImageSpace>& space : boot_image_spaces) {
@@ -3777,7 +3777,7 @@
     gc::Heap* heap = runtime->GetHeap();
     DCHECK(GCNumberLt(my_gc_num_, heap->GetCurrentGcNum() + 2));  // <= current_gc_num + 1
     heap->ConcurrentGC(self, cause_, force_full_, my_gc_num_);
-    CHECK(!GCNumberLt(heap->GetCurrentGcNum(), my_gc_num_) || runtime->IsShuttingDown(self));
+    CHECK_IMPLIES(GCNumberLt(heap->GetCurrentGcNum(), my_gc_num_), runtime->IsShuttingDown(self));
   }
 
  private:
diff --git a/runtime/gc/space/dlmalloc_space.cc b/runtime/gc/space/dlmalloc_space.cc
index 4f7cc71..25cac7e 100644
--- a/runtime/gc/space/dlmalloc_space.cc
+++ b/runtime/gc/space/dlmalloc_space.cc
@@ -184,7 +184,7 @@
     // Zero freshly allocated memory, done while not holding the space's lock.
     memset(result, 0, num_bytes);
     // Check that the result is contained in the space.
-    CHECK(!kDebugSpaces || Contains(result));
+    CHECK_IMPLIES(kDebugSpaces, Contains(result));
   }
   return result;
 }
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index b6cb334..6afd63e 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -2192,7 +2192,7 @@
   DCHECK_EQ(GetBaseAddress(), 0u);
   bool validate = (oat_checksums != nullptr);
   static_assert(ImageSpace::kImageChecksumPrefix == 'i', "Format prefix check.");
-  DCHECK(!validate || StartsWith(*oat_checksums, "i"));
+  DCHECK_IMPLIES(validate, StartsWith(*oat_checksums, "i"));
 
   ArrayRef<const std::string> components = image_locations_;
   size_t named_components_count = 0u;
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 7df33b9..171c5cd 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -1032,7 +1032,7 @@
       r->Unfree(this, time_);
       if (use_generational_cc_) {
         // TODO: Add an explanation for this assertion.
-        DCHECK(!for_evac || !r->is_newly_allocated_);
+        DCHECK_IMPLIES(for_evac, !r->is_newly_allocated_);
       }
       if (for_evac) {
         ++num_evac_regions_;
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index b60a384..38c94ab 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -328,7 +328,7 @@
 
   // Lock counting is a special version of accessibility checks, and for simplicity and
   // reduction of template parameters, we gate it behind access-checks mode.
-  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());
+  DCHECK_IMPLIES(method->SkipAccessChecks(), !method->MustCountLocks());
 
   VLOG(interpreter) << "Interpreting " << method->PrettyMethod();
 
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index 9c275da..cb98830 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -397,7 +397,7 @@
 }
 
 Jit::~Jit() {
-  DCHECK(!options_->GetSaveProfilingInfo() || !ProfileSaver::IsStarted());
+  DCHECK_IMPLIES(options_->GetSaveProfilingInfo(), !ProfileSaver::IsStarted());
   if (options_->DumpJitInfoOnShutdown()) {
     DumpInfo(LOG_STREAM(INFO));
     Runtime::Current()->DumpDeoptimizations(LOG_STREAM(INFO));
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index ad44ac2..e6be531 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -647,7 +647,7 @@
                           CompilationKind compilation_kind,
                           bool has_should_deoptimize_flag,
                           const ArenaSet<ArtMethod*>& cha_single_implementation_list) {
-  DCHECK(!method->IsNative() || (compilation_kind != CompilationKind::kOsr));
+  DCHECK_IMPLIES(method->IsNative(), (compilation_kind != CompilationKind::kOsr));
 
   if (!method->IsNative()) {
     // We need to do this before grabbing the lock_ because it needs to be able to see the string
diff --git a/runtime/jni/jni_internal.cc b/runtime/jni/jni_internal.cc
index 57102e3..cd9ee06 100644
--- a/runtime/jni/jni_internal.cc
+++ b/runtime/jni/jni_internal.cc
@@ -166,7 +166,7 @@
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<mirror::String> string = ObjPtr<mirror::String>::DownCast(obj);
     string->SetCount(count_);
-    DCHECK(!string->IsCompressed() || mirror::kUseStringCompression);
+    DCHECK_IMPLIES(string->IsCompressed(), mirror::kUseStringCompression);
     if (string->IsCompressed()) {
       uint8_t* value_compressed = string->GetValueCompressed();
       auto good = [&](const char* ptr, size_t length) {
@@ -202,8 +202,8 @@
         *value++ = kBadUtf8ReplacementChar;
       };
       VisitUtf8Chars(utf_, utf8_length_, good, bad);
-      DCHECK(!mirror::kUseStringCompression ||
-             !mirror::String::AllASCII(string->GetValue(), string->GetLength()));
+      DCHECK_IMPLIES(mirror::kUseStringCompression,
+                     !mirror::String::AllASCII(string->GetValue(), string->GetLength()));
     }
   }
 
diff --git a/runtime/mirror/class-alloc-inl.h b/runtime/mirror/class-alloc-inl.h
index 5627b49..ed3967b 100644
--- a/runtime/mirror/class-alloc-inl.h
+++ b/runtime/mirror/class-alloc-inl.h
@@ -57,7 +57,7 @@
       break;
     case Class::AddFinalizer::kNoAddFinalizer:
       add_finalizer = false;
-      DCHECK(!kCheckAddFinalizer || !IsFinalizable());
+      DCHECK_IMPLIES(kCheckAddFinalizer, !IsFinalizable());
       break;
   }
   // Note that the `this` pointer may be invalidated after the allocation.
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index c9df1f0..a07a15b 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -149,7 +149,7 @@
                                           std::memory_order_seq_cst);
     }
     ObjPtr<ClassExt> ret(set ? new_ext.Get() : h_this->GetExtData());
-    DCHECK(!set || h_this->GetExtData() == new_ext.Get());
+    DCHECK_IMPLIES(set, h_this->GetExtData() == new_ext.Get());
     CHECK(!ret.IsNull());
     // Restore the exception if there was one.
     if (throwable != nullptr) {
@@ -1546,7 +1546,7 @@
     DCHECK(!klass->IsProxyClass());
     dex::TypeIndex type_idx = klass->GetDirectInterfaceTypeIdx(idx);
     interface = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, klass.Get());
-    CHECK(interface != nullptr || self->IsExceptionPending());
+    CHECK_IMPLIES(interface == nullptr, self->IsExceptionPending());
   }
   return interface;
 }
@@ -1755,7 +1755,7 @@
   // We iterate over virtual methods first and then over direct ones,
   // so we can never be in situation where `orig_method` is direct and
   // `new_method` is virtual.
-  DCHECK(!orig_method->IsDirect() || new_method->IsDirect());
+  DCHECK_IMPLIES(orig_method->IsDirect(), new_method->IsDirect());
 
   // Original method is synthetic, the new one is not?
   if (orig_method->IsSynthetic() && !new_method->IsSynthetic()) {
diff --git a/runtime/mirror/string-alloc-inl.h b/runtime/mirror/string-alloc-inl.h
index 5f3abd7..533053d 100644
--- a/runtime/mirror/string-alloc-inl.h
+++ b/runtime/mirror/string-alloc-inl.h
@@ -45,7 +45,7 @@
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
-    DCHECK(!string->IsCompressed() || kUseStringCompression);
+    DCHECK_IMPLIES(string->IsCompressed(), kUseStringCompression);
   }
 
  private:
@@ -65,7 +65,7 @@
     // Avoid AsString as object is not yet in live bitmap or allocation stack.
     ObjPtr<String> string = ObjPtr<String>::DownCast(obj);
     string->SetCount(count_);
-    DCHECK(!string->IsCompressed() || kUseStringCompression);
+    DCHECK_IMPLIES(string->IsCompressed(), kUseStringCompression);
     int32_t length = String::GetLengthFromCount(count_);
     const uint8_t* const src = reinterpret_cast<uint8_t*>(src_array_->GetData()) + offset_;
     if (string->IsCompressed()) {
diff --git a/runtime/mirror/string-inl.h b/runtime/mirror/string-inl.h
index f132a8d..8c1d13b 100644
--- a/runtime/mirror/string-inl.h
+++ b/runtime/mirror/string-inl.h
@@ -74,10 +74,10 @@
   }
   if (kIsDebugBuild) {
     if (IsCompressed()) {
-      DCHECK(result != 0 || ComputeUtf16Hash(GetValueCompressed(), GetLength()) == 0)
+      DCHECK_IMPLIES(result == 0, ComputeUtf16Hash(GetValueCompressed(), GetLength()) == 0)
           << ToModifiedUtf8() << " " << result;
     } else {
-      DCHECK(result != 0 || ComputeUtf16Hash(GetValue(), GetLength()) == 0)
+      DCHECK_IMPLIES(result == 0, ComputeUtf16Hash(GetValue(), GetLength()) == 0)
           << ToModifiedUtf8() << " " << result;
     }
   }
diff --git a/runtime/mirror/string.cc b/runtime/mirror/string.cc
index 704433a..75282aa 100644
--- a/runtime/mirror/string.cc
+++ b/runtime/mirror/string.cc
@@ -106,7 +106,7 @@
       } else {
         std::transform(src->value_, src->value_ + length, out, replace);
       }
-      DCHECK(!kUseStringCompression || !AllASCII(out, length));
+      DCHECK_IMPLIES(kUseStringCompression, !AllASCII(out, length));
     }
   };
   return Alloc(self, length_with_flag, allocator_type, visitor);
@@ -202,7 +202,7 @@
 ObjPtr<String> String::AllocFromUtf16(Thread* self,
                                       int32_t utf16_length,
                                       const uint16_t* utf16_data_in) {
-  CHECK(utf16_data_in != nullptr || utf16_length == 0);
+  CHECK_IMPLIES(utf16_data_in == nullptr, utf16_length == 0);
   gc::AllocatorType allocator_type = Runtime::Current()->GetHeap()->GetCurrentAllocator();
   const bool compressible = kUseStringCompression &&
                             String::AllASCII<uint16_t>(utf16_data_in, utf16_length);
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index c80feaf..afa3d4d 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -1185,7 +1185,7 @@
   }
 
   bool success = Dlopen(elf_filename, reservation, error_msg);
-  DCHECK(dlopen_handle_ != nullptr || !success);
+  DCHECK_IMPLIES(dlopen_handle_ == nullptr, !success);
 
   return success;
 }
diff --git a/runtime/oat_file_assistant.cc b/runtime/oat_file_assistant.cc
index 46a4d0e..7d2196f 100644
--- a/runtime/oat_file_assistant.cc
+++ b/runtime/oat_file_assistant.cc
@@ -113,7 +113,7 @@
       dm_for_oat_(this, /*is_oat_location=*/ true),
       zip_fd_(zip_fd) {
   CHECK(dex_location != nullptr) << "OatFileAssistant: null dex location";
-  CHECK(!load_executable || context != nullptr) << "Loading executable without a context";
+  CHECK_IMPLIES(load_executable, context != nullptr) << "Loading executable without a context";
 
   if (zip_fd < 0) {
     CHECK_LE(oat_fd, 0) << "zip_fd must be provided with valid oat_fd. zip_fd=" << zip_fd
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 0bcdb13..b2fc369 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1493,7 +1493,7 @@
   // (a) runtime was started with a command line flag that enables the checks, or
   // (b) Zygote forked a new process that is not exempt (see ZygoteHooks).
   hidden_api_policy_ = runtime_options.GetOrDefault(Opt::HiddenApiPolicy);
-  DCHECK(!is_zygote_ || hidden_api_policy_ == hiddenapi::EnforcementPolicy::kDisabled);
+  DCHECK_IMPLIES(is_zygote_, hidden_api_policy_ == hiddenapi::EnforcementPolicy::kDisabled);
 
   // Set core platform API enforcement policy. The checks are disabled by default and
   // can be enabled with a command line flag. AndroidRuntime will pass the flag if
@@ -2146,26 +2146,26 @@
       env->NewGlobalRef(env->GetStaticObjectField(
           WellKnownClasses::java_lang_ThreadGroup,
           WellKnownClasses::java_lang_ThreadGroup_mainThreadGroup));
-  CHECK(main_thread_group_ != nullptr || IsAotCompiler());
+  CHECK_IMPLIES(main_thread_group_ == nullptr, IsAotCompiler());
   system_thread_group_ =
       env->NewGlobalRef(env->GetStaticObjectField(
           WellKnownClasses::java_lang_ThreadGroup,
           WellKnownClasses::java_lang_ThreadGroup_systemThreadGroup));
-  CHECK(system_thread_group_ != nullptr || IsAotCompiler());
+  CHECK_IMPLIES(system_thread_group_ == nullptr, IsAotCompiler());
 }
 
 jobject Runtime::GetMainThreadGroup() const {
-  CHECK(main_thread_group_ != nullptr || IsAotCompiler());
+  CHECK_IMPLIES(main_thread_group_ == nullptr, IsAotCompiler());
   return main_thread_group_;
 }
 
 jobject Runtime::GetSystemThreadGroup() const {
-  CHECK(system_thread_group_ != nullptr || IsAotCompiler());
+  CHECK_IMPLIES(system_thread_group_ == nullptr, IsAotCompiler());
   return system_thread_group_;
 }
 
 jobject Runtime::GetSystemClassLoader() const {
-  CHECK(system_class_loader_ != nullptr || IsAotCompiler());
+  CHECK_IMPLIES(system_class_loader_ == nullptr, IsAotCompiler());
   return system_class_loader_;
 }
 
diff --git a/runtime/subtype_check_info.h b/runtime/subtype_check_info.h
index 08db770..d734557 100644
--- a/runtime/subtype_check_info.h
+++ b/runtime/subtype_check_info.h
@@ -18,6 +18,7 @@
 #define ART_RUNTIME_SUBTYPE_CHECK_INFO_H_
 
 #include "base/bit_string.h"
+#include "base/logging.h"
 #include "subtype_check_bits.h"
 
 // Forward-declare for testing purposes.
@@ -280,9 +281,9 @@
     // Either Assigned or Initialized.
     BitString path_to_root = GetPathToRoot();
 
-    DCHECK(!HasNext() || GetNext() != 0u)
-        << "Expected (Assigned|Initialized) state to have >0 Next value: "
-        << GetNext() << " path: " << path_to_root;
+    DCHECK_IMPLIES(HasNext(), GetNext() != 0u)
+        << "Expected (Assigned|Initialized) state to have >0 Next value: " << GetNext()
+        << " path: " << path_to_root;
 
     if (path_to_root.Length() == depth_) {
       return kAssigned;
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 0a54e19..78ba26d 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3380,7 +3380,7 @@
     DCHECK(IsExceptionPending());
     return;
   }
-  DCHECK(!runtime->IsStarted() || exception_class->IsThrowableClass());
+  DCHECK_IMPLIES(runtime->IsStarted(), exception_class->IsThrowableClass());
   Handle<mirror::Throwable> exception(
       hs.NewHandle(ObjPtr<mirror::Throwable>::DownCast(exception_class->AllocObject(this))));
 
diff --git a/runtime/thread.h b/runtime/thread.h
index d1829a3..0fe6c4c 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -413,7 +413,7 @@
   // End region where no thread suspension is expected.
   void EndAssertNoThreadSuspension(const char* old_cause) RELEASE(Roles::uninterruptible_) {
     if (kIsDebugBuild) {
-      CHECK(old_cause != nullptr || tls32_.no_thread_suspension == 1);
+      CHECK_IMPLIES(old_cause == nullptr, tls32_.no_thread_suspension == 1);
       CHECK_GT(tls32_.no_thread_suspension, 0U);
       tls32_.no_thread_suspension--;
       tlsPtr_.last_no_thread_suspension_cause = old_cause;
diff --git a/runtime/trace.cc b/runtime/trace.cc
index 4b5412f..ec61726 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -562,7 +562,7 @@
       start_time_(MicroTime()), clock_overhead_ns_(GetClockOverheadNanoSeconds()),
       overflow_(false), interval_us_(0), streaming_lock_(nullptr),
       unique_methods_lock_(new Mutex("unique methods lock", kTracingUniqueMethodsLock)) {
-  CHECK(trace_file != nullptr || output_mode == TraceOutputMode::kDDMS);
+  CHECK_IMPLIES(trace_file == nullptr, output_mode == TraceOutputMode::kDDMS);
 
   uint16_t trace_version = GetTraceVersion(clock_source_);
   if (output_mode == TraceOutputMode::kStreaming) {
diff --git a/runtime/vdex_file.cc b/runtime/vdex_file.cc
index 77647fb..fb3f809 100644
--- a/runtime/vdex_file.cc
+++ b/runtime/vdex_file.cc
@@ -126,7 +126,7 @@
                               vdex_length);
     return nullptr;
   }
-  CHECK(!mmap_reuse || mmap_addr != nullptr);
+  CHECK_IMPLIES(mmap_reuse, mmap_addr != nullptr);
   // Start as PROT_WRITE so we can mprotect back to it if we want to.
   MemMap mmap = MemMap::MapFileAtAddress(
       mmap_addr,
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index 1f5e31f..74c286f 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -2864,7 +2864,7 @@
                                    return_type_class,
                                    return_type_class->CannotBeAssignedFromOtherTypes());
         } else {
-          DCHECK(!can_load_classes_ || self_->IsExceptionPending());
+          DCHECK_IMPLIES(can_load_classes_, self_->IsExceptionPending());
           self_->ClearException();
         }
       }
@@ -2909,7 +2909,7 @@
                                    return_type_class,
                                    return_type_class->CannotBeAssignedFromOtherTypes());
         } else {
-          DCHECK(!can_load_classes_ || self_->IsExceptionPending());
+          DCHECK_IMPLIES(can_load_classes_, self_->IsExceptionPending());
           self_->ClearException();
         }
       }
@@ -4710,7 +4710,7 @@
                               field_type_class,
                               field_type_class->CannotBeAssignedFromOtherTypes());
     } else {
-      DCHECK(!can_load_classes_ || self_->IsExceptionPending());
+      DCHECK_IMPLIES(can_load_classes_, self_->IsExceptionPending());
       self_->ClearException();
     }
   } else if (IsSdkVersionSetAndAtLeast(api_level_, SdkVersion::kP)) {
diff --git a/runtime/verifier/reg_type.cc b/runtime/verifier/reg_type.cc
index 5d2db03..40b880e 100644
--- a/runtime/verifier/reg_type.cc
+++ b/runtime/verifier/reg_type.cc
@@ -849,7 +849,7 @@
              (IsDoubleTypes() && incoming_type.IsDoubleTypes()) ||
              (IsDoubleHighTypes() && incoming_type.IsDoubleHighTypes())) {
     // check constant case was handled prior to entry
-    DCHECK(!IsConstant() || !incoming_type.IsConstant());
+    DCHECK_IMPLIES(IsConstant(), !incoming_type.IsConstant());
     // float/long/double MERGE float/long/double_constant => float/long/double
     return SelectNonConstant(*this, incoming_type);
   } else if (IsReferenceTypes() && incoming_type.IsReferenceTypes()) {
diff --git a/runtime/verifier/reg_type_cache-inl.h b/runtime/verifier/reg_type_cache-inl.h
index a3f3ff8..c5ff2b3 100644
--- a/runtime/verifier/reg_type_cache-inl.h
+++ b/runtime/verifier/reg_type_cache-inl.h
@@ -40,7 +40,7 @@
 
 inline const ConstantType& RegTypeCache::FromCat1Const(int32_t value, bool precise) {
   // We only expect 0 to be a precise constant.
-  DCHECK(value != 0 || precise);
+  DCHECK_IMPLIES(value == 0, precise);
   if (precise && (value >= kMinSmallConstant) && (value <= kMaxSmallConstant)) {
     return *small_precise_constants_[value - kMinSmallConstant];
   }
diff --git a/runtime/verifier/reg_type_cache.cc b/runtime/verifier/reg_type_cache.cc
index 2edb0f1..0bba6e8 100644
--- a/runtime/verifier/reg_type_cache.cc
+++ b/runtime/verifier/reg_type_cache.cc
@@ -200,14 +200,14 @@
     // Class was not found, must create new type.
     // To pass the verification, the type should be imprecise,
     // instantiable or an interface with the precise type set to false.
-    DCHECK(!precise || klass->IsInstantiable());
+    DCHECK_IMPLIES(precise, klass->IsInstantiable());
     // Create a precise type if:
     // 1- Class is final and NOT an interface. a precise interface is meaningless !!
     // 2- Precise Flag passed as true.
     RegType* entry;
     // Create an imprecise type if we can't tell for a fact that it is precise.
     if (klass->CannotBeAssignedFromOtherTypes() || precise) {
-      DCHECK(!(klass->IsAbstract()) || klass->IsArrayClass());
+      DCHECK_IMPLIES(klass->IsAbstract(), klass->IsArrayClass());
       DCHECK(!klass->IsInterface());
       entry =
           new (&allocator_) PreciseReferenceType(klass, AddString(sv_descriptor), entries_.size());
diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc
index 4705741..c56b933 100644
--- a/runtime/well_known_classes.cc
+++ b/runtime/well_known_classes.cc
@@ -296,12 +296,16 @@
 
 void Thread::InitStringEntryPoints() {
   QuickEntryPoints* qpoints = &tlsPtr_.quick_entrypoints;
-  #define SET_ENTRY_POINT(init_runtime_name, init_signature, new_runtime_name,              \
-                          new_java_name, new_signature, entry_point_name)                   \
-      DCHECK(!Runtime::Current()->IsStarted() || (new_runtime_name) != nullptr);            \
-      qpoints->p ## entry_point_name = reinterpret_cast<void*>(new_runtime_name);
-      STRING_INIT_LIST(SET_ENTRY_POINT)
-  #undef SET_ENTRY_POINT
+#define SET_ENTRY_POINT(init_runtime_name,                                        \
+                        init_signature,                                           \
+                        new_runtime_name,                                         \
+                        new_java_name,                                            \
+                        new_signature,                                            \
+                        entry_point_name)                                         \
+  DCHECK_IMPLIES(Runtime::Current()->IsStarted(), (new_runtime_name) != nullptr); \
+  qpoints->p##entry_point_name = reinterpret_cast<void*>(new_runtime_name);
+  STRING_INIT_LIST(SET_ENTRY_POINT)
+#undef SET_ENTRY_POINT
 }
 
 ArtMethod* WellKnownClasses::StringInitToStringFactory(ArtMethod* string_init) {