Inclusive language fixes

Replace "dummy" names and "sanity check" terminology with more
descriptive alternatives (e.g. SanityCheckFrame -> ValidateFrame).

Based on:
https://source.android.com/setup/contribute/respectful-code
#inclusivefixit
Bug: 161336379
Bug: 161896447
Test: art/test.py --host --64
Change-Id: I02c96aa477c4be6af8384222f1f111cc7ae1eeac
diff --git a/runtime/base/timing_logger.cc b/runtime/base/timing_logger.cc
index bd39192..c4034b0 100644
--- a/runtime/base/timing_logger.cc
+++ b/runtime/base/timing_logger.cc
@@ -96,8 +96,8 @@
delta_time /= kAdjust;
total_time_ += delta_time;
Histogram<uint64_t>* histogram;
- Histogram<uint64_t> dummy(label.c_str());
- auto it = histograms_.find(&dummy);
+ Histogram<uint64_t> candidate(label.c_str());
+ auto it = histograms_.find(&candidate);
if (it == histograms_.end()) {
const size_t max_buckets = Runtime::Current()->GetHeap()->IsLowMemoryMode() ?
kLowMemoryBucketCount : kDefaultBucketCount;
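
For readers skimming the rename: the stack-allocated Histogram here exists
only as a search key for a set that orders heap-owned histograms by label, so
"candidate" describes its role better than "dummy" did. A minimal sketch of
the same lookup pattern, with hypothetical names standing in for the ART
types:

#include <set>
#include <string>

// Hypothetical stand-in for Histogram<uint64_t>: keyed by name.
struct Entry {
  explicit Entry(const char* name) : name(name) {}
  std::string name;
};

// Order heap-owned entries by name rather than by pointer value.
struct EntryNameLess {
  bool operator()(const Entry* a, const Entry* b) const {
    return a->name < b->name;
  }
};

std::set<Entry*, EntryNameLess> entries;

Entry* FindEntry(const std::string& label) {
  Entry candidate(label.c_str());      // stack object used only as a search key
  auto it = entries.find(&candidate);  // compared by name; never dereferenced later
  return (it != entries.end()) ? *it : nullptr;
}

The key object never escapes the function, so the fast path allocates nothing.
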
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index d2cb939..2da3e41 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -951,12 +951,12 @@
{
MutexLock mu(self, *Locks::alloc_tracker_lock_);
gc::AllocRecordObjectMap* records = Runtime::Current()->GetHeap()->GetAllocationRecords();
- // In case this method is called when allocation tracker is disabled,
+ // In case this method is called when the allocation tracker is not enabled,
// we should still send some data back.
- gc::AllocRecordObjectMap dummy;
+ gc::AllocRecordObjectMap fallback_record_map;
if (records == nullptr) {
CHECK(!Runtime::Current()->GetHeap()->IsAllocTrackingEnabled());
- records = &dummy;
+ records = &fallback_record_map;
}
// We don't need to wait on the condition variable records->new_record_condition_, because this
// function only reads the class objects, which are already marked so it doesn't change their
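
The new name also documents the control flow: when the heap has no allocation
record map, the code substitutes an empty, stack-lived map so the rest of the
function can read from a valid object unconditionally. A minimal sketch of
that null-fallback pattern, with a plain std::map standing in for
gc::AllocRecordObjectMap:

#include <cstdio>
#include <map>

using RecordMap = std::map<int, const char*>;  // stand-in for the real record map

void SendClassData(const RecordMap* records) {
  RecordMap fallback_record_map;  // empty substitute; outlives all uses below
  if (records == nullptr) {
    records = &fallback_record_map;  // callers still get a well-formed (empty) reply
  }
  std::printf("classes seen: %zu\n", records->size());
}
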
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 92d28cf..82f97d6 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -617,19 +617,19 @@
// Ensure that the stack is still in order.
if (kIsDebugBuild) {
- class DummyStackVisitor : public StackVisitor {
+ class EntireStackVisitor : public StackVisitor {
public:
- explicit DummyStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
+ explicit EntireStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
: StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}
bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
- // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
+ // Nothing to do here. In a debug build, ValidateFrame will do the work in the walking
// logic. Just always say we want to continue.
return true;
}
};
- DummyStackVisitor dsv(self);
- dsv.WalkStack();
+ EntireStackVisitor esv(self);
+ esv.WalkStack();
}
// Restore the exception that was pending before deoptimization then interpret the
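
The renamed visitor is used purely for its side effects: the walk itself runs
the per-frame checks (ValidateFrame after this change), so the visitor's
callback can be a no-op that just keeps the walk going. A minimal sketch of
that shape, independent of the real StackVisitor API:

#include <vector>

struct Frame { int depth; };

class StackWalker {
 public:
  virtual ~StackWalker() = default;
  virtual bool VisitFrame(const Frame& f) = 0;  // return false to stop early
  void WalkStack(const std::vector<Frame>& frames) {
    for (const Frame& f : frames) {
      ValidateFrame(f);             // the checks run as part of the walk itself
      if (!VisitFrame(f)) return;
    }
  }
 private:
  void ValidateFrame(const Frame& f) { (void)f; /* debug-build consistency checks */ }
};

// A visitor whose only job is to drive the walk over the entire stack.
class EntireStackWalker : public StackWalker {
 public:
  bool VisitFrame(const Frame&) override { return true; }  // nothing to do per frame
};
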
@@ -1311,9 +1311,9 @@
// Resolve method filling in dex cache.
if (!called_method_known_on_entry) {
StackHandleScope<1> hs(self);
- mirror::Object* dummy = nullptr;
+ mirror::Object* fake_receiver = nullptr;
HandleWrapper<mirror::Object> h_receiver(
- hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
+ hs.NewHandleWrapper(virtual_or_interface ? &receiver : &fake_receiver));
DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
called = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
self, called_method.index, caller, invoke_type);
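
Here "fake_receiver" signals that the handle wrapper merely needs some valid
slot address when there is no real receiver to track. A rough sketch of the
idea, with TrackSlot as a hypothetical stand-in for hs.NewHandleWrapper():

struct Object {};

// Hypothetical tracker: it requires a valid slot address in all cases.
void TrackSlot(Object** slot) { (void)slot; }

void ResolveWithReceiver(bool virtual_or_interface, Object*& receiver) {
  Object* fake_receiver = nullptr;  // throwaway slot for the no-receiver case
  // Always hand the tracker a real address instead of branching at each use.
  TrackSlot(virtual_or_interface ? &receiver : &fake_receiver);
}
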
diff --git a/runtime/mirror/var_handle.cc b/runtime/mirror/var_handle.cc
index 3079c35..9a78758 100644
--- a/runtime/mirror/var_handle.cc
+++ b/runtime/mirror/var_handle.cc
@@ -1668,7 +1668,8 @@
if (method_name == nullptr) {
return false;
}
- VarHandleAccessorToAccessModeEntry target = { method_name, /*dummy*/VarHandle::AccessMode::kGet };
+ const auto kUnusedAccessMode = VarHandle::AccessMode::kGet; // Arbitrary value; not used in the comparison.
+ VarHandleAccessorToAccessModeEntry target = { method_name, kUnusedAccessMode };
auto last = std::cend(kAccessorToAccessMode);
auto it = std::lower_bound(std::cbegin(kAccessorToAccessMode),
last,
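
The renamed constant works because the comparator passed to std::lower_bound
only inspects the name field, so the access mode in the probe entry is never
read. A minimal sketch of that sentinel-field search (the table and names are
hypothetical, not the real VarHandle accessor table):

#include <algorithm>
#include <cstring>
#include <iterator>

enum class AccessMode { kGet, kSet };

struct AccessorEntry {
  const char* name;
  AccessMode mode;
};

// Must stay sorted by name for std::lower_bound.
constexpr AccessorEntry kTable[] = {
  { "compareAndSet", AccessMode::kSet },
  { "get", AccessMode::kGet },
  { "set", AccessMode::kSet },
};

bool LookUpAccessMode(const char* method_name, AccessMode* out) {
  const AccessMode kUnusedAccessMode = AccessMode::kGet;  // never compared
  AccessorEntry target = { method_name, kUnusedAccessMode };
  auto cmp = [](const AccessorEntry& a, const AccessorEntry& b) {
    return std::strcmp(a.name, b.name) < 0;  // name is the only key
  };
  auto it = std::lower_bound(std::cbegin(kTable), std::cend(kTable), target, cmp);
  if (it == std::cend(kTable) || std::strcmp(it->name, method_name) != 0) {
    return false;
  }
  *out = it->mode;
  return true;
}
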
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 1fca012..526386d 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -713,7 +713,7 @@
<< " code_size=" << code_size;
}
-void StackVisitor::SanityCheckFrame() const {
+void StackVisitor::ValidateFrame() const {
if (kIsDebugBuild) {
ArtMethod* method = GetMethod();
ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
@@ -886,7 +886,7 @@
cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
}
header_retrieved = false; // Force header retrieval in next iteration.
- SanityCheckFrame();
+ ValidateFrame();
if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
&& (cur_oat_quick_method_header_ != nullptr)
@@ -980,7 +980,7 @@
cur_oat_quick_method_header_ = nullptr;
} else if (cur_shadow_frame_ != nullptr) {
do {
- SanityCheckFrame();
+ ValidateFrame();
bool should_continue = VisitFrame();
if (UNLIKELY(!should_continue)) {
return;
diff --git a/runtime/stack.h b/runtime/stack.h
index a7d9d53..30d7533 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -346,7 +346,7 @@
ShadowFrame* PrepareSetVReg(ArtMethod* m, uint16_t vreg, bool wide)
REQUIRES_SHARED(Locks::mutator_lock_);
- void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);
+ void ValidateFrame() const REQUIRES_SHARED(Locks::mutator_lock_);
ALWAYS_INLINE CodeInfo* GetCurrentInlineInfo() const;
ALWAYS_INLINE StackMap* GetCurrentStackMap() const;