Remove default argument values in GenerateGcRootFieldLoad.
These default argument values were rarely, if ever, used.
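As an illustration (condensed from the arm64 hunks below), the
declaration loses its default argument values and call sites now
spell out the trailing arguments explicitly:

  // Before: trailing arguments could be silently defaulted.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch64::Register obj,
                               uint32_t offset,
                               vixl::aarch64::Label* fixup_label = nullptr,
                               bool requires_read_barrier = kEmitCompilerReadBarrier);

  // After: every caller states the fixup label and the read barrier choice.
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch64::Register obj,
                               uint32_t offset,
                               vixl::aarch64::Label* fixup_label,
                               bool requires_read_barrier);

  // Example call site after the change (VisitLoadString, arm64):
  GenerateGcRootFieldLoad(load,
                          load->GetLocations()->Out(),
                          temp,
                          /* offset placeholder */ 0u,
                          ldr_label,
                          kEmitCompilerReadBarrier);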
Test: mmma art (with and without `ART_USE_READ_BARRIER=true`)
Bug: 12687968
Bug: 29516974
Change-Id: I5d15140ce501bf50d7a87871b1e492cee54913db
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 9f92b20..660afec 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -5773,7 +5773,7 @@
__ movt(temp, /* placeholder */ 0u);
__ BindTrackedLabel(&labels->add_pc_label);
__ add(temp, temp, ShifterOperand(PC));
- GenerateGcRootFieldLoad(load, out_loc, temp, 0);
+ GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kEmitCompilerReadBarrier);
SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
codegen_->AddSlowPath(slow_path);
__ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 4d59b47..729cbe1 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -283,12 +283,12 @@
//
// root <- *(obj + offset)
//
- // while honoring read barriers if requires_read_barrier is true.
+ // while honoring read barriers if `requires_read_barrier` is true.
void GenerateGcRootFieldLoad(HInstruction* instruction,
Location root,
Register obj,
uint32_t offset,
- bool requires_read_barrier = kEmitCompilerReadBarrier);
+ bool requires_read_barrier);
void GenerateTestAndBranch(HInstruction* instruction,
size_t condition_input_index,
Label* true_target,
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 9e59d8c..78c164b 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -4098,7 +4098,7 @@
out_loc,
current_method,
ArtMethod::DeclaringClassOffset().Int32Value(),
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
break;
}
@@ -4143,7 +4143,7 @@
out_loc,
out.X(),
offset,
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
generate_null_check = !cls->IsInDexCache();
break;
@@ -4180,7 +4180,7 @@
out_loc,
out.X(),
CodeGenerator::GetCacheOffset(cls->GetTypeIndex()),
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
generate_null_check = !cls->IsInDexCache();
break;
@@ -4319,8 +4319,9 @@
GenerateGcRootFieldLoad(load,
load->GetLocations()->Out(),
temp,
- /* placeholder */ 0u,
- ldr_label);
+ /* offset placeholder */ 0u,
+ ldr_label,
+ kEmitCompilerReadBarrier);
SlowPathCodeARM64* slow_path =
new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
codegen_->AddSlowPath(slow_path);
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index eb28ecb..7de84be 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -289,13 +289,13 @@
//
// root <- *(obj + offset)
//
- // while honoring read barriers (if any).
+ // while honoring read barriers if `requires_read_barrier` is true.
void GenerateGcRootFieldLoad(HInstruction* instruction,
Location root,
vixl::aarch64::Register obj,
uint32_t offset,
- vixl::aarch64::Label* fixup_label = nullptr,
- bool requires_read_barrier = kEmitCompilerReadBarrier);
+ vixl::aarch64::Label* fixup_label,
+ bool requires_read_barrier);
// Generate a floating-point comparison.
void GenerateFcmp(HInstruction* instruction);
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index cac0543..b9e049a 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -3357,7 +3357,8 @@
GenerateGcRootFieldLoad(cls,
out_loc,
current_method,
- ArtMethod::DeclaringClassOffset().Int32Value());
+ ArtMethod::DeclaringClassOffset().Int32Value(),
+ kEmitCompilerReadBarrier);
break;
}
case HLoadClass::LoadKind::kDexCacheViaMethod: {
@@ -3369,7 +3370,7 @@
GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
// /* GcRoot<mirror::Class> */ out = out[type_index]
size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex());
- GenerateGcRootFieldLoad(cls, out_loc, out, offset);
+ GenerateGcRootFieldLoad(cls, out_loc, out, offset, kEmitCompilerReadBarrier);
generate_null_check = !cls->IsInDexCache();
break;
}
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 1cd6184..b0fa038 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -339,7 +339,7 @@
Location root,
vixl::aarch32::Register obj,
uint32_t offset,
- bool requires_read_barrier = kEmitCompilerReadBarrier);
+ bool requires_read_barrier);
void GenerateTestAndBranch(HInstruction* instruction,
size_t condition_input_index,
vixl::aarch32::Label* true_target,
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 02c1c3b..d930016 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -5897,7 +5897,7 @@
cls,
out_loc,
Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
break;
}
@@ -5929,7 +5929,7 @@
GenerateGcRootFieldLoad(cls,
out_loc,
Address::Absolute(address),
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
generate_null_check = !cls->IsInDexCache();
break;
@@ -5957,7 +5957,7 @@
GenerateGcRootFieldLoad(cls,
out_loc,
Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
- /*fixup_label*/ nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
generate_null_check = !cls->IsInDexCache();
break;
@@ -6099,7 +6099,7 @@
Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
// /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
- GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
+ GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
codegen_->AddSlowPath(slow_path);
__ testl(out, out);
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index e7d9a43..9e5bc83 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -259,12 +259,12 @@
//
// root <- *address
//
- // while honoring read barriers (if any).
+ // while honoring read barriers if `requires_read_barrier` is true.
void GenerateGcRootFieldLoad(HInstruction* instruction,
Location root,
const Address& address,
- Label* fixup_label = nullptr,
- bool requires_read_barrier = kEmitCompilerReadBarrier);
+ Label* fixup_label,
+ bool requires_read_barrier);
// Push value to FPU stack. `is_fp` specifies whether the value is floating point or not.
// `is_wide` specifies whether it is long/double or not.
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 4b64c1b..1f0d648 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -5318,7 +5318,7 @@
cls,
out_loc,
Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
- /*fixup_label*/nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
break;
}
@@ -5343,7 +5343,7 @@
GenerateGcRootFieldLoad(cls,
out_loc,
address,
- /*fixup_label*/nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
} else {
// TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
@@ -5351,7 +5351,7 @@
GenerateGcRootFieldLoad(cls,
out_loc,
Address(out, 0),
- /*fixup_label*/nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
}
generate_null_check = !cls->IsInDexCache();
@@ -5379,7 +5379,7 @@
cls,
out_loc,
Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
- /*fixup_label*/nullptr,
+ /* fixup_label */ nullptr,
requires_read_barrier);
generate_null_check = !cls->IsInDexCache();
break;
@@ -5496,7 +5496,7 @@
/* no_rip */ false);
Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
// /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
- GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
+ GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
codegen_->AddSlowPath(slow_path);
__ testl(out, out);
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 57ef83f..eb082a0 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -253,12 +253,12 @@
//
// root <- *address
//
- // while honoring read barriers (if any).
+ // while honoring read barriers if `requires_read_barrier` is true.
void GenerateGcRootFieldLoad(HInstruction* instruction,
Location root,
const Address& address,
- Label* fixup_label = nullptr,
- bool requires_read_barrier = kEmitCompilerReadBarrier);
+ Label* fixup_label,
+ bool requires_read_barrier);
void PushOntoFPStack(Location source, uint32_t temp_offset,
uint32_t stack_adjustment, bool is_float);