author Santiago Aboy Solanes <solanes@google.com> 2024-03-22 10:45:15 +0000
committer Santiago Aboy Solanes <solanes@google.com> 2024-03-25 13:43:14 +0000
commit 56dcaeec46dd53e271d6f201d2028996bf19f9dd (patch)
tree ea607494f1912d60e838be94919e25db2ec3ac89 /compiler
parent 9c4a2b5bb3695349e31a00492731e578e8853a21 (diff)
Remove extra uses of ClearAllBits
ArenaBitVector creation guarantees it starts empty. Add a debug check
to make sure this assumption doesn't change.

Note that ArenaAllocator guarantees zero-initialized memory but
ScopedArenaAllocators do not. This is fine either way since the
BitVector constructor calls ClearAllBits.

Bug: 329037671
Test: art/test/testrunner/testrunner.py --host --64 --optimizing -b
Change-Id: Icbf5e5dd1869e80b5d5828ecca9f13de30c0242b
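For illustration, a minimal, self-contained sketch of the invariant this cleanup relies on. This is a stand-in, not the real ART ArenaBitVector (the actual constructor and the added debug check live where BitVector is defined, outside the compiler/ tree this diff is limited to): construction zero-fills the storage, so calling ClearAllBits() immediately after creating a vector is redundant.

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Minimal stand-in for ArenaBitVector (not the real ART class).
    class BitVector {
     public:
      explicit BitVector(uint32_t num_bits)
          : storage_((num_bits + 31) / 32, 0u) {  // zero-filled on construction
        // Debug check in the spirit of the commit message: creation
        // guarantees the vector starts empty.
        assert(HighestBitSet() == -1);
      }
      void SetBit(uint32_t idx) { storage_[idx / 32] |= 1u << (idx % 32); }
      int HighestBitSet() const {
        for (int i = static_cast<int>(storage_.size() * 32) - 1; i >= 0; --i) {
          if ((storage_[i / 32] >> (i % 32)) & 1u) return i;
        }
        return -1;
      }
     private:
      std::vector<uint32_t> storage_;
    };

    int main() {
      BitVector visited(/*num_bits=*/64);
      // No visited.ClearAllBits() needed here, which is exactly the call
      // this commit removes at each ArenaBitVector use site.
      visited.SetBit(3);
      assert(visited.HighestBitSet() == 3);
      return 0;
    }

Every hunk below applies the same mechanical change: remove the ClearAllBits() call that immediately follows an ArenaBitVector construction (or the constructor body that existed only to make that call).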
Diffstat (limited to 'compiler')
-rw-r--r-- compiler/optimizing/code_sinking.cc | 6
-rw-r--r-- compiler/optimizing/constructor_fence_redundancy_elimination.cc | 1
-rw-r--r-- compiler/optimizing/dead_code_elimination.cc | 8
-rw-r--r-- compiler/optimizing/graph_checker.cc | 1
-rw-r--r-- compiler/optimizing/graph_checker.h | 4
-rw-r--r-- compiler/optimizing/gvn.cc | 7
-rw-r--r-- compiler/optimizing/instruction_builder.cc | 1
-rw-r--r-- compiler/optimizing/load_store_analysis.h | 4
-rw-r--r-- compiler/optimizing/load_store_elimination.cc | 23
-rw-r--r-- compiler/optimizing/nodes.cc | 5
-rw-r--r-- compiler/optimizing/register_allocator.cc | 1
-rw-r--r-- compiler/optimizing/ssa_liveness_analysis.h | 3
-rw-r--r-- compiler/optimizing/ssa_phi_elimination.cc | 1
-rw-r--r-- compiler/optimizing/superblock_cloner.cc | 2
14 files changed, 12 insertions, 55 deletions
diff --git a/compiler/optimizing/code_sinking.cc b/compiler/optimizing/code_sinking.cc
index a2371817ee..0abcaea719 100644
--- a/compiler/optimizing/code_sinking.cc
+++ b/compiler/optimizing/code_sinking.cc
@@ -333,10 +333,9 @@ void CodeSinking::SinkCodeToUncommonBranch(HBasicBlock* end_block) {
size_t number_of_instructions = graph_->GetCurrentInstructionId();
ScopedArenaVector<HInstruction*> worklist(allocator.Adapter(kArenaAllocMisc));
- ArenaBitVector processed_instructions(&allocator, number_of_instructions, /* expandable= */ false);
- processed_instructions.ClearAllBits();
+ ArenaBitVector processed_instructions(
+ &allocator, number_of_instructions, /* expandable= */ false);
ArenaBitVector post_dominated(&allocator, graph_->GetBlocks().size(), /* expandable= */ false);
- post_dominated.ClearAllBits();
// Step (1): Visit post order to get a subset of blocks post dominated by `end_block`.
// TODO(ngeoffray): Getting the full set of post-dominated should be done by
@@ -411,7 +410,6 @@ void CodeSinking::SinkCodeToUncommonBranch(HBasicBlock* end_block) {
// Step (2): iterate over the worklist to find sinking candidates.
ArenaBitVector instructions_that_can_move(
&allocator, number_of_instructions, /* expandable= */ false);
- instructions_that_can_move.ClearAllBits();
ScopedArenaVector<ScopedArenaVector<HInstruction*>> instructions_to_move(
graph_->GetBlocks().size(),
ScopedArenaVector<HInstruction*>(allocator.Adapter(kArenaAllocMisc)),
diff --git a/compiler/optimizing/constructor_fence_redundancy_elimination.cc b/compiler/optimizing/constructor_fence_redundancy_elimination.cc
index 30c33dd5c5..71fc39a956 100644
--- a/compiler/optimizing/constructor_fence_redundancy_elimination.cc
+++ b/compiler/optimizing/constructor_fence_redundancy_elimination.cc
@@ -52,7 +52,6 @@ class CFREVisitor final : public HGraphVisitor {
size_t number_of_instructions = GetGraph()->GetCurrentInstructionId();
candidate_fence_targets_.emplace(
&scoped_allocator_, number_of_instructions, /*expandable=*/ false, kArenaAllocCFRE);
- candidate_fence_targets_->ClearAllBits();
}
for (HInstruction* input : constructor_fence->GetInputs()) {
diff --git a/compiler/optimizing/dead_code_elimination.cc b/compiler/optimizing/dead_code_elimination.cc
index fe1361c935..f44f4b577b 100644
--- a/compiler/optimizing/dead_code_elimination.cc
+++ b/compiler/optimizing/dead_code_elimination.cc
@@ -592,10 +592,7 @@ struct HDeadCodeElimination::TryBelongingInformation {
TryBelongingInformation(HGraph* graph, ScopedArenaAllocator* allocator)
: blocks_in_try(allocator, graph->GetBlocks().size(), /*expandable=*/false, kArenaAllocDCE),
coalesced_try_entries(
- allocator, graph->GetBlocks().size(), /*expandable=*/false, kArenaAllocDCE) {
- blocks_in_try.ClearAllBits();
- coalesced_try_entries.ClearAllBits();
- }
+ allocator, graph->GetBlocks().size(), /*expandable=*/false, kArenaAllocDCE) {}
// Which blocks belong in the try.
ArenaBitVector blocks_in_try;
@@ -803,7 +800,6 @@ bool HDeadCodeElimination::RemoveEmptyIfs() {
ScopedArenaAllocator allocator(graph_->GetArenaStack());
ArenaBitVector visited_blocks(
&allocator, graph_->GetBlocks().size(), /*expandable=*/ false, kArenaAllocDCE);
- visited_blocks.ClearAllBits();
HBasicBlock* merge_true = true_block;
visited_blocks.SetBit(merge_true->GetBlockId());
while (merge_true->IsSingleGoto()) {
@@ -827,7 +823,6 @@ bool HDeadCodeElimination::RemoveEmptyIfs() {
ScopedArenaQueue<HInstruction*> maybe_remove(allocator.Adapter(kArenaAllocDCE));
ArenaBitVector visited(
&allocator, graph_->GetCurrentInstructionId(), /*expandable=*/ false, kArenaAllocDCE);
- visited.ClearAllBits();
maybe_remove.push(if_instr->InputAt(0));
visited.SetBit(if_instr->GetId());
@@ -879,7 +874,6 @@ bool HDeadCodeElimination::RemoveDeadBlocks(bool force_recomputation,
// Classify blocks as reachable/unreachable.
ArenaBitVector live_blocks(&allocator, graph_->GetBlocks().size(), false, kArenaAllocDCE);
- live_blocks.ClearAllBits();
MarkReachableBlocks(graph_, &live_blocks);
bool removed_one_or_more_blocks = false;
diff --git a/compiler/optimizing/graph_checker.cc b/compiler/optimizing/graph_checker.cc
index f2662700c7..1ff0d4dc84 100644
--- a/compiler/optimizing/graph_checker.cc
+++ b/compiler/optimizing/graph_checker.cc
@@ -1175,7 +1175,6 @@ void GraphChecker::VisitPhi(HPhi* phi) {
GetGraph()->GetCurrentInstructionId(),
/* expandable= */ false,
kArenaAllocGraphChecker);
- visited.ClearAllBits();
if (!IsConstantEquivalent(phi, other_phi, &visited)) {
AddError(StringPrintf("Two phis (%d and %d) found for VReg %d but they "
"are not equivalents of constants.",
diff --git a/compiler/optimizing/graph_checker.h b/compiler/optimizing/graph_checker.h
index 541a9cc3d2..a1b7b28a24 100644
--- a/compiler/optimizing/graph_checker.h
+++ b/compiler/optimizing/graph_checker.h
@@ -43,9 +43,7 @@ class GraphChecker final : public HGraphDelegateVisitor {
uses_per_instruction_(allocator_.Adapter(kArenaAllocGraphChecker)),
instructions_per_block_(allocator_.Adapter(kArenaAllocGraphChecker)),
phis_per_block_(allocator_.Adapter(kArenaAllocGraphChecker)),
- codegen_(codegen) {
- seen_ids_.ClearAllBits();
- }
+ codegen_(codegen) {}
// Check the whole graph. The pass_change parameter indicates whether changes
// may have occurred during the just executed pass. The default value is
diff --git a/compiler/optimizing/gvn.cc b/compiler/optimizing/gvn.cc
index 9113860387..cd3b07065c 100644
--- a/compiler/optimizing/gvn.cc
+++ b/compiler/optimizing/gvn.cc
@@ -353,16 +353,13 @@ class ValueSet : public ArenaObject<kArenaAllocGvn> {
*/
class GlobalValueNumberer : public ValueObject {
public:
- GlobalValueNumberer(HGraph* graph,
- const SideEffectsAnalysis& side_effects)
+ GlobalValueNumberer(HGraph* graph, const SideEffectsAnalysis& side_effects)
: graph_(graph),
allocator_(graph->GetArenaStack()),
side_effects_(side_effects),
sets_(graph->GetBlocks().size(), nullptr, allocator_.Adapter(kArenaAllocGvn)),
visited_blocks_(
- &allocator_, graph->GetBlocks().size(), /* expandable= */ false, kArenaAllocGvn) {
- visited_blocks_.ClearAllBits();
- }
+ &allocator_, graph->GetBlocks().size(), /* expandable= */ false, kArenaAllocGvn) {}
bool Run();
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 410d6fd0d0..1d665b2559 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -538,7 +538,6 @@ ArenaBitVector* HInstructionBuilder::FindNativeDebugInfoLocations() {
code_item_accessor_.InsnsSizeInCodeUnits(),
/* expandable= */ false,
kArenaAllocGraphBuilder);
- locations->ClearAllBits();
// The visitor gets called when the line number changes.
// In other words, it marks the start of new java statement.
code_item_accessor_.DecodeDebugPositionInfo([&](const DexFile::PositionInfo& entry) {
diff --git a/compiler/optimizing/load_store_analysis.h b/compiler/optimizing/load_store_analysis.h
index 4a630ddf8f..088f5b710f 100644
--- a/compiler/optimizing/load_store_analysis.h
+++ b/compiler/optimizing/load_store_analysis.h
@@ -189,9 +189,7 @@ class HeapLocationCollector : public HGraphVisitor {
ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
heap_locations_(allocator->Adapter(kArenaAllocLSA)),
aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
- has_heap_stores_(false) {
- aliasing_matrix_.ClearAllBits();
- }
+ has_heap_stores_(false) {}
~HeapLocationCollector() {
CleanUp();
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 80cf9e669b..bd38d8265c 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -1427,9 +1427,8 @@ LSEVisitor::LSEVisitor(HGraph* graph,
loads_and_stores_(allocator_.Adapter(kArenaAllocLSE)),
// We may add new instructions (default values, Phis) but we're not adding loads
// or stores, so we shall not need to resize following vector and BitVector.
- substitute_instructions_for_loads_(graph->GetCurrentInstructionId(),
- nullptr,
- allocator_.Adapter(kArenaAllocLSE)),
+ substitute_instructions_for_loads_(
+ graph->GetCurrentInstructionId(), nullptr, allocator_.Adapter(kArenaAllocLSE)),
kept_stores_(&allocator_,
/*start_bits=*/graph->GetCurrentInstructionId(),
/*expandable=*/false,
@@ -1440,17 +1439,12 @@ LSEVisitor::LSEVisitor(HGraph* graph,
kArenaAllocLSE),
loads_requiring_loop_phi_(allocator_.Adapter(kArenaAllocLSE)),
store_records_(allocator_.Adapter(kArenaAllocLSE)),
- phi_placeholder_replacements_(num_phi_placeholders_,
- Value::Invalid(),
- allocator_.Adapter(kArenaAllocLSE)),
+ phi_placeholder_replacements_(
+ num_phi_placeholders_, Value::Invalid(), allocator_.Adapter(kArenaAllocLSE)),
singleton_new_instances_(allocator_.Adapter(kArenaAllocLSE)),
field_infos_(heap_location_collector_.GetNumberOfHeapLocations(),
allocator_.Adapter(kArenaAllocLSE)),
- current_phase_(Phase::kLoadElimination) {
- // Clear bit vectors.
- phi_placeholders_to_search_for_kept_stores_.ClearAllBits();
- kept_stores_.ClearAllBits();
-}
+ current_phase_(Phase::kLoadElimination) {}
LSEVisitor::Value LSEVisitor::PrepareLoopValue(HBasicBlock* block, size_t idx) {
// If the pre-header value is known (which implies that the reference dominates this
@@ -1876,7 +1870,6 @@ bool LSEVisitor::TryReplacingLoopPhiPlaceholderWithDefault(
/*start_bits=*/ num_phi_placeholders_,
/*expandable=*/ false,
kArenaAllocLSE);
- visited.ClearAllBits();
ScopedArenaVector<PhiPlaceholder> work_queue(allocator.Adapter(kArenaAllocLSE));
// Use depth first search to check if any non-Phi input is unknown.
@@ -1966,7 +1959,6 @@ bool LSEVisitor::TryReplacingLoopPhiPlaceholderWithSingleInput(
/*start_bits=*/ num_phi_placeholders_,
/*expandable=*/ false,
kArenaAllocLSE);
- visited.ClearAllBits();
ScopedArenaVector<PhiPlaceholder> work_queue(allocator.Adapter(kArenaAllocLSE));
// Use depth first search to check if any non-Phi input is unknown.
@@ -2278,7 +2270,6 @@ bool LSEVisitor::MaterializeLoopPhis(const ArenaBitVector& phi_placeholders_to_m
dependencies.push_back(
ArenaBitVector::Create(&allocator, num_phi_placeholders, kExpandable, kArenaAllocLSE));
ArenaBitVector* current_dependencies = dependencies.back();
- current_dependencies->ClearAllBits();
current_dependencies->SetBit(matrix_index); // Count the Phi placeholder as its own dependency.
PhiPlaceholder current_phi_placeholder =
GetPhiPlaceholderAt(phi_placeholder_indexes[matrix_index]);
@@ -2377,7 +2368,6 @@ std::optional<LSEVisitor::PhiPlaceholder> LSEVisitor::TryToMaterializeLoopPhis(
// Find Phi placeholders to materialize.
ArenaBitVector phi_placeholders_to_materialize(
&allocator, num_phi_placeholders_, /*expandable=*/ false, kArenaAllocLSE);
- phi_placeholders_to_materialize.ClearAllBits();
DataType::Type type = load->GetType();
bool can_use_default_or_phi = IsDefaultOrPhiAllowedForLoad(load);
std::optional<PhiPlaceholder> loop_phi_with_unknown_input = FindLoopPhisToMaterialize(
@@ -2681,12 +2671,10 @@ void LSEVisitor::FindOldValueForPhiPlaceholder(PhiPlaceholder phi_placeholder,
/*start_bits=*/ num_phi_placeholders_,
/*expandable=*/ false,
kArenaAllocLSE);
- visited.ClearAllBits();
// Find Phi placeholders to try and match against existing Phis or other replacement values.
ArenaBitVector phi_placeholders_to_materialize(
&allocator, num_phi_placeholders_, /*expandable=*/ false, kArenaAllocLSE);
- phi_placeholders_to_materialize.ClearAllBits();
std::optional<PhiPlaceholder> loop_phi_with_unknown_input = FindLoopPhisToMaterialize(
phi_placeholder, &phi_placeholders_to_materialize, type, /*can_use_default_or_phi=*/true);
if (loop_phi_with_unknown_input) {
@@ -2791,7 +2779,6 @@ void LSEVisitor::FindStoresWritingOldValues() {
/*start_bits=*/ GetGraph()->GetCurrentInstructionId(),
/*expandable=*/ false,
kArenaAllocLSE);
- eliminated_stores.ClearAllBits();
for (uint32_t store_id : kept_stores_.Indexes()) {
DCHECK(kept_stores_.IsBitSet(store_id));
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index ba268560e8..f2cac19786 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -72,7 +72,6 @@ void HGraph::FindBackEdges(ArenaBitVector* visited) {
// Nodes that we're currently visiting, indexed by block id.
ArenaBitVector visiting(
&allocator, blocks_.size(), /* expandable= */ false, kArenaAllocGraphBuilder);
- visiting.ClearAllBits();
// Number of successors visited from a given node, indexed by block id.
ScopedArenaVector<size_t> successors_visited(blocks_.size(),
0u,
@@ -219,7 +218,6 @@ GraphAnalysisResult HGraph::BuildDominatorTree() {
ScopedArenaAllocator allocator(GetArenaStack());
ArenaBitVector visited(&allocator, blocks_.size(), false, kArenaAllocGraphBuilder);
- visited.ClearAllBits();
// (1) Find the back edges in the graph doing a DFS traversal.
FindBackEdges(&visited);
@@ -890,7 +888,6 @@ void HLoopInformation::Populate() {
graph->GetBlocks().size(),
/* expandable= */ false,
kArenaAllocGraphBuilder);
- visited.ClearAllBits();
// Stop marking blocks at the loop header.
visited.SetBit(header_->GetBlockId());
@@ -1221,7 +1218,6 @@ std::ostream& HInstruction::Dump(std::ostream& os, bool dump_args) {
(graph != nullptr) ? graph->GetCurrentInstructionId() : 0u,
/* expandable= */ (graph == nullptr),
kArenaAllocMisc);
- visited.ClearAllBits();
visited.SetBit(GetId());
// Keep a queue of instructions with their indentations.
ScopedArenaDeque<std::pair<HInstruction*, size_t>> queue(allocator.Adapter(kArenaAllocMisc));
@@ -1424,7 +1420,6 @@ void HInstruction::ReplaceUsesDominatedBy(HInstruction* dominator,
graph->GetBlocks().size(),
/* expandable= */ false,
kArenaAllocMisc);
- visited_blocks->ClearAllBits();
ScopedArenaAllocator allocator(graph->GetArenaStack());
ScopedArenaQueue<const HBasicBlock*> worklist(allocator.Adapter(kArenaAllocMisc));
worklist.push(dominator_block);
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index 54a80555dc..1b3a9a6285 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -214,7 +214,6 @@ bool RegisterAllocator::ValidateIntervals(ArrayRef<LiveInterval* const> interval
for (size_t i = 0; i < number_of_registers + number_of_spill_slots; ++i) {
liveness_of_values.push_back(
ArenaBitVector::Create(&allocator, max_end, false, kArenaAllocRegisterAllocatorValidate));
- liveness_of_values.back()->ClearAllBits();
}
for (LiveInterval* start_interval : intervals) {
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index cc2b49cf22..e9422edb15 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -41,9 +41,6 @@ class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
UNUSED(block_);
- live_in_.ClearAllBits();
- live_out_.ClearAllBits();
- kill_.ClearAllBits();
}
private:
diff --git a/compiler/optimizing/ssa_phi_elimination.cc b/compiler/optimizing/ssa_phi_elimination.cc
index 1d9be3956a..0796acc687 100644
--- a/compiler/optimizing/ssa_phi_elimination.cc
+++ b/compiler/optimizing/ssa_phi_elimination.cc
@@ -143,7 +143,6 @@ bool SsaRedundantPhiElimination::Run() {
graph_->GetCurrentInstructionId(),
/* expandable= */ false,
kArenaAllocSsaPhiElimination);
- visited_phis_in_cycle.ClearAllBits();
ScopedArenaVector<HPhi*> cycle_worklist(allocator.Adapter(kArenaAllocSsaPhiElimination));
while (!worklist.empty()) {
diff --git a/compiler/optimizing/superblock_cloner.cc b/compiler/optimizing/superblock_cloner.cc
index 0a7b95a1fb..e14f7347fb 100644
--- a/compiler/optimizing/superblock_cloner.cc
+++ b/compiler/optimizing/superblock_cloner.cc
@@ -272,8 +272,6 @@ void SuperblockCloner::CopyIncomingEdgesForVersioning() {
// FindBackEdgesInTheNaturalLoop.
void SuperblockCloner::FindBackEdgesLocal(HBasicBlock* entry_block, ArenaBitVector* local_set) {
ArenaBitVector visited(arena_, graph_->GetBlocks().size(), false, kArenaAllocSuperblockCloner);
- // "visited" must be empty on entry, it's an output argument for all visited (i.e. live) blocks.
- DCHECK_EQ(visited.GetHighestBitSet(), -1);
// Nodes that we're currently visiting, indexed by block id.
ArenaBitVector visiting(arena_, graph_->GetBlocks().size(), false, kArenaAllocGraphBuilder);