/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_
#define ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_

#include <memory>
#include <ostream>
#include <string>
#include <string_view>
#include <tuple>
#include <variant>
#include <vector>

#include "base/indenter.h"
#include "base/malloc_arena_pool.h"
#include "base/scoped_arena_allocator.h"
#include "builder.h"
#include "common_compiler_test.h"
#include "dex/code_item_accessors-inl.h"
#include "dex/dex_file.h"
#include "dex/dex_instruction.h"
#include "dex/standard_dex_file.h"
#include "driver/dex_compilation_unit.h"
#include "graph_checker.h"
#include "gtest/gtest.h"
#include "handle_scope-inl.h"
#include "handle_scope.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "nodes.h"
#include "scoped_thread_state_change.h"
#include "ssa_builder.h"
#include "ssa_liveness_analysis.h"

namespace art {

#define NUM_INSTRUCTIONS(...)  \
  (sizeof((uint16_t[]) {__VA_ARGS__}) / sizeof(uint16_t))

#define N_REGISTERS_CODE_ITEM(NUM_REGS, ...)                            \
    { NUM_REGS, 0, 0, 0, 0, 0, NUM_INSTRUCTIONS(__VA_ARGS__), 0, __VA_ARGS__ }

#define ZERO_REGISTER_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(0, __VA_ARGS__)
#define ONE_REGISTER_CODE_ITEM(...)    N_REGISTERS_CODE_ITEM(1, __VA_ARGS__)
#define TWO_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(2, __VA_ARGS__)
#define THREE_REGISTERS_CODE_ITEM(...) N_REGISTERS_CODE_ITEM(3, __VA_ARGS__)
#define FOUR_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(4, __VA_ARGS__)
#define FIVE_REGISTERS_CODE_ITEM(...)  N_REGISTERS_CODE_ITEM(5, __VA_ARGS__)
#define SIX_REGISTERS_CODE_ITEM(...)   N_REGISTERS_CODE_ITEM(6, __VA_ARGS__)
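
// Illustrative only: a code item built with these macros is typically handed to
// OptimizingUnitTestHelper::CreateCFG() (defined below). The opcodes here are
// example values from dex_instruction.h; any well-formed Dex bytecode works.
//
//   const std::vector<uint16_t> data = ONE_REGISTER_CODE_ITEM(
//       Instruction::CONST_4 | 0 << 8,
//       Instruction::RETURN_VOID);
//   HGraph* graph = CreateCFG(data);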

struct InstructionDumper {
 public:
  HInstruction* ins_;
};

inline bool operator==(const InstructionDumper& a, const InstructionDumper& b) {
  return a.ins_ == b.ins_;
}
inline bool operator!=(const InstructionDumper& a, const InstructionDumper& b) {
  return !(a == b);
}

inline std::ostream& operator<<(std::ostream& os, const InstructionDumper& id) {
  if (id.ins_ == nullptr) {
    return os << "NULL";
  } else {
    return os << "(" << id.ins_ << "): " << id.ins_->DumpWithArgs();
  }
}

#define EXPECT_INS_EQ(a, b) EXPECT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define EXPECT_INS_REMOVED(a) EXPECT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define EXPECT_INS_RETAINED(a) EXPECT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})
#define ASSERT_INS_EQ(a, b) ASSERT_EQ(InstructionDumper{a}, InstructionDumper{b})
#define ASSERT_INS_REMOVED(a) ASSERT_TRUE(IsRemoved(a)) << "Not removed: " << (InstructionDumper{a})
#define ASSERT_INS_RETAINED(a) ASSERT_FALSE(IsRemoved(a)) << "Removed: " << (InstructionDumper{a})

inline LiveInterval* BuildInterval(const size_t ranges[][2],
                                   size_t number_of_ranges,
                                   ScopedArenaAllocator* allocator,
                                   int reg = -1,
                                   HInstruction* defined_by = nullptr) {
  LiveInterval* interval =
      LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, defined_by);
  if (defined_by != nullptr) {
    defined_by->SetLiveInterval(interval);
  }
  for (size_t i = number_of_ranges; i > 0; --i) {
    interval->AddRange(ranges[i - 1][0], ranges[i - 1][1]);
  }
  interval->SetRegister(reg);
  return interval;
}
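
// Sketch of typical usage from a test deriving from OptimizingUnitTest (the
// concrete range values are arbitrary examples):
//
//   static constexpr size_t ranges[][2] = {{0, 8}, {10, 14}};
//   LiveInterval* interval =
//       BuildInterval(ranges, /* number_of_ranges= */ 2, GetScopedAllocator());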

inline void RemoveSuspendChecks(HGraph* graph) {
  for (HBasicBlock* block : graph->GetBlocks()) {
    if (block != nullptr) {
      if (block->GetLoopInformation() != nullptr) {
        block->GetLoopInformation()->SetSuspendCheck(nullptr);
      }
      for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
        HInstruction* current = it.Current();
        if (current->IsSuspendCheck()) {
          current->GetBlock()->RemoveInstruction(current);
        }
      }
    }
  }
}

class ArenaPoolAndAllocator {
 public:
  ArenaPoolAndAllocator()
      : pool_(), allocator_(&pool_), arena_stack_(&pool_), scoped_allocator_(&arena_stack_) { }

  ArenaAllocator* GetAllocator() { return &allocator_; }
  ArenaStack* GetArenaStack() { return &arena_stack_; }
  ScopedArenaAllocator* GetScopedAllocator() { return &scoped_allocator_; }

 private:
  MallocArenaPool pool_;
  ArenaAllocator allocator_;
  ArenaStack arena_stack_;
  ScopedArenaAllocator scoped_allocator_;
};

class AdjacencyListGraph {
 public:
  using Edge = std::pair<const std::string_view, const std::string_view>;
  AdjacencyListGraph(
      HGraph* graph,
      ArenaAllocator* alloc,
      const std::string_view entry_name,
      const std::string_view exit_name,
      const std::vector<Edge>& adj) : graph_(graph) {
    auto create_block = [&]() {
      HBasicBlock* blk = new (alloc) HBasicBlock(graph_);
      graph_->AddBlock(blk);
      return blk;
    };
    HBasicBlock* entry = create_block();
    HBasicBlock* exit = create_block();
    graph_->SetEntryBlock(entry);
    graph_->SetExitBlock(exit);
    name_to_block_.Put(entry_name, entry);
    name_to_block_.Put(exit_name, exit);
    for (const auto& [src, dest] : adj) {
      HBasicBlock* src_blk = name_to_block_.GetOrCreate(src, create_block);
      HBasicBlock* dest_blk = name_to_block_.GetOrCreate(dest, create_block);
      src_blk->AddSuccessor(dest_blk);
    }
    graph_->ClearReachabilityInformation();
    graph_->ComputeDominanceInformation();
    graph_->ComputeReachabilityInformation();
    for (auto [name, blk] : name_to_block_) {
      block_to_name_.Put(blk, name);
    }
  }

  bool HasBlock(const HBasicBlock* blk) const {
    return block_to_name_.find(blk) != block_to_name_.end();
  }

  std::string_view GetName(const HBasicBlock* blk) const {
    return block_to_name_.Get(blk);
  }

  HBasicBlock* Get(const std::string_view& sv) const {
    return name_to_block_.Get(sv);
  }

  AdjacencyListGraph(AdjacencyListGraph&&) = default;
  AdjacencyListGraph(const AdjacencyListGraph&) = default;
  AdjacencyListGraph& operator=(AdjacencyListGraph&&) = default;
  AdjacencyListGraph& operator=(const AdjacencyListGraph&) = default;

  std::ostream& Dump(std::ostream& os) const {
    struct Namer : public BlockNamer {
     public:
      explicit Namer(const AdjacencyListGraph& alg) : BlockNamer(), alg_(alg) {}
      std::ostream& PrintName(std::ostream& os, HBasicBlock* blk) const override {
        if (alg_.HasBlock(blk)) {
          return os << alg_.GetName(blk) << " (" << blk->GetBlockId() << ")";
        } else {
          return os << "<Unnamed B" << blk->GetBlockId() << ">";
        }
      }

      const AdjacencyListGraph& alg_;
    };
    Namer namer(*this);
    return graph_->Dump(os, namer);
  }

 private:
  HGraph* graph_;
  SafeMap<const std::string_view, HBasicBlock*> name_to_block_;
  SafeMap<const HBasicBlock*, const std::string_view> block_to_name_;
};
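
// Sketch of typical usage (the block names and edges are arbitrary examples;
// tests normally reach this through OptimizingUnitTestHelper::SetupFromAdjacencyList() below):
//
//   AdjacencyListGraph alg(graph_, GetAllocator(), "entry", "exit",
//                          {{"entry", "left"}, {"entry", "right"},
//                           {"left", "exit"}, {"right", "exit"}});
//   HBasicBlock* left = alg.Get("left");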

// Have a separate helper so the OptimizingCFITest can inherit it without causing
// multiple-inheritance errors from having gtest as a parent twice.
class OptimizingUnitTestHelper {
 public:
  OptimizingUnitTestHelper()
      : pool_and_allocator_(new ArenaPoolAndAllocator()),
        graph_(nullptr),
        entry_block_(nullptr),
        return_block_(nullptr),
        exit_block_(nullptr) { }

  ArenaAllocator* GetAllocator() { return pool_and_allocator_->GetAllocator(); }
  ArenaStack* GetArenaStack() { return pool_and_allocator_->GetArenaStack(); }
  ScopedArenaAllocator* GetScopedAllocator() { return pool_and_allocator_->GetScopedAllocator(); }

  void ResetPoolAndAllocator() {
    pool_and_allocator_.reset(new ArenaPoolAndAllocator());
  }

  HGraph* CreateGraph(VariableSizedHandleScope* handles = nullptr) {
    ArenaAllocator* const allocator = pool_and_allocator_->GetAllocator();

    // Reserve a big array of 0s so the dex file constructor can read the offsets from the header.
    static constexpr size_t kDexDataSize = 4 * KB;
    const uint8_t* dex_data = reinterpret_cast<uint8_t*>(allocator->Alloc(kDexDataSize));

    // Create the dex file based on the fake data. Call the constructor so that we can use virtual
    // functions. Don't allocate the StandardDexFile in the arena; otherwise the dex location
    // string would leak.
    dex_files_.emplace_back(new StandardDexFile(
        dex_data,
        sizeof(StandardDexFile::Header),
        "no_location",
        /*location_checksum*/ 0,
        /*oat_dex_file*/ nullptr,
        /*container*/ nullptr));

    graph_ = new (allocator) HGraph(
        allocator,
        pool_and_allocator_->GetArenaStack(),
        handles,
        *dex_files_.back(),
        /*method_idx*/ -1,
        kRuntimeISA);
    return graph_;
  }

  // Create a control-flow graph from Dex instructions.
  HGraph* CreateCFG(const std::vector<uint16_t>& data,
                    DataType::Type return_type = DataType::Type::kInt32,
                    VariableSizedHandleScope* handles = nullptr) {
    HGraph* graph = CreateGraph(handles);

    // The code item data might not be aligned to 4 bytes; copy it to make sure it is.
    const size_t code_item_size = data.size() * sizeof(data.front());
    void* aligned_data = GetAllocator()->Alloc(code_item_size);
    memcpy(aligned_data, &data[0], code_item_size);
    CHECK_ALIGNED(aligned_data, StandardDexFile::CodeItem::kAlignment);
    const dex::CodeItem* code_item = reinterpret_cast<const dex::CodeItem*>(aligned_data);

    {
      const DexCompilationUnit* dex_compilation_unit =
          new (graph->GetAllocator()) DexCompilationUnit(
              /* class_loader= */ Handle<mirror::ClassLoader>(),  // Invalid handle.
              /* class_linker= */ nullptr,
              graph->GetDexFile(),
              code_item,
              /* class_def_index= */ DexFile::kDexNoIndex16,
              /* method_idx= */ dex::kDexNoIndex,
              /* access_flags= */ 0u,
              /* verified_method= */ nullptr,
              /* dex_cache= */ Handle<mirror::DexCache>());  // Invalid handle.
      CodeItemDebugInfoAccessor accessor(graph->GetDexFile(), code_item, /*dex_method_idx*/ 0u);
      HGraphBuilder builder(graph, dex_compilation_unit, accessor, return_type);
      bool graph_built = (builder.BuildGraph() == kAnalysisSuccess);
      return graph_built ? graph : nullptr;
    }
  }

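  // Builds a minimal control-flow graph (entry -> return -> exit, with HReturnVoid
  // and HExit already in place) that individual tests can then extend.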
  void InitGraph(VariableSizedHandleScope* handles = nullptr) {
    CreateGraph(handles);
    entry_block_ = AddNewBlock();
    return_block_ = AddNewBlock();
    exit_block_ = AddNewBlock();

    graph_->SetEntryBlock(entry_block_);
    graph_->SetExitBlock(exit_block_);

    entry_block_->AddSuccessor(return_block_);
    return_block_->AddSuccessor(exit_block_);

    return_block_->AddInstruction(new (GetAllocator()) HReturnVoid());
    exit_block_->AddInstruction(new (GetAllocator()) HExit());
  }

  void AddParameter(HInstruction* parameter) {
    entry_block_->AddInstruction(parameter);
    parameters_.push_back(parameter);
  }

  HBasicBlock* AddNewBlock() {
    HBasicBlock* block = new (GetAllocator()) HBasicBlock(graph_);
    graph_->AddBlock(block);
    return block;
  }

  // Run GraphChecker with all checks.
  //
  // Returns whether the check was successful, i.e. the graph is valid.
  bool CheckGraph(HGraph* graph, std::ostream& oss = std::cerr) {
    return CheckGraph(graph, /*check_ref_type_info=*/true, oss);
  }

  bool CheckGraph(std::ostream& oss = std::cerr) {
    return CheckGraph(graph_, oss);
  }

  // Run GraphChecker with all checks except reference type information checks.
  //
  // Returns whether the check was successful, i.e. the graph is valid.
  bool CheckGraphSkipRefTypeInfoChecks(HGraph* graph, std::ostream& oss = std::cerr) {
    return CheckGraph(graph, /*check_ref_type_info=*/false, oss);
  }

  bool CheckGraphSkipRefTypeInfoChecks(std::ostream& oss = std::cerr) {
    return CheckGraphSkipRefTypeInfoChecks(graph_, oss);
  }

  HEnvironment* ManuallyBuildEnvFor(HInstruction* instruction,
                                    ArenaVector<HInstruction*>* current_locals) {
    HEnvironment* environment = new (GetAllocator()) HEnvironment(
        (GetAllocator()),
        current_locals->size(),
        graph_->GetArtMethod(),
        instruction->GetDexPc(),
        instruction);

    environment->CopyFrom(ArrayRef<HInstruction* const>(*current_locals));
    instruction->SetRawEnvironment(environment);
    return environment;
  }

  void EnsurePredecessorOrder(HBasicBlock* target, std::initializer_list<HBasicBlock*> preds) {
    // Make sure the given preds and block predecessors have the same blocks.
    BitVector bv(preds.size(), false, Allocator::GetMallocAllocator());
    auto preds_and_idx = ZipCount(MakeIterationRange(target->GetPredecessors()));
    bool correct_preds = preds.size() == target->GetPredecessors().size() &&
                         std::all_of(preds.begin(), preds.end(), [&](HBasicBlock* pred) {
                           return std::any_of(preds_and_idx.begin(),
                                              preds_and_idx.end(),
                                              // Make sure every target predecessor is used only
                                              // once.
                                              [&](std::pair<HBasicBlock*, uint32_t> cur) {
                                                if (cur.first == pred && !bv.IsBitSet(cur.second)) {
                                                  bv.SetBit(cur.second);
                                                  return true;
                                                } else {
                                                  return false;
                                                }
                                              });
                         }) &&
                         bv.NumSetBits() == preds.size();
    auto dump_list = [](auto it) {
      std::ostringstream oss;
      oss << "[";
      bool first = true;
      for (HBasicBlock* b : it) {
        if (!first) {
          oss << ", ";
        }
        first = false;
        oss << b->GetBlockId();
      }
      oss << "]";
      return oss.str();
    };
    ASSERT_TRUE(correct_preds) << "Predecessors of " << target->GetBlockId() << " are "
                               << dump_list(target->GetPredecessors()) << " not "
                               << dump_list(preds);
    if (correct_preds) {
      std::copy(preds.begin(), preds.end(), target->predecessors_.begin());
    }
  }

  AdjacencyListGraph SetupFromAdjacencyList(const std::string_view entry_name,
                                            const std::string_view exit_name,
                                            const std::vector<AdjacencyListGraph::Edge>& adj) {
    return AdjacencyListGraph(graph_, GetAllocator(), entry_name, exit_name, adj);
  }

  void ManuallyBuildEnvFor(HInstruction* ins, const std::initializer_list<HInstruction*>& env) {
    ArenaVector<HInstruction*> current_locals(env, GetAllocator()->Adapter(kArenaAllocInstruction));
    OptimizingUnitTestHelper::ManuallyBuildEnvFor(ins, &current_locals);
  }

  HLoadClass* MakeClassLoad(std::optional<dex::TypeIndex> ti = std::nullopt,
                            std::optional<Handle<mirror::Class>> klass = std::nullopt) {
    return new (GetAllocator()) HLoadClass(graph_->GetCurrentMethod(),
                                           ti ? *ti : dex::TypeIndex(class_idx_++),
                                           graph_->GetDexFile(),
                                           /* klass= */ klass ? *klass : null_klass_,
                                           /* is_referrers_class= */ false,
                                           /* dex_pc= */ 0,
                                           /* needs_access_check= */ false);
  }

  HNewInstance* MakeNewInstance(HInstruction* cls, uint32_t dex_pc = 0u) {
    EXPECT_TRUE(cls->IsLoadClass() || cls->IsClinitCheck()) << *cls;
    HLoadClass* load =
        cls->IsLoadClass() ? cls->AsLoadClass() : cls->AsClinitCheck()->GetLoadClass();
    return new (GetAllocator()) HNewInstance(cls,
                                             dex_pc,
                                             load->GetTypeIndex(),
                                             graph_->GetDexFile(),
                                             /* finalizable= */ false,
                                             QuickEntrypointEnum::kQuickAllocObjectInitialized);
  }

  HInstanceFieldSet* MakeIFieldSet(HInstruction* inst,
                                   HInstruction* data,
                                   MemberOffset off,
                                   uint32_t dex_pc = 0u) {
    return new (GetAllocator()) HInstanceFieldSet(inst,
                                                  data,
                                                  /* field= */ nullptr,
                                                  /* field_type= */ data->GetType(),
                                                  /* field_offset= */ off,
                                                  /* is_volatile= */ false,
                                                  /* field_idx= */ 0,
                                                  /* declaring_class_def_index= */ 0,
                                                  graph_->GetDexFile(),
                                                  dex_pc);
  }

  HInstanceFieldGet* MakeIFieldGet(HInstruction* inst,
                                   DataType::Type type,
                                   MemberOffset off,
                                   uint32_t dex_pc = 0u) {
    return new (GetAllocator()) HInstanceFieldGet(inst,
                                                  /* field= */ nullptr,
                                                  /* field_type= */ type,
                                                  /* field_offset= */ off,
                                                  /* is_volatile= */ false,
                                                  /* field_idx= */ 0,
                                                  /* declaring_class_def_index= */ 0,
                                                  graph_->GetDexFile(),
                                                  dex_pc);
  }

  HInvokeStaticOrDirect* MakeInvoke(DataType::Type return_type,
                                    const std::vector<HInstruction*>& args) {
    MethodReference method_reference{/* file= */ &graph_->GetDexFile(), /* index= */ method_idx_++};
    HInvokeStaticOrDirect* res = new (GetAllocator())
        HInvokeStaticOrDirect(GetAllocator(),
                              args.size(),
                              return_type,
                              /* dex_pc= */ 0,
                              method_reference,
                              /* resolved_method= */ nullptr,
                              HInvokeStaticOrDirect::DispatchInfo{},
                              InvokeType::kStatic,
                              /* resolved_method_reference= */ method_reference,
                              HInvokeStaticOrDirect::ClinitCheckRequirement::kNone);
    for (auto [ins, idx] : ZipCount(MakeIterationRange(args))) {
      res->SetRawInputAt(idx, ins);
    }
    return res;
  }

  HPhi* MakePhi(const std::vector<HInstruction*>& ins) {
    EXPECT_GE(ins.size(), 2u) << "Phi requires at least 2 inputs";
    HPhi* phi =
        new (GetAllocator()) HPhi(GetAllocator(), kNoRegNumber, ins.size(), ins[0]->GetType());
    for (auto [i, idx] : ZipCount(MakeIterationRange(ins))) {
      phi->SetRawInputAt(idx, i);
    }
    return phi;
  }

  void SetupExit(HBasicBlock* exit) {
    exit->AddInstruction(new (GetAllocator()) HExit());
  }

  dex::TypeIndex DefaultTypeIndexForType(DataType::Type type) {
    switch (type) {
      case DataType::Type::kBool:
        return dex::TypeIndex(1);
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        return dex::TypeIndex(2);
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        return dex::TypeIndex(3);
      case DataType::Type::kUint32:
      case DataType::Type::kInt32:
        return dex::TypeIndex(4);
      case DataType::Type::kUint64:
      case DataType::Type::kInt64:
        return dex::TypeIndex(5);
      case DataType::Type::kReference:
        return dex::TypeIndex(6);
      case DataType::Type::kFloat32:
        return dex::TypeIndex(7);
      case DataType::Type::kFloat64:
        return dex::TypeIndex(8);
      case DataType::Type::kVoid:
        EXPECT_TRUE(false) << "No type for void!";
        return dex::TypeIndex(1000);
    }
  }

  // Creates a parameter. The instruction is automatically added to the entry block.
  HParameterValue* MakeParam(DataType::Type type, std::optional<dex::TypeIndex> ti = std::nullopt) {
    HParameterValue* val = new (GetAllocator()) HParameterValue(
        graph_->GetDexFile(), ti ? *ti : DefaultTypeIndexForType(type), param_count_++, type);
    graph_->GetEntryBlock()->AddInstruction(val);
    return val;
  }

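  // Sketch of how the helpers above combine in a typical test body (the field
  // offset and the types are arbitrary example values):
  //
  //   InitGraph();
  //   HInstruction* obj = MakeParam(DataType::Type::kReference);
  //   HInstruction* value = MakeParam(DataType::Type::kInt32);
  //   HInstruction* write = MakeIFieldSet(obj, value, MemberOffset(32), /* dex_pc= */ 0);
  //   return_block_->InsertInstructionBefore(write, return_block_->GetLastInstruction());
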
 protected:
  bool CheckGraph(HGraph* graph, bool check_ref_type_info, std::ostream& oss) {
    GraphChecker checker(graph);
    checker.SetRefTypeInfoCheckEnabled(check_ref_type_info);
    checker.Run();
    checker.Dump(oss);
    return checker.IsValid();
  }

  std::vector<std::unique_ptr<const StandardDexFile>> dex_files_;
  std::unique_ptr<ArenaPoolAndAllocator> pool_and_allocator_;

  HGraph* graph_;
  HBasicBlock* entry_block_;
  HBasicBlock* return_block_;
  HBasicBlock* exit_block_;

  std::vector<HInstruction*> parameters_;

  size_t param_count_ = 0;
  size_t class_idx_ = 42;
  uint32_t method_idx_ = 100;

  ScopedNullHandle<mirror::Class> null_klass_;
};

class OptimizingUnitTest : public CommonArtTest, public OptimizingUnitTestHelper {};

// Naive string diff data type.
typedef std::list<std::pair<std::string, std::string>> diff_t;

// An alias for the empty string used to make it clear that a line is
// removed in a diff.
static const std::string removed = "";  // NOLINT [runtime/string] [4]

// Naive patch command: apply a diff to a string.
inline std::string Patch(const std::string& original, const diff_t& diff) {
  std::string result = original;
  for (const auto& p : diff) {
    std::string::size_type pos = result.find(p.first);
    DCHECK_NE(pos, std::string::npos)
        << "Could not find: \"" << p.first << "\" in \"" << result << "\"";
    result.replace(pos, p.first.size(), p.second);
  }
  return result;
}
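
// Illustrative only (the dump strings below are made-up examples): replace one
// line of an expected graph dump and delete another.
//
//   diff_t diff = {
//     {"  2: Goto 3\n", "  2: Goto 4\n"},
//     {"  3: Exit\n", removed},
//   };
//   std::string expected_after = Patch(expected_before, diff);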

// Returns whether the instruction has been removed from the graph.
inline bool IsRemoved(HInstruction* instruction) {
  return instruction->GetBlock() == nullptr;
}

inline std::ostream& operator<<(std::ostream& oss, const AdjacencyListGraph& alg) {
  return alg.Dump(oss);
}

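// Graph visitor that dispatches each visited instruction to the caller-supplied
// handler whose parameter type matches that instruction's concrete kind. This is
// the machinery behind FindAllInstructions() and FindSingleInstruction() below.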
class PatternMatchGraphVisitor : public HGraphVisitor {
 private:
  struct HandlerWrapper {
   public:
    virtual ~HandlerWrapper() {}
    virtual void operator()(HInstruction* h) = 0;
  };

  template <HInstruction::InstructionKind kKind, typename F>
  struct KindWrapper;

#define GEN_HANDLER(nm, unused)                                                         \
  template <typename F>                                                                 \
  struct KindWrapper<HInstruction::InstructionKind::k##nm, F> : public HandlerWrapper { \
   public:                                                                              \
    explicit KindWrapper(F f) : f_(f) {}                                                \
    void operator()(HInstruction* h) override {                                         \
      if constexpr (std::is_invocable_v<F, H##nm*>) {                                   \
        f_(h->As##nm());                                                                \
      } else {                                                                          \
        LOG(FATAL) << "Incorrect call with " << #nm;                                    \
      }                                                                                 \
    }                                                                                   \
                                                                                        \
   private:                                                                             \
    F f_;                                                                               \
  };

  FOR_EACH_CONCRETE_INSTRUCTION(GEN_HANDLER)
#undef GEN_HANDLER

  template <typename F>
  std::unique_ptr<HandlerWrapper> GetWrapper(HInstruction::InstructionKind kind, F f) {
    switch (kind) {
#define GEN_GETTER(nm, unused)                                         \
      case HInstruction::InstructionKind::k##nm:                       \
        return std::unique_ptr<HandlerWrapper>(                        \
            new KindWrapper<HInstruction::InstructionKind::k##nm, F>(f));
      FOR_EACH_CONCRETE_INSTRUCTION(GEN_GETTER)
#undef GEN_GETTER
      default:
        LOG(FATAL) << "Unable to handle kind " << kind;
        return nullptr;
    }
  }

 public:
  template <typename... Inst>
  explicit PatternMatchGraphVisitor(HGraph* graph, Inst... handlers) : HGraphVisitor(graph) {
    FillHandlers(handlers...);
  }

  void VisitInstruction(HInstruction* instruction) override {
    auto& h = handlers_[instruction->GetKind()];
    if (h.get() != nullptr) {
      (*h)(instruction);
    }
  }

 private:
  template <typename Func>
  constexpr HInstruction::InstructionKind GetKind() {
#define CHECK_INST(nm, unused)                          \
    if constexpr (std::is_invocable_v<Func, H##nm*>) {  \
      return HInstruction::InstructionKind::k##nm;      \
    }
    FOR_EACH_CONCRETE_INSTRUCTION(CHECK_INST);
#undef CHECK_INST
    static_assert(!std::is_invocable_v<Func, HInstruction*>,
                  "Use on generic HInstruction not allowed");
#define STATIC_ASSERT_ABSTRACT(nm, unused) && !std::is_invocable_v<Func, H##nm*>
    static_assert(true FOR_EACH_ABSTRACT_INSTRUCTION(STATIC_ASSERT_ABSTRACT),
                  "Must not be abstract instruction");
#undef STATIC_ASSERT_ABSTRACT
#define STATIC_ASSERT_CONCRETE(nm, unused) || std::is_invocable_v<Func, H##nm*>
    static_assert(false FOR_EACH_CONCRETE_INSTRUCTION(STATIC_ASSERT_CONCRETE),
                  "Must be a concrete instruction");
#undef STATIC_ASSERT_CONCRETE
    return HInstruction::InstructionKind::kLastInstructionKind;
  }

  template <typename First>
  void FillHandlers(First h1) {
    HInstruction::InstructionKind type = GetKind<First>();
    CHECK_NE(type, HInstruction::kLastInstructionKind)
        << "Unknown instruction kind. Only concrete ones please.";
    handlers_[type] = GetWrapper(type, h1);
  }

  template <typename First, typename... Inst>
  void FillHandlers(First h1, Inst... handlers) {
    FillHandlers(h1);
    FillHandlers<Inst...>(handlers...);
  }

  std::array<std::unique_ptr<HandlerWrapper>, HInstruction::InstructionKind::kLastInstructionKind>
      handlers_;
};

template <typename... Target>
std::tuple<std::vector<Target*>...> FindAllInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<std::vector<Target*>...> res;
  PatternMatchGraphVisitor vis(
      graph, [&](Target* t) { std::get<std::vector<Target*>>(res).push_back(t); }...);

  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}

template <typename... Target>
std::tuple<Target*...> FindSingleInstructions(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  std::tuple<Target*...> res;
  PatternMatchGraphVisitor vis(graph, [&](Target* t) {
    EXPECT_EQ(std::get<Target*>(res), nullptr)
        << *std::get<Target*>(res) << " already found but found " << *t << "!";
    std::get<Target*>(res) = t;
  }...);
  if (std::holds_alternative<std::initializer_list<HBasicBlock*>>(blks)) {
    for (HBasicBlock* blk : std::get<std::initializer_list<HBasicBlock*>>(blks)) {
      vis.VisitBasicBlock(blk);
    }
  } else if (std::holds_alternative<std::nullopt_t>(blks)) {
    vis.VisitInsertionOrder();
  } else {
    vis.VisitBasicBlock(std::get<HBasicBlock*>(blks));
  }
  return res;
}

template <typename Target>
Target* FindSingleInstruction(
    HGraph* graph,
    std::variant<std::nullopt_t, HBasicBlock*, std::initializer_list<HBasicBlock*>> blks =
        std::nullopt) {
  return std::get<Target*>(FindSingleInstructions<Target>(graph, blks));
}
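
// Sketch of typical usage (the instruction types and the block name are arbitrary
// examples; `alg` is an AdjacencyListGraph as above):
//
//   auto [phis, invokes] = FindAllInstructions<HPhi, HInvokeStaticOrDirect>(graph_);
//   HIf* if_inst = FindSingleInstruction<HIf>(graph_, alg.Get("loop_header"));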

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_OPTIMIZING_UNIT_TEST_H_