/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <memory>
#include <vector>

#include "arch/instruction_set.h"
#include "cfi_test.h"
#include "gtest/gtest.h"
#include "optimizing/code_generator.h"
#include "optimizing/optimizing_unit_test.h"
#include "utils/assembler.h"
#include "utils/arm/assembler_thumb2.h"
#include "utils/mips/assembler_mips.h"
#include "utils/mips64/assembler_mips64.h"

#include "optimizing/optimizing_cfi_test_expected.inc"

namespace art {

// Run the tests only on host.
#ifndef ART_TARGET_ANDROID

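// Test fixture that runs the Optimizing compiler's frame entry/exit code generation
// for a given ISA and checks the emitted assembly and DWARF CFI against expected output.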
class OptimizingCFITest : public CFITest {
 public:
  // Enable this flag to generate the expected outputs.
  static constexpr bool kGenerateExpected = false;

  OptimizingCFITest()
      : pool_(),
        allocator_(&pool_),
        opts_(),
        isa_features_(),
        graph_(nullptr),
        code_gen_(),
        blocks_(allocator_.Adapter()) {}

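  // Creates a code generator for `isa` and emits a frame entry that spills two core
  // and two floating-point callee-save registers.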
  void SetUpFrame(InstructionSet isa) {
    // Set up a simple context.
    std::string error;
    isa_features_.reset(InstructionSetFeatures::FromVariant(isa, "default", &error));
    graph_ = CreateGraph(&allocator_);
    // Generate a simple frame with some spills.
    code_gen_ = CodeGenerator::Create(graph_, isa, *isa_features_, opts_);
    code_gen_->GetAssembler()->cfi().SetEnabled(true);
    const int frame_size = 64;
    int core_reg = 0;
    int fp_reg = 0;
    for (int i = 0; i < 2; i++) {  // Two registers of each kind.
      for (; core_reg < 32; core_reg++) {
        if (code_gen_->IsCoreCalleeSaveRegister(core_reg)) {
          auto location = Location::RegisterLocation(core_reg);
          code_gen_->AddAllocatedRegister(location);
          core_reg++;
          break;
        }
      }
      for (; fp_reg < 32; fp_reg++) {
        if (code_gen_->IsFloatingPointCalleeSaveRegister(fp_reg)) {
          auto location = Location::FpuRegisterLocation(fp_reg);
          code_gen_->AddAllocatedRegister(location);
          fp_reg++;
          break;
        }
      }
    }
    code_gen_->block_order_ = &blocks_;
    code_gen_->ComputeSpillMask();
    code_gen_->SetFrameSize(frame_size);
    code_gen_->GenerateFrameEntry();
  }

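  // Emits the frame exit and finalizes the generated code into code_allocator_.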
  void Finish() {
    code_gen_->GenerateFrameExit();
    code_gen_->Finalize(&code_allocator_);
  }

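  // Compares the generated assembly and CFI against the expected byte sequences,
  // or prints new expected output if kGenerateExpected is set.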
  void Check(InstructionSet isa,
             const char* isa_str,
             const std::vector<uint8_t>& expected_asm,
             const std::vector<uint8_t>& expected_cfi) {
    // Get the outputs.
    const std::vector<uint8_t>& actual_asm = code_allocator_.GetMemory();
    Assembler* opt_asm = code_gen_->GetAssembler();
    const std::vector<uint8_t>& actual_cfi = *(opt_asm->cfi().data());

    if (kGenerateExpected) {
      GenerateExpected(stdout, isa, isa_str, actual_asm, actual_cfi);
    } else {
      EXPECT_EQ(expected_asm, actual_asm);
      EXPECT_EQ(expected_cfi, actual_cfi);
    }
  }

  void TestImpl(InstructionSet isa,
                const char* isa_str,
                const std::vector<uint8_t>& expected_asm,
                const std::vector<uint8_t>& expected_cfi) {
    SetUpFrame(isa);
    Finish();
    Check(isa, isa_str, expected_asm, expected_cfi);
  }

  CodeGenerator* GetCodeGenerator() {
    return code_gen_.get();
  }

 private:
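  // Trivial code allocator that stores the generated code in a plain byte vector
  // so the test can inspect it.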
  class InternalCodeAllocator : public CodeAllocator {
   public:
    InternalCodeAllocator() {}

    virtual uint8_t* Allocate(size_t size) {
      memory_.resize(size);
      return memory_.data();
    }

    const std::vector<uint8_t>& GetMemory() { return memory_; }

   private:
    std::vector<uint8_t> memory_;

    DISALLOW_COPY_AND_ASSIGN(InternalCodeAllocator);
  };

  ArenaPool pool_;
  ArenaAllocator allocator_;
  CompilerOptions opts_;
  std::unique_ptr<const InstructionSetFeatures> isa_features_;
  HGraph* graph_;
  std::unique_ptr<CodeGenerator> code_gen_;
  ArenaVector<HBasicBlock*> blocks_;
  InternalCodeAllocator code_allocator_;
};

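// Instantiates a test for the given ISA that checks the generated frame code and CFI
// against the expected_asm_##isa and expected_cfi_##isa arrays from the .inc file.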
#define TEST_ISA(isa)                                         \
  TEST_F(OptimizingCFITest, isa) {                            \
    std::vector<uint8_t> expected_asm(                        \
        expected_asm_##isa,                                   \
        expected_asm_##isa + arraysize(expected_asm_##isa));  \
    std::vector<uint8_t> expected_cfi(                        \
        expected_cfi_##isa,                                   \
        expected_cfi_##isa + arraysize(expected_cfi_##isa));  \
    TestImpl(isa, #isa, expected_asm, expected_cfi);          \
  }

TEST_ISA(kThumb2)
TEST_ISA(kArm64)
TEST_ISA(kX86)
TEST_ISA(kX86_64)
TEST_ISA(kMips)
TEST_ISA(kMips64)

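// The *Adjust tests below push a branch target out of the short branch's range, forcing
// the assembler to expand the branch, and check that the emitted CFI still matches the
// expected output.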
TEST_F(OptimizingCFITest, kThumb2Adjust) {
  std::vector<uint8_t> expected_asm(
      expected_asm_kThumb2_adjust,
      expected_asm_kThumb2_adjust + arraysize(expected_asm_kThumb2_adjust));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kThumb2_adjust,
      expected_cfi_kThumb2_adjust + arraysize(expected_cfi_kThumb2_adjust));
  SetUpFrame(kThumb2);
#define __ down_cast<arm::Thumb2Assembler*>(GetCodeGenerator()->GetAssembler())->
  Label target;
  __ CompareAndBranchIfZero(arm::R0, &target);
  // Push the target out of range of CBZ.
  for (size_t i = 0; i != 65; ++i) {
    __ ldr(arm::R0, arm::Address(arm::R0));
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kThumb2, "kThumb2_adjust", expected_asm, expected_cfi);
}

TEST_F(OptimizingCFITest, kMipsAdjust) {
  // One NOP in the delay slot; 1 << 15 NOPs have size 1 << 17, exceeding the 18-bit signed maximum.
  static constexpr size_t kNumNops = 1u + (1u << 15);
  std::vector<uint8_t> expected_asm(
      expected_asm_kMips_adjust_head,
      expected_asm_kMips_adjust_head + arraysize(expected_asm_kMips_adjust_head));
  expected_asm.resize(expected_asm.size() + kNumNops * 4u, 0u);
  expected_asm.insert(
      expected_asm.end(),
      expected_asm_kMips_adjust_tail,
      expected_asm_kMips_adjust_tail + arraysize(expected_asm_kMips_adjust_tail));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kMips_adjust,
      expected_cfi_kMips_adjust + arraysize(expected_cfi_kMips_adjust));
  SetUpFrame(kMips);
#define __ down_cast<mips::MipsAssembler*>(GetCodeGenerator()->GetAssembler())->
  mips::MipsLabel target;
  __ Beqz(mips::A0, &target);
  // Push the target out of range of BEQZ.
  for (size_t i = 0; i != kNumNops; ++i) {
    __ Nop();
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kMips, "kMips_adjust", expected_asm, expected_cfi);
}

TEST_F(OptimizingCFITest, kMips64Adjust) {
  // One NOP in the forbidden slot; 1 << 15 NOPs have size 1 << 17, exceeding the 18-bit signed maximum.
  static constexpr size_t kNumNops = 1u + (1u << 15);
  std::vector<uint8_t> expected_asm(
      expected_asm_kMips64_adjust_head,
      expected_asm_kMips64_adjust_head + arraysize(expected_asm_kMips64_adjust_head));
  expected_asm.resize(expected_asm.size() + kNumNops * 4u, 0u);
  expected_asm.insert(
      expected_asm.end(),
      expected_asm_kMips64_adjust_tail,
      expected_asm_kMips64_adjust_tail + arraysize(expected_asm_kMips64_adjust_tail));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kMips64_adjust,
      expected_cfi_kMips64_adjust + arraysize(expected_cfi_kMips64_adjust));
  SetUpFrame(kMips64);
#define __ down_cast<mips64::Mips64Assembler*>(GetCodeGenerator()->GetAssembler())->
  mips64::Mips64Label target;
  __ Beqc(mips64::A1, mips64::A2, &target);
  // Push the target out of range of BEQC.
  for (size_t i = 0; i != kNumNops; ++i) {
    __ Nop();
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kMips64, "kMips64_adjust", expected_asm, expected_cfi);
}

#endif  // ART_TARGET_ANDROID

}  // namespace art