/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scheduler_arm64.h"

#include "code_generator_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"

namespace art {
namespace arm64 {

void SchedulingLatencyVisitorARM64::VisitBinaryOperation(HBinaryOperation* instr) {
  last_visited_latency_ = Primitive::IsFloatingPointType(instr->GetResultType())
      ? kArm64FloatingPointOpLatency
      : kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitBitwiseNegatedRight(
    HBitwiseNegatedRight* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddress(
    HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64IntegerOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instr ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  last_visited_latency_ = kArm64DataProcWithShifterOpLatency + 2;
}

void SchedulingLatencyVisitorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayGet(HArrayGet* instruction) {
  if (!instruction->GetArray()->IsIntermediateAddress()) {
    // Take the intermediate address computation into account.
    last_visited_internal_latency_ = kArm64IntegerOpLatency;
  }
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArrayLength(HArrayLength* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitArraySet(HArraySet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryStoreLatency;
}

void SchedulingLatencyVisitorARM64::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
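  // The check itself is a compare; the conditional branch to the slow path is
  // not taken in the common case.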
  last_visited_internal_latency_ = kArm64IntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitDiv(HDiv* instr) {
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimFloat:
      last_visited_latency_ = kArm64DivFloatLatency;
      break;
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArm64DivDoubleLatency;
      break;
    default:
      // Follow the code path used by code generation.
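      // A sketch of the constant-divisor cases modeled below (assuming the
      // usual strength reduction performed by the ARM64 code generator):
      //   x / 0        -> no code; a preceding HDivZeroCheck throws instead.
      //   x / 1, -1    -> at most a mov/neg: one integer op.
      //   x / 2^n      -> a short add/shift sequence.
      //   other consts -> a "magic number" multiply plus shift/add fix-ups.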
      if (instr->GetRight()->IsConstant()) {
        int64_t imm = Int64FromConstant(instr->GetRight()->AsConstant());
        if (imm == 0) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = 0;
        } else if (imm == 1 || imm == -1) {
          last_visited_internal_latency_ = 0;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else if (IsPowerOfTwo(AbsOrMin(imm))) {
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64IntegerOpLatency;
        } else {
          DCHECK(imm <= -2 || imm >= 2);
          last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
          last_visited_latency_ = kArm64MulIntegerLatency;
        }
      } else {
        last_visited_latency_ = kArm64DivIntegerLatency;
      }
      break;
  }
}

void SchedulingLatencyVisitorARM64::VisitInstanceFieldGet(HInstanceFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
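  // Type checks may have to walk the class hierarchy or call into the runtime
  // on a slow path, hence the call-sized internal latency.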
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64IntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64LoadStringInternalLatency;
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitMul(HMul* instr) {
  last_visited_latency_ = Primitive::IsFloatingPointType(instr->GetResultType())
      ? kArm64MulFloatingPointLatency
      : kArm64MulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArm64IntegerOpLatency + kArm64CallInternalLatency;
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
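    // Strings are allocated through StringFactory: the entry point is first
    // loaded from the current Thread, which accounts for the extra memory load
    // modeled here.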
    last_visited_internal_latency_ = 2 + kArm64MemoryLoadLatency + kArm64CallInternalLatency;
  } else {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
  }
  last_visited_latency_ = kArm64CallLatency;
}

void SchedulingLatencyVisitorARM64::VisitRem(HRem* instruction) {
  if (Primitive::IsFloatingPointType(instruction->GetResultType())) {
    last_visited_internal_latency_ = kArm64CallInternalLatency;
    last_visited_latency_ = kArm64CallLatency;
  } else {
    // Follow the code path used by code generation.
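    // Same constant-divisor cases as in VisitDiv; in addition, the general
    // constant and non-constant paths end with a multiply-subtract (msub) to
    // reconstruct the remainder, hence the multiply result latency.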
    if (instruction->GetRight()->IsConstant()) {
      int64_t imm = Int64FromConstant(instruction->GetRight()->AsConstant());
      if (imm == 0) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = 0;
      } else if (imm == 1 || imm == -1) {
        last_visited_internal_latency_ = 0;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else if (IsPowerOfTwo(AbsOrMin(imm))) {
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64IntegerOpLatency;
      } else {
        DCHECK(imm <= -2 || imm >= 2);
        last_visited_internal_latency_ = 4 * kArm64IntegerOpLatency;
        last_visited_latency_ = kArm64MulIntegerLatency;
      }
    } else {
      last_visited_internal_latency_ = kArm64DivIntegerLatency;
      last_visited_latency_ = kArm64MulIntegerLatency;
    }
  }
}

void SchedulingLatencyVisitorARM64::VisitStaticFieldGet(HStaticFieldGet* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64MemoryLoadLatency;
}

void SchedulingLatencyVisitorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
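  // The suspend check only tests a thread flag and may branch to a slow path;
  // it produces no data result.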
  HBasicBlock* block = instruction->GetBlock();
  DCHECK((block->GetLoopInformation() != nullptr) ||
         (block->IsEntryBlock() && instruction->GetNext()->IsGoto()));
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

void SchedulingLatencyVisitorARM64::VisitTypeConversion(HTypeConversion* instr) {
  if (Primitive::IsFloatingPointType(instr->GetResultType()) ||
      Primitive::IsFloatingPointType(instr->GetInputType())) {
    last_visited_latency_ = kArm64TypeConversionFloatingPointIntegerLatency;
  } else {
    last_visited_latency_ = kArm64IntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::HandleSimpleArithmeticSIMD(HVecOperation* instr) {
  if (Primitive::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDFloatingPointOpLatency;
  } else {
    last_visited_latency_ = kArm64SIMDIntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecReplicateScalar(
    HVecReplicateScalar* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDReplicateOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecSetScalars(HVecSetScalars* instr) {
  LOG(FATAL) << "Unsupported SIMD instruction " << instr->GetId();
}

void SchedulingLatencyVisitorARM64::VisitVecSumReduce(HVecSumReduce* instr) {
  LOG(FATAL) << "Unsupported SIMD instruction " << instr->GetId();
}

void SchedulingLatencyVisitorARM64::VisitVecCnv(HVecCnv* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDTypeConversionInt2FPLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecNeg(HVecNeg* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAbs(HVecAbs* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecNot(HVecNot* instr) {
  if (instr->GetPackedType() == Primitive::kPrimBoolean) {
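    // A boolean not needs an extra instruction (materialize the constant 1,
    // then eor), modeled here as additional internal latency.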
    last_visited_internal_latency_ = kArm64SIMDIntegerOpLatency;
  }
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAdd(HVecAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecHalvingAdd(HVecHalvingAdd* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecSub(HVecSub* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMul(HVecMul* instr) {
  if (Primitive::IsFloatingPointType(instr->GetPackedType())) {
    last_visited_latency_ = kArm64SIMDMulFloatingPointLatency;
  } else {
    last_visited_latency_ = kArm64SIMDMulIntegerLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecDiv(HVecDiv* instr) {
  if (instr->GetPackedType() == Primitive::kPrimFloat) {
    last_visited_latency_ = kArm64SIMDDivFloatLatency;
  } else {
    DCHECK(instr->GetPackedType() == Primitive::kPrimDouble);
    last_visited_latency_ = kArm64SIMDDivDoubleLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecMin(HVecMin* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMax(HVecMax* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecAnd(HVecAnd* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecAndNot(HVecAndNot* instr) {
  LOG(FATAL) << "Unsupported SIMD instruction " << instr->GetId();
}

void SchedulingLatencyVisitorARM64::VisitVecOr(HVecOr* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecXor(HVecXor* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDIntegerOpLatency;
}

void SchedulingLatencyVisitorARM64::VisitVecShl(HVecShl* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecShr(HVecShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecUShr(HVecUShr* instr) {
  HandleSimpleArithmeticSIMD(instr);
}

void SchedulingLatencyVisitorARM64::VisitVecMultiplyAccumulate(
    HVecMultiplyAccumulate* instr ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArm64SIMDMulIntegerLatency;
}

void SchedulingLatencyVisitorARM64::HandleVecAddress(
    HVecMemoryOperation* instruction,
    size_t size ATTRIBUTE_UNUSED) {
  HInstruction* index = instruction->InputAt(1);
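  // A constant index folds into the load/store addressing mode; a non-constant
  // index needs an extra add (with a shifted register operand) to form the
  // address.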
  if (!index->IsConstant()) {
    last_visited_internal_latency_ += kArm64DataProcWithShifterOpLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecLoad(HVecLoad* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = Primitive::ComponentSize(instr->GetPackedType());

  if (instr->GetPackedType() == Primitive::kPrimChar
      && mirror::kUseStringCompression
      && instr->IsStringCharAt()) {
    // Set latencies for the uncompressed case.
    last_visited_internal_latency_ += kArm64MemoryLoadLatency + kArm64BranchLatency;
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  } else {
    HandleVecAddress(instr, size);
    last_visited_latency_ = kArm64SIMDMemoryLoadLatency;
  }
}

void SchedulingLatencyVisitorARM64::VisitVecStore(HVecStore* instr) {
  last_visited_internal_latency_ = 0;
  size_t size = Primitive::ComponentSize(instr->GetPackedType());
  HandleVecAddress(instr, size);
  last_visited_latency_ = kArm64SIMDMemoryStoreLatency;
}

}  // namespace arm64
}  // namespace art