v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
instruction-scheduler-arm64.cc
Go to the documentation of this file.
1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.

#include "src/compiler/backend/instruction-scheduler.h"

7namespace v8 {
8namespace internal {
9namespace compiler {
10
11bool InstructionScheduler::SchedulerSupported() { return true; }
12
14 const Instruction* instr) const {
15 switch (instr->arch_opcode()) {
16 case kArm64Add:
17 case kArm64Add32:
18 case kArm64And:
19 case kArm64And32:
20 case kArm64Bic:
21 case kArm64Bic32:
22 case kArm64Clz:
23 case kArm64Clz32:
24 case kArm64Cmp:
25 case kArm64Cmp32:
26 case kArm64Cmn:
27 case kArm64Cmn32:
28 case kArm64Cnt:
29 case kArm64Cnt32:
30 case kArm64Cnt64:
31 case kArm64Tst:
32 case kArm64Tst32:
33 case kArm64Or:
34 case kArm64Or32:
35 case kArm64Orn:
36 case kArm64Orn32:
37 case kArm64Eor:
38 case kArm64Eor32:
39 case kArm64Eon:
40 case kArm64Eon32:
41 case kArm64Sub:
42 case kArm64Sub32:
43 case kArm64Mul:
44 case kArm64Mul32:
45 case kArm64Smulh:
46 case kArm64Smull:
47 case kArm64Smull2:
48 case kArm64Umulh:
49 case kArm64Umull:
50 case kArm64Umull2:
51 case kArm64Madd:
52 case kArm64Madd32:
53 case kArm64Msub:
54 case kArm64Msub32:
55 case kArm64Mneg:
56 case kArm64Mneg32:
57 case kArm64Idiv:
58 case kArm64Idiv32:
59 case kArm64Udiv:
60 case kArm64Udiv32:
61 case kArm64Imod:
62 case kArm64Imod32:
63 case kArm64Umod:
64 case kArm64Umod32:
65 case kArm64Not:
66 case kArm64Not32:
67 case kArm64Lsl:
68 case kArm64Lsl32:
69 case kArm64Lsr:
70 case kArm64Lsr32:
71 case kArm64Asr:
72 case kArm64Asr32:
73 case kArm64Ror:
74 case kArm64Ror32:
75 case kArm64Mov32:
76 case kArm64Sxtb:
77 case kArm64Sxtb32:
78 case kArm64Sxth:
79 case kArm64Sxth32:
80 case kArm64Sxtw:
81 case kArm64Sbfx:
82 case kArm64Sbfx32:
83 case kArm64Ubfx:
84 case kArm64Ubfx32:
85 case kArm64Ubfiz32:
86 case kArm64Sbfiz:
87 case kArm64Bfi:
88 case kArm64Rbit:
89 case kArm64Rbit32:
90 case kArm64Rev:
91 case kArm64Rev32:
92 case kArm64Float16RoundDown:
93 case kArm64Float16RoundTiesEven:
94 case kArm64Float16RoundTruncate:
95 case kArm64Float16RoundUp:
96 case kArm64Float32Cmp:
97 case kArm64Float32Add:
98 case kArm64Float32Sub:
99 case kArm64Float32Mul:
100 case kArm64Float32Div:
101 case kArm64Float32Abs:
102 case kArm64Float32Abd:
103 case kArm64Float32Neg:
104 case kArm64Float32Sqrt:
105 case kArm64Float32Fnmul:
106 case kArm64Float32RoundDown:
107 case kArm64Float32Max:
108 case kArm64Float32Min:
109 case kArm64Float64Cmp:
110 case kArm64Float64Add:
111 case kArm64Float64Sub:
112 case kArm64Float64Mul:
113 case kArm64Float64Div:
114 case kArm64Float64Max:
115 case kArm64Float64Min:
116 case kArm64Float64Abs:
117 case kArm64Float64Abd:
118 case kArm64Float64Neg:
119 case kArm64Float64Sqrt:
120 case kArm64Float64Fnmul:
121 case kArm64Float64RoundDown:
122 case kArm64Float64RoundTiesAway:
123 case kArm64Float64RoundTruncate:
124 case kArm64Float64RoundTiesEven:
125 case kArm64Float64RoundUp:
126 case kArm64Float32RoundTiesEven:
127 case kArm64Float32RoundTruncate:
128 case kArm64Float32RoundUp:
129 case kArm64Float32ToFloat64:
130 case kArm64Float64ToFloat32:
131 case kArm64Float64ToFloat16RawBits:
132 case kArm64Float16RawBitsToFloat64:
133 case kArm64Float32ToInt32:
134 case kArm64Float64ToInt32:
135 case kArm64Float32ToUint32:
136 case kArm64Float64ToUint32:
137 case kArm64Float32ToInt64:
138 case kArm64Float64ToInt64:
139 case kArm64Float32ToUint64:
140 case kArm64Float64ToUint64:
141 case kArm64Int32ToFloat32:
142 case kArm64Int32ToFloat64:
143 case kArm64Int64ToFloat32:
144 case kArm64Int64ToFloat64:
145 case kArm64Uint32ToFloat32:
146 case kArm64Uint32ToFloat64:
147 case kArm64Uint64ToFloat32:
148 case kArm64Uint64ToFloat64:
149 case kArm64Float64ExtractLowWord32:
150 case kArm64Float64ExtractHighWord32:
151 case kArm64Float64InsertLowWord32:
152 case kArm64Float64InsertHighWord32:
153 case kArm64Float64Mod:
154 case kArm64Float64MoveU64:
155 case kArm64U64MoveFloat64:
156 case kArm64Float64SilenceNaN:
157#if V8_ENABLE_WEBASSEMBLY
158 case kArm64Sadalp:
159 case kArm64Saddlp:
160 case kArm64Uadalp:
161 case kArm64Uaddlp:
162 case kArm64Smlal:
163 case kArm64Smlal2:
164 case kArm64Umlal:
165 case kArm64Umlal2:
166 case kArm64FAdd:
167 case kArm64FSub:
168 case kArm64FMul:
169 case kArm64FMulElement:
170 case kArm64FDiv:
171 case kArm64FMin:
172 case kArm64FMax:
173 case kArm64FEq:
174 case kArm64FNe:
175 case kArm64FLt:
176 case kArm64FLe:
177 case kArm64FGt:
178 case kArm64FGe:
179 case kArm64FExtractLane:
180 case kArm64FReplaceLane:
181 case kArm64FSplat:
182 case kArm64FAbs:
183 case kArm64FNeg:
184 case kArm64FSqrt:
185 case kArm64F64x2Qfma:
186 case kArm64F64x2Qfms:
187 case kArm64F64x2Pmin:
188 case kArm64F64x2Pmax:
189 case kArm64F64x2ConvertLowI32x4S:
190 case kArm64F64x2ConvertLowI32x4U:
191 case kArm64F64x2PromoteLowF32x4:
192 case kArm64F32x4SConvertI32x4:
193 case kArm64F32x4UConvertI32x4:
194 case kArm64F32x4Qfma:
195 case kArm64F32x4Qfms:
196 case kArm64F32x4Pmin:
197 case kArm64F32x4Pmax:
198 case kArm64F32x4DemoteF64x2Zero:
199 case kArm64F16x8Pmin:
200 case kArm64F16x8Pmax:
201 case kArm64F32x4PromoteLowF16x8:
202 case kArm64F16x8SConvertI16x8:
203 case kArm64F16x8UConvertI16x8:
204 case kArm64F16x8DemoteF32x4Zero:
205 case kArm64F16x8DemoteF64x2Zero:
206 case kArm64I16x8SConvertF16x8:
207 case kArm64I16x8UConvertF16x8:
208 case kArm64F16x8Qfma:
209 case kArm64F16x8Qfms:
210 case kArm64IExtractLane:
211 case kArm64IReplaceLane:
212 case kArm64ISplat:
213 case kArm64IAbs:
214 case kArm64INeg:
215 case kArm64Mla:
216 case kArm64Mls:
217 case kArm64I64x2Shl:
218 case kArm64I64x2ShrS:
219 case kArm64I64x2Mul:
220 case kArm64I64x2ShrU:
221 case kArm64I64x2BitMask:
222 case kArm64I32x4SConvertF32x4:
223 case kArm64Sxtl:
224 case kArm64Sxtl2:
225 case kArm64Uxtl:
226 case kArm64Uxtl2:
227 case kArm64I32x4Shl:
228 case kArm64I32x4ShrS:
229 case kArm64I32x4Mul:
230 case kArm64I32x4UConvertF32x4:
231 case kArm64I32x4ShrU:
232 case kArm64I32x4BitMask:
233 case kArm64I32x4DotI16x8S:
234 case kArm64I16x8DotI8x16S:
235 case kArm64I32x4DotI8x16AddS:
236 case kArm64I8x16Addv:
237 case kArm64I16x8Addv:
238 case kArm64I32x4Addv:
239 case kArm64I64x2AddPair:
240 case kArm64F32x4AddReducePairwise:
241 case kArm64F64x2AddPair:
242 case kArm64I32x4TruncSatF64x2SZero:
243 case kArm64I32x4TruncSatF64x2UZero:
244 case kArm64IExtractLaneU:
245 case kArm64IExtractLaneS:
246 case kArm64I16x8Shl:
247 case kArm64I16x8ShrS:
248 case kArm64I16x8SConvertI32x4:
249 case kArm64I16x8Mul:
250 case kArm64I16x8ShrU:
251 case kArm64I16x8UConvertI32x4:
252 case kArm64I16x8Q15MulRSatS:
253 case kArm64I16x8BitMask:
254 case kArm64I8x16Shl:
255 case kArm64I8x16ShrS:
256 case kArm64I8x16SConvertI16x8:
257 case kArm64I8x16UConvertI16x8:
258 case kArm64I8x16ShrU:
259 case kArm64I8x16BitMask:
260 case kArm64S128Const:
261 case kArm64S128Dup:
262 case kArm64S128And:
263 case kArm64S128Or:
264 case kArm64S128Xor:
265 case kArm64S128Not:
266 case kArm64S128Select:
267 case kArm64S128AndNot:
268 case kArm64Ssra:
269 case kArm64Usra:
270 case kArm64S64x2UnzipLeft:
271 case kArm64S64x2UnzipRight:
272 case kArm64S32x4ZipLeft:
273 case kArm64S32x4ZipRight:
274 case kArm64S32x4UnzipLeft:
275 case kArm64S32x4UnzipRight:
276 case kArm64S32x4TransposeLeft:
277 case kArm64S32x4TransposeRight:
278 case kArm64S32x4OneLaneSwizzle:
279 case kArm64S64x1Shuffle:
280 case kArm64S64x2Shuffle:
281 case kArm64S32x1Shuffle:
282 case kArm64S32x2Shuffle:
283 case kArm64S32x4Shuffle:
284 case kArm64S16x1Shuffle:
285 case kArm64S16x2Shuffle:
286 case kArm64S8x2Shuffle:
287 case kArm64S16x8ZipLeft:
288 case kArm64S16x8ZipRight:
289 case kArm64S16x8UnzipLeft:
290 case kArm64S16x8UnzipRight:
291 case kArm64S16x8TransposeLeft:
292 case kArm64S16x8TransposeRight:
293 case kArm64S8x16ZipLeft:
294 case kArm64S8x16ZipRight:
295 case kArm64S8x16UnzipLeft:
296 case kArm64S8x16UnzipRight:
297 case kArm64S8x16TransposeLeft:
298 case kArm64S8x16TransposeRight:
299 case kArm64S8x16Concat:
300 case kArm64I8x16Swizzle:
301 case kArm64I8x16Shuffle:
302 case kArm64S32x4Reverse:
303 case kArm64S32x2Reverse:
304 case kArm64S16x4Reverse:
305 case kArm64S16x2Reverse:
306 case kArm64S8x8Reverse:
307 case kArm64S8x4Reverse:
308 case kArm64S8x2Reverse:
309 case kArm64V128AnyTrue:
310 case kArm64I64x2AllTrue:
311 case kArm64I32x4AllTrue:
312 case kArm64I16x8AllTrue:
313 case kArm64I8x16AllTrue:
314 case kArm64RoundingAverageU:
315 case kArm64IAdd:
316 case kArm64ISub:
317 case kArm64IEq:
318 case kArm64INe:
319 case kArm64IGtS:
320 case kArm64IGeS:
321 case kArm64ILtS:
322 case kArm64ILeS:
323 case kArm64IMinS:
324 case kArm64IMaxS:
325 case kArm64IMinU:
326 case kArm64IMaxU:
327 case kArm64IGtU:
328 case kArm64IGeU:
329 case kArm64IAddSatS:
330 case kArm64ISubSatS:
331 case kArm64IAddSatU:
332 case kArm64ISubSatU:
333 case kArm64Bcax:
334 case kArm64Eor3:
335#endif // V8_ENABLE_WEBASSEMBLY
336 case kArm64TestAndBranch32:
337 case kArm64TestAndBranch:
338 case kArm64CompareAndBranch32:
339 case kArm64CompareAndBranch:
340 return kNoOpcodeFlags;
341
342 case kArm64LdrH:
343 case kArm64LdrS:
344 case kArm64LdrD:
345 case kArm64LdrQ:
346 case kArm64Ldrb:
347 case kArm64Ldrsb:
348 case kArm64LdrsbW:
349 case kArm64Ldrh:
350 case kArm64Ldrsh:
351 case kArm64LdrshW:
352 case kArm64Ldrsw:
353 case kArm64LdrW:
354 case kArm64Ldr:
355 case kArm64LdrDecompressTaggedSigned:
356 case kArm64LdrDecompressTagged:
357 case kArm64LdrDecompressProtected:
358 case kArm64LdarDecompressTaggedSigned:
359 case kArm64LdarDecompressTagged:
360 case kArm64LdrDecodeSandboxedPointer:
361 case kArm64Peek:
362#if V8_ENABLE_WEBASSEMBLY
363 case kArm64LoadSplat:
364 case kArm64LoadLane:
365 case kArm64S128Load8x8S:
366 case kArm64S128Load8x8U:
367 case kArm64S128Load16x4S:
368 case kArm64S128Load16x4U:
369 case kArm64S128Load32x2S:
370 case kArm64S128Load32x2U:
371 case kArm64S128LoadPairDeinterleave:
372#endif // V8_ENABLE_WEBASSEMBLY
373 return kIsLoadOperation;
374
375 case kArm64Claim:
376 case kArm64Poke:
377 case kArm64PokePair:
378 case kArm64StrH:
379 case kArm64StrS:
380 case kArm64StrD:
381 case kArm64StrQ:
382 case kArm64Strb:
383 case kArm64Strh:
384 case kArm64StrW:
385 case kArm64StrWPair:
386 case kArm64Str:
387 case kArm64StrPair:
388 case kArm64StrCompressTagged:
389 case kArm64StlrCompressTagged:
390 case kArm64StrIndirectPointer:
391 case kArm64StrEncodeSandboxedPointer:
392 case kArm64DmbIsh:
393 case kArm64DsbIsb:
394#if V8_ENABLE_WEBASSEMBLY
395 case kArm64StoreLane:
396#endif // V8_ENABLE_WEBASSEMBLY
397 return kHasSideEffect;
398
399 case kArm64Word64AtomicLoadUint64:
400 return kIsLoadOperation;
401
402 case kArm64Word64AtomicStoreWord64:
403 case kArm64Word64AtomicAddUint64:
404 case kArm64Word64AtomicSubUint64:
405 case kArm64Word64AtomicAndUint64:
406 case kArm64Word64AtomicOrUint64:
407 case kArm64Word64AtomicXorUint64:
408 case kArm64Word64AtomicExchangeUint64:
409 case kArm64Word64AtomicCompareExchangeUint64:
410 return kHasSideEffect;
411
412#define CASE(Name) case k##Name:
414#undef CASE
415 // Already covered in architecture independent code.
416 UNREACHABLE();
417 }
418
419 UNREACHABLE();
420}
421
423 // Basic latency modeling for arm64 instructions. They have been determined
424 // in an empirical way.
425 switch (instr->arch_opcode()) {
426 case kArm64Add:
427 case kArm64Add32:
428 case kArm64And:
429 case kArm64And32:
430 case kArm64Bic:
431 case kArm64Bic32:
432 case kArm64Cmn:
433 case kArm64Cmn32:
434 case kArm64Cmp:
435 case kArm64Cmp32:
436 case kArm64Eon:
437 case kArm64Eon32:
438 case kArm64Eor:
439 case kArm64Eor32:
440 case kArm64Not:
441 case kArm64Not32:
442 case kArm64Or:
443 case kArm64Or32:
444 case kArm64Orn:
445 case kArm64Orn32:
446 case kArm64Sub:
447 case kArm64Sub32:
448 case kArm64Tst:
449 case kArm64Tst32:
450 if (instr->addressing_mode() != kMode_None) {
451 return 3;
452 } else {
453 return 1;
454 }
455
456 case kArm64Clz:
457 case kArm64Clz32:
458 case kArm64Sbfx:
459 case kArm64Sbfx32:
460 case kArm64Sxtb32:
461 case kArm64Sxth32:
462 case kArm64Sxtw:
463 case kArm64Ubfiz32:
464 case kArm64Sbfiz:
465 case kArm64Ubfx:
466 case kArm64Ubfx32:
467 return 1;
468
469 case kArm64Lsl:
470 case kArm64Lsl32:
471 case kArm64Lsr:
472 case kArm64Lsr32:
473 case kArm64Asr:
474 case kArm64Asr32:
475 case kArm64Ror:
476 case kArm64Ror32:
477 return 1;
478
479 case kArm64LdrDecompressTaggedSigned:
480 case kArm64LdrDecompressTagged:
481 case kArm64LdrDecompressProtected:
482 case kArm64Ldr:
483 case kArm64LdrD:
484 case kArm64LdrS:
485 case kArm64LdrW:
486 case kArm64Ldrb:
487 case kArm64Ldrh:
488 case kArm64Ldrsb:
489 case kArm64Ldrsh:
490 case kArm64Ldrsw:
491 return 11;
492
493 case kArm64Str:
494 case kArm64StrD:
495 case kArm64StrS:
496 case kArm64StrW:
497 case kArm64Strb:
498 case kArm64Strh:
499 return 1;
500
501 case kArm64Madd32:
502 case kArm64Mneg32:
503 case kArm64Msub32:
504 case kArm64Mul32:
505 return 3;
506
507 case kArm64Madd:
508 case kArm64Mneg:
509 case kArm64Msub:
510 case kArm64Mul:
511 return 5;
512
513 case kArm64Idiv32:
514 case kArm64Udiv32:
515 return 12;
516
517 case kArm64Idiv:
518 case kArm64Udiv:
519 return 20;
520
521 case kArm64Float32Add:
522 case kArm64Float32Sub:
523 case kArm64Float64Add:
524 case kArm64Float64Sub:
525 return 5;
526
527 case kArm64Float32Abs:
528 case kArm64Float32Cmp:
529 case kArm64Float32Neg:
530 case kArm64Float64Abs:
531 case kArm64Float64Cmp:
532 case kArm64Float64Neg:
533 return 3;
534
535 case kArm64Float32Div:
536 case kArm64Float32Sqrt:
537 return 12;
538
539 case kArm64Float64Div:
540 case kArm64Float64Sqrt:
541 return 19;
542
543 case kArm64Float32RoundDown:
544 case kArm64Float32RoundTiesEven:
545 case kArm64Float32RoundTruncate:
546 case kArm64Float32RoundUp:
547 case kArm64Float64RoundDown:
548 case kArm64Float64RoundTiesAway:
549 case kArm64Float64RoundTiesEven:
550 case kArm64Float64RoundTruncate:
551 case kArm64Float64RoundUp:
552 return 5;
553
554 case kArm64Float32ToFloat64:
555 case kArm64Float64ToFloat32:
556 case kArm64Float64ToFloat16RawBits:
557 case kArm64Float16RawBitsToFloat64:
558 case kArm64Float64ToInt32:
559 case kArm64Float64ToUint32:
560 case kArm64Float32ToInt64:
561 case kArm64Float64ToInt64:
562 case kArm64Float32ToUint64:
563 case kArm64Float64ToUint64:
564 case kArm64Int32ToFloat64:
565 case kArm64Int64ToFloat32:
566 case kArm64Int64ToFloat64:
567 case kArm64Uint32ToFloat64:
568 case kArm64Uint64ToFloat32:
569 case kArm64Uint64ToFloat64:
570 return 5;
571
572 default:
573 return 2;
574 }
575}
576
577} // namespace compiler
578} // namespace internal
579} // namespace v8
int GetTargetInstructionFlags(const Instruction *instr) const
static int GetInstructionLatency(const Instruction *instr)
#define COMMON_ARCH_OPCODE_LIST(V)
Instruction * instr