v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
macro-assembler-arm64-inl.h
Go to the documentation of this file.
1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_CODEGEN_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_
6#define V8_CODEGEN_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_
7
#include <ctype.h>

#include <limits>

#include "src/base/bits.h"
#include "src/common/globals.h"
16
17namespace v8 {
18namespace internal {
19
20MemOperand FieldMemOperand(Register object, int offset) {
21 return MemOperand(object, offset - kHeapObjectTag);
22}
23
24// Provides access to exit frame parameters (GC-ed).
26 // The slot at [sp] is reserved in all ExitFrames for storing the return
27 // address before doing the actual call, it's necessary for frame iteration
28 // (see StoreReturnAddressAndCall for details).
29 static constexpr int kSPOffset = 1 * kSystemPointerSize;
30 return MemOperand(sp, kSPOffset + offset);
31}
32
33// Provides access to exit frame parameters (GC-ed).
37}
38
39void MacroAssembler::And(const Register& rd, const Register& rn,
40 const Operand& operand) {
41 DCHECK(allow_macro_instructions());
42 DCHECK(!rd.IsZero());
43 LogicalMacro(rd, rn, operand, AND);
44}
45
46void MacroAssembler::Ands(const Register& rd, const Register& rn,
47 const Operand& operand) {
48 DCHECK(allow_macro_instructions());
49 DCHECK(!rd.IsZero());
50 LogicalMacro(rd, rn, operand, ANDS);
51}
52
53void MacroAssembler::Tst(const Register& rn, const Operand& operand) {
54 DCHECK(allow_macro_instructions());
55 LogicalMacro(AppropriateZeroRegFor(rn), rn, operand, ANDS);
56}
57
58void MacroAssembler::Bic(const Register& rd, const Register& rn,
59 const Operand& operand) {
60 DCHECK(allow_macro_instructions());
61 DCHECK(!rd.IsZero());
62 LogicalMacro(rd, rn, operand, BIC);
63}
64
65void MacroAssembler::Bics(const Register& rd, const Register& rn,
66 const Operand& operand) {
67 DCHECK(allow_macro_instructions());
68 DCHECK(!rd.IsZero());
69 LogicalMacro(rd, rn, operand, BICS);
70}
71
72void MacroAssembler::Orr(const Register& rd, const Register& rn,
73 const Operand& operand) {
74 DCHECK(allow_macro_instructions());
75 DCHECK(!rd.IsZero());
76 LogicalMacro(rd, rn, operand, ORR);
77}
78
79void MacroAssembler::Orn(const Register& rd, const Register& rn,
80 const Operand& operand) {
81 DCHECK(allow_macro_instructions());
82 DCHECK(!rd.IsZero());
83 LogicalMacro(rd, rn, operand, ORN);
84}
85
86void MacroAssembler::Eor(const Register& rd, const Register& rn,
87 const Operand& operand) {
88 DCHECK(allow_macro_instructions());
89 DCHECK(!rd.IsZero());
90 LogicalMacro(rd, rn, operand, EOR);
91}
92
93void MacroAssembler::Eon(const Register& rd, const Register& rn,
94 const Operand& operand) {
95 DCHECK(allow_macro_instructions());
96 DCHECK(!rd.IsZero());
97 LogicalMacro(rd, rn, operand, EON);
98}
99
100void MacroAssembler::Ccmp(const Register& rn, const Operand& operand,
101 StatusFlags nzcv, Condition cond) {
102 DCHECK(allow_macro_instructions());
103 if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
104 ConditionalCompareMacro(rn, -operand.ImmediateValue(), nzcv, cond, CCMN);
105 } else {
106 ConditionalCompareMacro(rn, operand, nzcv, cond, CCMP);
107 }
108}
109
110void MacroAssembler::CcmpTagged(const Register& rn, const Operand& operand,
111 StatusFlags nzcv, Condition cond) {
113 Ccmp(rn.W(), operand.ToW(), nzcv, cond);
114 } else {
115 Ccmp(rn, operand, nzcv, cond);
116 }
117}
118
119void MacroAssembler::Ccmn(const Register& rn, const Operand& operand,
120 StatusFlags nzcv, Condition cond) {
121 DCHECK(allow_macro_instructions());
122 if (operand.IsImmediate() && (operand.ImmediateValue() < 0)) {
123 ConditionalCompareMacro(rn, -operand.ImmediateValue(), nzcv, cond, CCMP);
124 } else {
125 ConditionalCompareMacro(rn, operand, nzcv, cond, CCMN);
126 }
127}
128
129void MacroAssembler::Add(const Register& rd, const Register& rn,
130 const Operand& operand) {
131 DCHECK(allow_macro_instructions());
132 if (operand.IsImmediate()) {
133 int64_t imm = operand.ImmediateValue();
134 if ((imm > 0) && IsImmAddSub(imm)) {
135 DataProcImmediate(rd, rn, static_cast<int>(imm), ADD);
136 return;
137 } else if ((imm < 0) && IsImmAddSub(-imm)) {
138 DataProcImmediate(rd, rn, static_cast<int>(-imm), SUB);
139 return;
140 }
141 } else if (operand.IsShiftedRegister() && (operand.shift_amount() == 0)) {
142 if (!rd.IsSP() && !rn.IsSP() && !operand.reg().IsSP() &&
143 !operand.reg().IsZero()) {
144 DataProcPlainRegister(rd, rn, operand.reg(), ADD);
145 return;
146 }
147 }
148 AddSubMacro(rd, rn, operand, LeaveFlags, ADD);
149}
150
151void MacroAssembler::Adds(const Register& rd, const Register& rn,
152 const Operand& operand) {
153 DCHECK(allow_macro_instructions());
154 if (operand.IsImmediate() && (operand.ImmediateValue() < 0) &&
155 IsImmAddSub(-operand.ImmediateValue())) {
156 AddSubMacro(rd, rn, -operand.ImmediateValue(), SetFlags, SUB);
157 } else {
158 AddSubMacro(rd, rn, operand, SetFlags, ADD);
159 }
160}
161
162void MacroAssembler::Sub(const Register& rd, const Register& rn,
163 const Operand& operand) {
164 DCHECK(allow_macro_instructions());
165 if (operand.IsImmediate()) {
166 int64_t imm = operand.ImmediateValue();
167 if ((imm > 0) && IsImmAddSub(imm)) {
168 DataProcImmediate(rd, rn, static_cast<int>(imm), SUB);
169 return;
170 } else if ((imm < 0) && IsImmAddSub(-imm)) {
171 DataProcImmediate(rd, rn, static_cast<int>(-imm), ADD);
172 return;
173 }
174 } else if (operand.IsShiftedRegister() && (operand.shift_amount() == 0)) {
175 if (!rd.IsSP() && !rn.IsSP() && !operand.reg().IsSP() &&
176 !operand.reg().IsZero()) {
177 DataProcPlainRegister(rd, rn, operand.reg(), SUB);
178 return;
179 }
180 }
181 AddSubMacro(rd, rn, operand, LeaveFlags, SUB);
182}
183
184void MacroAssembler::Subs(const Register& rd, const Register& rn,
185 const Operand& operand) {
186 DCHECK(allow_macro_instructions());
187 if (operand.IsImmediate() && (operand.ImmediateValue() < 0) &&
188 IsImmAddSub(-operand.ImmediateValue())) {
189 AddSubMacro(rd, rn, -operand.ImmediateValue(), SetFlags, ADD);
190 } else {
191 AddSubMacro(rd, rn, operand, SetFlags, SUB);
192 }
193}
194
195void MacroAssembler::Cmn(const Register& rn, const Operand& operand) {
196 DCHECK(allow_macro_instructions());
197 Adds(AppropriateZeroRegFor(rn), rn, operand);
198}
199
200void MacroAssembler::Cmp(const Register& rn, const Operand& operand) {
201 DCHECK(allow_macro_instructions());
202 if (operand.IsShiftedRegister() && operand.shift_amount() == 0) {
203 if (!rn.IsSP() && !operand.reg().IsSP()) {
204 CmpPlainRegister(rn, operand.reg());
205 return;
206 }
207 }
208 Subs(AppropriateZeroRegFor(rn), rn, operand);
209}
210
211void MacroAssembler::CmpTagged(const Register& rn, const Operand& operand) {
213 Cmp(rn.W(), operand.ToW());
214 } else {
215 Cmp(rn, operand);
216 }
217}
218
219void MacroAssembler::Neg(const Register& rd, const Operand& operand) {
220 DCHECK(allow_macro_instructions());
221 DCHECK(!rd.IsZero());
222 if (operand.IsImmediate()) {
223 Mov(rd, -operand.ImmediateValue());
224 } else {
225 Sub(rd, AppropriateZeroRegFor(rd), operand);
226 }
227}
228
229void MacroAssembler::Negs(const Register& rd, const Operand& operand) {
230 DCHECK(allow_macro_instructions());
231 Subs(rd, AppropriateZeroRegFor(rd), operand);
232}
233
234void MacroAssembler::Adc(const Register& rd, const Register& rn,
235 const Operand& operand) {
236 DCHECK(allow_macro_instructions());
237 DCHECK(!rd.IsZero());
238 AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, ADC);
239}
240
241void MacroAssembler::Adcs(const Register& rd, const Register& rn,
242 const Operand& operand) {
243 DCHECK(allow_macro_instructions());
244 DCHECK(!rd.IsZero());
245 AddSubWithCarryMacro(rd, rn, operand, SetFlags, ADC);
246}
247
248void MacroAssembler::Sbc(const Register& rd, const Register& rn,
249 const Operand& operand) {
250 DCHECK(allow_macro_instructions());
251 DCHECK(!rd.IsZero());
252 AddSubWithCarryMacro(rd, rn, operand, LeaveFlags, SBC);
253}
254
255void MacroAssembler::Sbcs(const Register& rd, const Register& rn,
256 const Operand& operand) {
257 DCHECK(allow_macro_instructions());
258 DCHECK(!rd.IsZero());
259 AddSubWithCarryMacro(rd, rn, operand, SetFlags, SBC);
260}
261
262void MacroAssembler::Ngc(const Register& rd, const Operand& operand) {
263 DCHECK(allow_macro_instructions());
264 DCHECK(!rd.IsZero());
266 Sbc(rd, zr, operand);
267}
268
269void MacroAssembler::Ngcs(const Register& rd, const Operand& operand) {
270 DCHECK(allow_macro_instructions());
271 DCHECK(!rd.IsZero());
273 Sbcs(rd, zr, operand);
274}
275
276void MacroAssembler::Mvn(const Register& rd, uint64_t imm) {
277 DCHECK(allow_macro_instructions());
278 DCHECK(!rd.IsZero());
279 Mov(rd, ~imm);
280}
281
282#define DEFINE_FUNCTION(FN, REGTYPE, REG, OP) \
283 void MacroAssembler::FN(const REGTYPE REG, const MemOperand& addr) { \
284 DCHECK(allow_macro_instructions()); \
285 LoadStoreMacro(REG, addr, OP); \
286 }
288#undef DEFINE_FUNCTION
289
290#define DEFINE_FUNCTION(FN, REGTYPE, REG, REG2, OP) \
291 void MacroAssembler::FN(const REGTYPE REG, const REGTYPE REG2, \
292 const MemOperand& addr) { \
293 DCHECK(allow_macro_instructions()); \
294 LoadStorePairMacro(REG, REG2, addr, OP); \
295 }
297#undef DEFINE_FUNCTION
298
299#define DEFINE_FUNCTION(FN, OP) \
300 void MacroAssembler::FN(const Register& rt, const Register& rn) { \
301 DCHECK(allow_macro_instructions()); \
302 OP(rt, rn); \
303 }
305#undef DEFINE_FUNCTION
306
307#define DEFINE_FUNCTION(FN, OP) \
308 void MacroAssembler::FN(const Register& rs, const Register& rt, \
309 const Register& rn) { \
310 DCHECK(allow_macro_instructions()); \
311 OP(rs, rt, rn); \
312 }
314#undef DEFINE_FUNCTION
315
316#define DEFINE_FUNCTION(FN, OP) \
317 void MacroAssembler::FN(const Register& rs, const Register& rt, \
318 const MemOperand& src) { \
319 DCHECK(allow_macro_instructions()); \
320 OP(rs, rt, src); \
321 }
323#undef DEFINE_FUNCTION
324
325#define DEFINE_FUNCTION(FN, OP) \
326 void MacroAssembler::FN(const Register& rs, const Register& rs2, \
327 const Register& rt, const Register& rt2, \
328 const MemOperand& src) { \
329 DCHECK(allow_macro_instructions()); \
330 OP(rs, rs2, rt, rt2, src); \
331 }
333#undef DEFINE_FUNCTION
334
335#define DEFINE_LOAD_FUNCTION(FN, OP) \
336 void MacroAssembler::FN(const Register& rs, const Register& rt, \
337 const MemOperand& src) { \
338 DCHECK(allow_macro_instructions_); \
339 OP(rs, rt, src); \
340 }
341#define DEFINE_STORE_FUNCTION(FN, OP) \
342 void MacroAssembler::FN(const Register& rs, const MemOperand& src) { \
343 DCHECK(allow_macro_instructions_); \
344 OP(rs, src); \
345 }
346
348 DEFINE_LOAD_FUNCTION, Ld, ld)
351
352#define DEFINE_SWP_FUNCTION(FN, OP) \
353 void MacroAssembler::FN(const Register& rs, const Register& rt, \
354 const MemOperand& src) { \
355 DCHECK(allow_macro_instructions_); \
356 OP(rs, rt, src); \
357 }
358
360
361void MacroAssembler::Asr(const Register& rd, const Register& rn,
362 unsigned shift) {
363 DCHECK(allow_macro_instructions());
364 DCHECK(!rd.IsZero());
365 asr(rd, rn, shift);
366}
367
368void MacroAssembler::Asr(const Register& rd, const Register& rn,
369 const Register& rm) {
370 DCHECK(allow_macro_instructions());
371 DCHECK(!rd.IsZero());
372 asrv(rd, rn, rm);
373}
374
376 DCHECK(allow_macro_instructions());
377 b(label);
378 CheckVeneerPool(false, false);
379}
380
382 DCHECK(allow_macro_instructions());
383 B(label, cond);
384}
385
386void MacroAssembler::Bfi(const Register& rd, const Register& rn, unsigned lsb,
387 unsigned width) {
388 DCHECK(allow_macro_instructions());
389 DCHECK(!rd.IsZero());
390 bfi(rd, rn, lsb, width);
391}
392
393void MacroAssembler::Bfxil(const Register& rd, const Register& rn, unsigned lsb,
394 unsigned width) {
395 DCHECK(allow_macro_instructions());
396 DCHECK(!rd.IsZero());
397 bfxil(rd, rn, lsb, width);
398}
399
401 DCHECK(allow_macro_instructions());
403 bind(label);
404 } else {
405 // Emit this inside an InstructionAccurateScope to ensure there are no extra
406 // instructions between the bind and the target identifier instruction.
407 InstructionAccurateScope scope(this, 1);
408 bind(label);
410 pacibsp();
411 } else {
412 bti(id);
413 }
414 }
415}
416
418
420
423}
424
426#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
428#endif
429}
430
432#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
434#else
435 Bind(label);
436#endif
437}
438
440#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
442#endif
443}
444
446#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
448#endif
449}
450
452#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
454#else
455 Bind(label);
456#endif
457}
458
460#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
462#else
463 Bind(label);
464#endif
465}
466
468 DCHECK(allow_macro_instructions());
469 bl(label);
470}
471
473 DCHECK(allow_macro_instructions());
474 DCHECK(!xn.IsZero());
475 blr(xn);
476}
477
479 DCHECK(allow_macro_instructions());
480 DCHECK(!xn.IsZero());
481 br(xn);
482}
483
484void MacroAssembler::Brk(int code) {
485 DCHECK(allow_macro_instructions());
486 brk(code);
487}
488
489void MacroAssembler::Cinc(const Register& rd, const Register& rn,
490 Condition cond) {
491 DCHECK(allow_macro_instructions());
492 DCHECK(!rd.IsZero());
493 DCHECK((cond != al) && (cond != nv));
494 cinc(rd, rn, cond);
495}
496
497void MacroAssembler::Cinv(const Register& rd, const Register& rn,
498 Condition cond) {
499 DCHECK(allow_macro_instructions());
500 DCHECK(!rd.IsZero());
501 DCHECK((cond != al) && (cond != nv));
502 cinv(rd, rn, cond);
503}
504
505void MacroAssembler::Cls(const Register& rd, const Register& rn) {
506 DCHECK(allow_macro_instructions());
507 DCHECK(!rd.IsZero());
508 cls(rd, rn);
509}
510
511void MacroAssembler::Clz(const Register& rd, const Register& rn) {
512 DCHECK(allow_macro_instructions());
513 DCHECK(!rd.IsZero());
514 clz(rd, rn);
515}
516
517void MacroAssembler::Cneg(const Register& rd, const Register& rn,
518 Condition cond) {
519 DCHECK(allow_macro_instructions());
520 DCHECK(!rd.IsZero());
521 DCHECK((cond != al) && (cond != nv));
522 cneg(rd, rn, cond);
523}
524
525// Conditionally zero the destination register. Only X registers are supported
526// due to the truncation side-effect when used on W registers.
528 DCHECK(allow_macro_instructions());
529 DCHECK(!rd.IsSP() && rd.Is64Bits());
530 DCHECK((cond != al) && (cond != nv));
531 csel(rd, xzr, rd, cond);
532}
533
534// Conditionally move a value into the destination register. Only X registers
535// are supported due to the truncation side-effect when used on W registers.
536void MacroAssembler::CmovX(const Register& rd, const Register& rn,
537 Condition cond) {
538 DCHECK(allow_macro_instructions());
539 DCHECK(!rd.IsSP());
540 DCHECK(rd.Is64Bits() && rn.Is64Bits());
541 DCHECK((cond != al) && (cond != nv));
542 if (rd != rn) {
543 csel(rd, rn, rd, cond);
544 }
545}
546
548 DCHECK(allow_macro_instructions());
549 csdb();
550}
551
553 DCHECK(allow_macro_instructions());
554 DCHECK(!rd.IsZero());
555 DCHECK((cond != al) && (cond != nv));
556 cset(rd, cond);
557}
558
560 DCHECK(allow_macro_instructions());
561 DCHECK(!rd.IsZero());
562 DCHECK((cond != al) && (cond != nv));
563 csetm(rd, cond);
564}
565
566void MacroAssembler::Csinc(const Register& rd, const Register& rn,
567 const Register& rm, Condition cond) {
568 DCHECK(allow_macro_instructions());
569 DCHECK(!rd.IsZero());
570 DCHECK((cond != al) && (cond != nv));
571 csinc(rd, rn, rm, cond);
572}
573
574void MacroAssembler::Csinv(const Register& rd, const Register& rn,
575 const Register& rm, Condition cond) {
576 DCHECK(allow_macro_instructions());
577 DCHECK(!rd.IsZero());
578 DCHECK((cond != al) && (cond != nv));
579 csinv(rd, rn, rm, cond);
580}
581
582void MacroAssembler::Csneg(const Register& rd, const Register& rn,
583 const Register& rm, Condition cond) {
584 DCHECK(allow_macro_instructions());
585 DCHECK(!rd.IsZero());
586 DCHECK((cond != al) && (cond != nv));
587 csneg(rd, rn, rm, cond);
588}
589
591 DCHECK(allow_macro_instructions());
592 dmb(domain, type);
593}
594
596 DCHECK(allow_macro_instructions());
597 dsb(domain, type);
598}
599
600void MacroAssembler::Debug(const char* message, uint32_t code, Instr params) {
601 DCHECK(allow_macro_instructions());
602 debug(message, code, params);
603}
604
605void MacroAssembler::Extr(const Register& rd, const Register& rn,
606 const Register& rm, unsigned lsb) {
607 DCHECK(allow_macro_instructions());
608 DCHECK(!rd.IsZero());
609 extr(rd, rn, rm, lsb);
610}
611
613 DCHECK(allow_macro_instructions());
614 fabs(fd, fn);
615}
616
618 const VRegister& fm) {
619 DCHECK(allow_macro_instructions());
620 fadd(fd, fn, fm);
621}
622
624 StatusFlags nzcv, Condition cond) {
625 DCHECK(allow_macro_instructions());
626 DCHECK((cond != al) && (cond != nv));
627 fccmp(fn, fm, nzcv, cond);
628}
629
630void MacroAssembler::Fccmp(const VRegister& fn, const double value,
631 StatusFlags nzcv, Condition cond) {
632 DCHECK(allow_macro_instructions());
633 UseScratchRegisterScope temps(this);
634 VRegister tmp = temps.AcquireSameSizeAs(fn);
635 Fmov(tmp, value);
636 Fccmp(fn, tmp, nzcv, cond);
637}
638
640 DCHECK(allow_macro_instructions());
641 fcmp(fn, fm);
642}
643
644void MacroAssembler::Fcmp(const VRegister& fn, double value) {
645 DCHECK(allow_macro_instructions());
646 if (value != 0.0) {
647 UseScratchRegisterScope temps(this);
648 VRegister tmp = temps.AcquireSameSizeAs(fn);
649 Fmov(tmp, value);
650 fcmp(fn, tmp);
651 } else {
652 fcmp(fn, value);
653 }
654}
655
657 const VRegister& fm, Condition cond) {
658 DCHECK(allow_macro_instructions());
659 DCHECK((cond != al) && (cond != nv));
660 fcsel(fd, fn, fm, cond);
661}
662
664 DCHECK(allow_macro_instructions());
665 fcvt(fd, fn);
666}
667
669 DCHECK(allow_macro_instructions());
670 DCHECK(!rd.IsZero());
671 fcvtas(rd, fn);
672}
673
675 DCHECK(allow_macro_instructions());
676 DCHECK(!rd.IsZero());
677 fcvtau(rd, fn);
678}
679
681 DCHECK(allow_macro_instructions());
682 DCHECK(!rd.IsZero());
683 fcvtms(rd, fn);
684}
685
687 DCHECK(allow_macro_instructions());
688 DCHECK(!rd.IsZero());
689 fcvtmu(rd, fn);
690}
691
693 DCHECK(allow_macro_instructions());
694 DCHECK(!rd.IsZero());
695 fcvtns(rd, fn);
696}
697
699 DCHECK(allow_macro_instructions());
700 DCHECK(!rd.IsZero());
701 fcvtnu(rd, fn);
702}
703
705 DCHECK(allow_macro_instructions());
706 DCHECK(!rd.IsZero());
707 fcvtzs(rd, fn);
708}
710 DCHECK(allow_macro_instructions());
711 DCHECK(!rd.IsZero());
712 fcvtzu(rd, fn);
713}
714
716 const VRegister& fm) {
717 DCHECK(allow_macro_instructions());
718 fdiv(fd, fn, fm);
719}
720
722 const VRegister& fm, const VRegister& fa) {
723 DCHECK(allow_macro_instructions());
724 fmadd(fd, fn, fm, fa);
725}
726
728 const VRegister& fm) {
729 DCHECK(allow_macro_instructions());
730 fmax(fd, fn, fm);
731}
732
734 const VRegister& fm) {
735 DCHECK(allow_macro_instructions());
736 fmaxnm(fd, fn, fm);
737}
738
740 const VRegister& fm) {
741 DCHECK(allow_macro_instructions());
742 fmin(fd, fn, fm);
743}
744
746 const VRegister& fm) {
747 DCHECK(allow_macro_instructions());
748 fminnm(fd, fn, fm);
749}
750
752 DCHECK(allow_macro_instructions());
753 // Only emit an instruction if fd and fn are different, and they are both D
754 // registers. fmov(s0, s0) is not a no-op because it clears the top word of
755 // d0. Technically, fmov(d0, d0) is not a no-op either because it clears the
756 // top of q0, but VRegister does not currently support Q registers.
757 if (fd != fn || !fd.Is64Bits()) {
758 fmov(fd, fn);
759 }
760}
761
763 DCHECK(allow_macro_instructions());
764 fmov(fd, rn);
765}
766
767void MacroAssembler::Fmov(VRegister vd, double imm) {
768 DCHECK(allow_macro_instructions());
769 uint64_t bits = base::bit_cast<uint64_t>(imm);
770
771 if (bits == 0) {
772 Movi(vd.D(), 0);
773 return;
774 }
775
776 if (vd.Is1S() || vd.Is2S() || vd.Is4S()) {
777 Fmov(vd, static_cast<float>(imm));
778 return;
779 }
780
781 DCHECK(vd.Is1D() || vd.Is2D());
782 if (IsImmFP64(bits)) {
783 fmov(vd, imm);
784 } else {
785 Movi64bitHelper(vd, bits);
786 }
787}
788
789void MacroAssembler::Fmov(VRegister vd, float imm) {
790 DCHECK(allow_macro_instructions());
791 uint32_t bits = base::bit_cast<uint32_t>(imm);
792
793 if (bits == 0) {
794 Movi(vd.D(), 0);
795 return;
796 }
797
798 if (vd.Is1D() || vd.Is2D()) {
799 Fmov(vd, static_cast<double>(imm));
800 return;
801 }
802
803 DCHECK(vd.Is1S() || vd.Is2S() || vd.Is4S());
804 if (IsImmFP32(bits)) {
805 fmov(vd, imm);
806 } else if (vd.IsScalar()) {
807 UseScratchRegisterScope temps(this);
808 Register tmp = temps.AcquireW();
809 Mov(tmp, bits);
810 Fmov(vd, tmp);
811 } else {
812 Movi(vd, bits);
813 }
814}
815
817 DCHECK(allow_macro_instructions());
818 DCHECK(!rd.IsZero());
819 fmov(rd, fn);
820}
821
823 const VRegister& fm, const VRegister& fa) {
824 DCHECK(allow_macro_instructions());
825 fmsub(fd, fn, fm, fa);
826}
827
829 const VRegister& fm) {
830 DCHECK(allow_macro_instructions());
831 fmul(fd, fn, fm);
832}
833
835 const VRegister& fm, const VRegister& fa) {
836 DCHECK(allow_macro_instructions());
837 fnmadd(fd, fn, fm, fa);
838}
839
841 const VRegister& fm, const VRegister& fa) {
842 DCHECK(allow_macro_instructions());
843 fnmsub(fd, fn, fm, fa);
844}
845
847 const VRegister& fm) {
848 DCHECK(allow_macro_instructions());
849 fsub(fd, fn, fm);
850}
851
853 DCHECK(allow_macro_instructions());
854 hint(code);
855}
856
857void MacroAssembler::Hlt(int code) {
858 DCHECK(allow_macro_instructions());
859 hlt(code);
860}
861
863 DCHECK(allow_macro_instructions());
864 isb();
865}
866
867void MacroAssembler::Ldr(const CPURegister& rt, const Operand& operand) {
868 DCHECK(allow_macro_instructions());
869 ldr(rt, operand);
870}
871
872void MacroAssembler::Lsl(const Register& rd, const Register& rn,
873 unsigned shift) {
874 DCHECK(allow_macro_instructions());
875 DCHECK(!rd.IsZero());
876 lsl(rd, rn, shift);
877}
878
879void MacroAssembler::Lsl(const Register& rd, const Register& rn,
880 const Register& rm) {
881 DCHECK(allow_macro_instructions());
882 DCHECK(!rd.IsZero());
883 lslv(rd, rn, rm);
884}
885
886void MacroAssembler::Lsr(const Register& rd, const Register& rn,
887 unsigned shift) {
888 DCHECK(allow_macro_instructions());
889 DCHECK(!rd.IsZero());
890 lsr(rd, rn, shift);
891}
892
893void MacroAssembler::Lsr(const Register& rd, const Register& rn,
894 const Register& rm) {
895 DCHECK(allow_macro_instructions());
896 DCHECK(!rd.IsZero());
897 lsrv(rd, rn, rm);
898}
899
900void MacroAssembler::Madd(const Register& rd, const Register& rn,
901 const Register& rm, const Register& ra) {
902 DCHECK(allow_macro_instructions());
903 DCHECK(!rd.IsZero());
904 madd(rd, rn, rm, ra);
905}
906
907void MacroAssembler::Mneg(const Register& rd, const Register& rn,
908 const Register& rm) {
909 DCHECK(allow_macro_instructions());
910 DCHECK(!rd.IsZero());
911 mneg(rd, rn, rm);
912}
913
914void MacroAssembler::Movk(const Register& rd, uint64_t imm, int shift) {
915 DCHECK(allow_macro_instructions());
916 DCHECK(!rd.IsZero());
917 movk(rd, imm, shift);
918}
919
921 DCHECK(allow_macro_instructions());
922 DCHECK(!rt.IsZero());
923 mrs(rt, sysreg);
924}
925
927 DCHECK(allow_macro_instructions());
928 msr(sysreg, rt);
929}
930
931void MacroAssembler::Msub(const Register& rd, const Register& rn,
932 const Register& rm, const Register& ra) {
933 DCHECK(allow_macro_instructions());
934 DCHECK(!rd.IsZero());
935 msub(rd, rn, rm, ra);
936}
937
938void MacroAssembler::Mul(const Register& rd, const Register& rn,
939 const Register& rm) {
940 DCHECK(allow_macro_instructions());
941 DCHECK(!rd.IsZero());
942 mul(rd, rn, rm);
943}
944
945void MacroAssembler::Rbit(const Register& rd, const Register& rn) {
946 DCHECK(allow_macro_instructions());
947 DCHECK(!rd.IsZero());
948 rbit(rd, rn);
949}
950
952 DCHECK(allow_macro_instructions());
953 DCHECK(!xn.IsZero());
954 ret(xn);
955 CheckVeneerPool(false, false);
956}
957
958void MacroAssembler::Rev(const Register& rd, const Register& rn) {
959 DCHECK(allow_macro_instructions());
960 DCHECK(!rd.IsZero());
961 rev(rd, rn);
962}
963
964void MacroAssembler::Rev16(const Register& rd, const Register& rn) {
965 DCHECK(allow_macro_instructions());
966 DCHECK(!rd.IsZero());
967 rev16(rd, rn);
968}
969
970void MacroAssembler::Rev32(const Register& rd, const Register& rn) {
971 DCHECK(allow_macro_instructions());
972 DCHECK(!rd.IsZero());
973 rev32(rd, rn);
974}
975
976void MacroAssembler::Ror(const Register& rd, const Register& rs,
977 unsigned shift) {
978 DCHECK(allow_macro_instructions());
979 DCHECK(!rd.IsZero());
980 ror(rd, rs, shift);
981}
982
983void MacroAssembler::Ror(const Register& rd, const Register& rn,
984 const Register& rm) {
985 DCHECK(allow_macro_instructions());
986 DCHECK(!rd.IsZero());
987 rorv(rd, rn, rm);
988}
989
990void MacroAssembler::Sbfx(const Register& rd, const Register& rn, unsigned lsb,
991 unsigned width) {
992 DCHECK(allow_macro_instructions());
993 DCHECK(!rd.IsZero());
994 sbfx(rd, rn, lsb, width);
995}
996
997void MacroAssembler::Scvtf(const VRegister& fd, const Register& rn,
998 unsigned fbits) {
999 DCHECK(allow_macro_instructions());
1000 scvtf(fd, rn, fbits);
1001}
1002
1003void MacroAssembler::Sdiv(const Register& rd, const Register& rn,
1004 const Register& rm) {
1005 DCHECK(allow_macro_instructions());
1006 DCHECK(!rd.IsZero());
1007 sdiv(rd, rn, rm);
1008}
1009
1010void MacroAssembler::Smaddl(const Register& rd, const Register& rn,
1011 const Register& rm, const Register& ra) {
1012 DCHECK(allow_macro_instructions());
1013 DCHECK(!rd.IsZero());
1014 smaddl(rd, rn, rm, ra);
1015}
1016
1017void MacroAssembler::Smsubl(const Register& rd, const Register& rn,
1018 const Register& rm, const Register& ra) {
1019 DCHECK(allow_macro_instructions());
1020 DCHECK(!rd.IsZero());
1021 smsubl(rd, rn, rm, ra);
1022}
1023
1024void MacroAssembler::Smull(const Register& rd, const Register& rn,
1025 const Register& rm) {
1026 DCHECK(allow_macro_instructions());
1027 DCHECK(!rd.IsZero());
1028 smull(rd, rn, rm);
1029}
1030
1031void MacroAssembler::Smulh(const Register& rd, const Register& rn,
1032 const Register& rm) {
1033 DCHECK(allow_macro_instructions());
1034 DCHECK(!rd.IsZero());
1035 smulh(rd, rn, rm);
1036}
1037
1038void MacroAssembler::Umull(const Register& rd, const Register& rn,
1039 const Register& rm) {
1040 DCHECK(allow_macro_instructions());
1041 DCHECK(!rd.IsZero());
1042 umaddl(rd, rn, rm, xzr);
1043}
1044
1045void MacroAssembler::Umulh(const Register& rd, const Register& rn,
1046 const Register& rm) {
1047 DCHECK(allow_macro_instructions());
1048 DCHECK(!rd.IsZero());
1049 umulh(rd, rn, rm);
1050}
1051
1052void MacroAssembler::Sxtb(const Register& rd, const Register& rn) {
1053 DCHECK(allow_macro_instructions());
1054 DCHECK(!rd.IsZero());
1055 sxtb(rd, rn);
1056}
1057
1058void MacroAssembler::Sxth(const Register& rd, const Register& rn) {
1059 DCHECK(allow_macro_instructions());
1060 DCHECK(!rd.IsZero());
1061 sxth(rd, rn);
1062}
1063
1064void MacroAssembler::Sxtw(const Register& rd, const Register& rn) {
1065 DCHECK(allow_macro_instructions());
1066 DCHECK(!rd.IsZero());
1067 sxtw(rd, rn);
1068}
1069
1070void MacroAssembler::Ubfiz(const Register& rd, const Register& rn, unsigned lsb,
1071 unsigned width) {
1072 DCHECK(allow_macro_instructions());
1073 DCHECK(!rd.IsZero());
1074 ubfiz(rd, rn, lsb, width);
1075}
1076
1077void MacroAssembler::Sbfiz(const Register& rd, const Register& rn, unsigned lsb,
1078 unsigned width) {
1079 DCHECK(allow_macro_instructions());
1080 DCHECK(!rd.IsZero());
1081 sbfiz(rd, rn, lsb, width);
1082}
1083
1084void MacroAssembler::Ubfx(const Register& rd, const Register& rn, unsigned lsb,
1085 unsigned width) {
1086 DCHECK(allow_macro_instructions());
1087 DCHECK(!rd.IsZero());
1088 ubfx(rd, rn, lsb, width);
1089}
1090
1091void MacroAssembler::Ucvtf(const VRegister& fd, const Register& rn,
1092 unsigned fbits) {
1093 DCHECK(allow_macro_instructions());
1094 ucvtf(fd, rn, fbits);
1095}
1096
1097void MacroAssembler::Udiv(const Register& rd, const Register& rn,
1098 const Register& rm) {
1099 DCHECK(allow_macro_instructions());
1100 DCHECK(!rd.IsZero());
1101 udiv(rd, rn, rm);
1102}
1103
1104void MacroAssembler::Umaddl(const Register& rd, const Register& rn,
1105 const Register& rm, const Register& ra) {
1106 DCHECK(allow_macro_instructions());
1107 DCHECK(!rd.IsZero());
1108 umaddl(rd, rn, rm, ra);
1109}
1110
1111void MacroAssembler::Umsubl(const Register& rd, const Register& rn,
1112 const Register& rm, const Register& ra) {
1113 DCHECK(allow_macro_instructions());
1114 DCHECK(!rd.IsZero());
1115 umsubl(rd, rn, rm, ra);
1116}
1117
1118void MacroAssembler::Uxtb(const Register& rd, const Register& rn) {
1119 DCHECK(allow_macro_instructions());
1120 DCHECK(!rd.IsZero());
1121 uxtb(rd, rn);
1122}
1123
1124void MacroAssembler::Uxth(const Register& rd, const Register& rn) {
1125 DCHECK(allow_macro_instructions());
1126 DCHECK(!rd.IsZero());
1127 uxth(rd, rn);
1128}
1129
1130void MacroAssembler::Uxtw(const Register& rd, const Register& rn) {
1131 DCHECK(allow_macro_instructions());
1132 DCHECK(!rd.IsZero());
1133 uxtw(rd, rn);
1134}
1135
1137 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
1138 Mov(kRootRegister, Operand(isolate_root));
1139 Fmov(fp_zero, 0.0);
1140
1141#ifdef V8_COMPRESS_POINTERS
1142 LoadRootRelative(kPtrComprCageBaseRegister, IsolateData::cage_base_offset());
1143#endif
1144}
1145
1147 DCHECK(dst.Is64Bits() && src.Is64Bits());
1149 Lsl(dst, src, kSmiShift);
1150}
1151
1153
1155 DCHECK(dst.Is64Bits() && src.Is64Bits());
1156 if (v8_flags.enable_slow_asserts) {
1157 AssertSmi(src);
1158 }
1161 Sbfx(dst, src.W(), kSmiShift, kSmiValueSize);
1162 } else {
1163 Asr(dst, src, kSmiShift);
1164 }
1165}
1166
1168 DCHECK(dst.Is64Bits());
1169 if (SmiValuesAre32Bits()) {
1170 if (src.IsImmediateOffset() && src.shift_amount() == 0) {
1171 // Load value directly from the upper half-word.
1172 // Assumes that Smis are shifted by 32 bits and little endianness.
1173 DCHECK_EQ(kSmiShift, 32);
1174 Ldrsw(dst,
1175 MemOperand(src.base(), src.offset() + (kSmiShift / kBitsPerByte),
1176 src.addrmode()));
1177
1178 } else {
1179 Ldr(dst, src);
1180 SmiUntag(dst);
1181 }
1182 } else {
1185 Ldr(dst.W(), src);
1186 } else {
1187 Ldr(dst, src);
1188 }
1189 SmiUntag(dst);
1190 }
1191}
1192
1194
1195void MacroAssembler::SmiToInt32(Register smi) { SmiToInt32(smi, smi); }
1196
1197void MacroAssembler::SmiToInt32(Register dst, Register smi) {
1198 DCHECK(dst.Is64Bits());
1199 if (v8_flags.enable_slow_asserts) {
1200 AssertSmi(smi);
1201 }
1204 Asr(dst.W(), smi.W(), kSmiShift);
1205 } else {
1206 Lsr(dst, smi, kSmiShift);
1207 }
1208}
1209
1211 Label* not_smi_label) {
1212 static_assert((kSmiTagSize == 1) && (kSmiTag == 0));
1213 // Check if the tag bit is set.
1214 if (smi_label) {
1215 Tbz(value, 0, smi_label);
1216 if (not_smi_label) {
1217 B(not_smi_label);
1218 }
1219 } else {
1220 DCHECK(not_smi_label);
1221 Tbnz(value, 0, not_smi_label);
1222 }
1223}
1224
1226 CompareAndBranch(x, y, eq, dest);
1227}
1228
1230 CompareAndBranch(x, y, lt, dest);
1231}
1232
1234 Label* dest) {
1235 CompareAndBranch(x, y, lo, dest);
1236}
1237
1238void MacroAssembler::JumpIfNotSmi(Register value, Label* not_smi_label) {
1239 JumpIfSmi(value, nullptr, not_smi_label);
1240}
1241
1243 UseScratchRegisterScope temps(this);
1244 Register scratch = temps.AcquireX();
1245 AssertFeedbackVector(object, scratch);
1246}
1247
1249
1250template <MacroAssembler::StoreLRMode lr_mode>
1251void MacroAssembler::Push(const CPURegister& src0, const CPURegister& src1,
1252 const CPURegister& src2, const CPURegister& src3) {
1253 DCHECK(AreSameSizeAndType(src0, src1, src2, src3));
1254 DCHECK_IMPLIES((lr_mode == kSignLR), ((src0 == lr) || (src1 == lr) ||
1255 (src2 == lr) || (src3 == lr)));
1256 DCHECK_IMPLIES((lr_mode == kDontStoreLR), ((src0 != lr) && (src1 != lr) &&
1257 (src2 != lr) && (src3 != lr)));
1258
1259#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1260 if (lr_mode == kSignLR) {
1261 Pacibsp();
1262 }
1263#endif
1264
1265 int count = 1 + src1.is_valid() + src2.is_valid() + src3.is_valid();
1266 int size = src0.SizeInBytes();
1267 DCHECK_EQ(0, (size * count) % 16);
1268
1269 PushHelper(count, size, src0, src1, src2, src3);
1270}
1271
1272template <MacroAssembler::StoreLRMode lr_mode>
1273void MacroAssembler::Push(const Register& src0, const VRegister& src1) {
1274 DCHECK_IMPLIES((lr_mode == kSignLR), ((src0 == lr) || (src1 == lr)));
1275 DCHECK_IMPLIES((lr_mode == kDontStoreLR), ((src0 != lr) && (src1 != lr)));
1276#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1277 if (lr_mode == kSignLR) {
1278 Pacibsp();
1279 }
1280#endif
1281
1282 int size = src0.SizeInBytes() + src1.SizeInBytes();
1283 DCHECK_EQ(0, size % 16);
1284
1285 // Reserve room for src0 and push src1.
1286 str(src1, MemOperand(sp, -size, PreIndex));
1287 // Fill the gap with src0.
1288 str(src0, MemOperand(sp, src1.SizeInBytes()));
1289}
1290
1291template <MacroAssembler::LoadLRMode lr_mode>
1292void MacroAssembler::Pop(const CPURegister& dst0, const CPURegister& dst1,
1293 const CPURegister& dst2, const CPURegister& dst3) {
1294 // It is not valid to pop into the same register more than once in one
1295 // instruction, not even into the zero register.
1296 DCHECK(!AreAliased(dst0, dst1, dst2, dst3));
1297 DCHECK(AreSameSizeAndType(dst0, dst1, dst2, dst3));
1298 DCHECK(dst0.is_valid());
1299
1300 int count = 1 + dst1.is_valid() + dst2.is_valid() + dst3.is_valid();
1301 int size = dst0.SizeInBytes();
1302 DCHECK_EQ(0, (size * count) % 16);
1303
1304 PopHelper(count, size, dst0, dst1, dst2, dst3);
1305
1306 DCHECK_IMPLIES((lr_mode == kAuthLR), ((dst0 == lr) || (dst1 == lr) ||
1307 (dst2 == lr) || (dst3 == lr)));
1308 DCHECK_IMPLIES((lr_mode == kDontLoadLR), ((dst0 != lr) && (dst1 != lr)) &&
1309 (dst2 != lr) && (dst3 != lr));
1310
1311#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1312 if (lr_mode == kAuthLR) {
1313 Autibsp();
1314 }
1315#endif
1316}
1317
1318template <MacroAssembler::StoreLRMode lr_mode>
1320 DCHECK_IMPLIES((lr_mode == kSignLR), (src == lr));
1321 DCHECK_IMPLIES((lr_mode == kDontStoreLR), (src != lr));
1322#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1323 if (lr_mode == kSignLR) {
1324 Pacibsp();
1325 }
1326#endif
1327
1328 if (offset.IsImmediate()) {
1329 DCHECK_GE(offset.ImmediateValue(), 0);
1330 } else if (v8_flags.debug_code) {
1331 Cmp(xzr, offset);
1332 Check(le, AbortReason::kStackAccessBelowStackPointer);
1333 }
1334
1335 Str(src, MemOperand(sp, offset));
1336}
1337
1338template <MacroAssembler::LoadLRMode lr_mode>
1340 if (offset.IsImmediate()) {
1341 DCHECK_GE(offset.ImmediateValue(), 0);
1342 } else if (v8_flags.debug_code) {
1343 Cmp(xzr, offset);
1344 Check(le, AbortReason::kStackAccessBelowStackPointer);
1345 }
1346
1347 Ldr(dst, MemOperand(sp, offset));
1348
1349 DCHECK_IMPLIES((lr_mode == kAuthLR), (dst == lr));
1350 DCHECK_IMPLIES((lr_mode == kDontLoadLR), (dst != lr));
1351#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1352 if (lr_mode == kAuthLR) {
1353 Autibsp();
1354 }
1355#endif
1356}
1357
1358void MacroAssembler::Claim(int64_t count, uint64_t unit_size) {
1359 DCHECK_GE(count, 0);
1360 uint64_t size = count * unit_size;
1361
1362 if (size == 0) {
1363 return;
1364 }
1365 DCHECK_EQ(size % 16, 0);
1366#ifdef V8_TARGET_OS_WIN
1367 while (size > kStackPageSize) {
1368 Sub(sp, sp, kStackPageSize);
1369 Str(xzr, MemOperand(sp));
1370 size -= kStackPageSize;
1371 }
1372#endif
1373 Sub(sp, sp, size);
1374}
1375
1376void MacroAssembler::Claim(const Register& count, uint64_t unit_size,
1377 bool assume_sp_aligned) {
1378 if (unit_size == 0) return;
1379 DCHECK(base::bits::IsPowerOfTwo(unit_size));
1380
1381 const int shift = base::bits::CountTrailingZeros(unit_size);
1382 const Operand size(count, LSL, shift);
1383
1384 if (size.IsZero()) {
1385 return;
1386 }
1388
1389#ifdef V8_TARGET_OS_WIN
1390 // "Functions that allocate 4k or more worth of stack must ensure that each
1391 // page prior to the final page is touched in order." Source:
1392 // https://docs.microsoft.com/en-us/cpp/build/arm64-windows-abi-conventions?view=vs-2019#stack
1393
1394 // Callers expect count register to not be clobbered, so copy it.
1395 UseScratchRegisterScope temps(this);
1396 Register bytes_scratch = temps.AcquireX();
1397 Mov(bytes_scratch, size);
1398
1400 Label touch_next_page;
1401 B(&check_offset);
1402 Bind(&touch_next_page);
1403 Sub(sp, sp, kStackPageSize);
1404 // Just to touch the page, before we increment further.
1405 if (assume_sp_aligned) {
1406 Str(xzr, MemOperand(sp));
1407 } else {
1408 Register sp_copy = temps.AcquireX();
1409 Mov(sp_copy, sp);
1410 Str(xzr, MemOperand(sp_copy));
1411 }
1412 Sub(bytes_scratch, bytes_scratch, kStackPageSize);
1413
1415 Cmp(bytes_scratch, kStackPageSize);
1416 B(gt, &touch_next_page);
1417
1418 Sub(sp, sp, bytes_scratch);
1419#else
1420 Sub(sp, sp, size);
1421#endif
1422}
1423
1424void MacroAssembler::Drop(int64_t count, uint64_t unit_size) {
1425 DCHECK_GE(count, 0);
1426 uint64_t size = count * unit_size;
1427
1428 if (size == 0) {
1429 return;
1430 }
1431
1432 Add(sp, sp, size);
1433 DCHECK_EQ(size % 16, 0);
1434}
1435
1436void MacroAssembler::Drop(const Register& count, uint64_t unit_size) {
1437 if (unit_size == 0) return;
1438 DCHECK(base::bits::IsPowerOfTwo(unit_size));
1439
1440 const int shift = base::bits::CountTrailingZeros(unit_size);
1441 const Operand size(count, LSL, shift);
1442
1443 if (size.IsZero()) {
1444 return;
1445 }
1446
1448 Add(sp, sp, size);
1449}
1450
1451void MacroAssembler::DropArguments(const Register& count, int extra_slots) {
1452 UseScratchRegisterScope temps(this);
1453 Register tmp = temps.AcquireX();
1454 Add(tmp, count, extra_slots + 1); // +1 is for rounding the count up to 2.
1455 Bic(tmp, tmp, 1);
1456 Drop(tmp, kXRegSize);
1457}
1458
1462
1465}
1466
1468
1470 Condition cond, Label* label) {
1471 if (rhs.IsImmediate() && (rhs.ImmediateValue() == 0)) {
1472 switch (cond) {
1473 case eq:
1474 case ls:
1475 Cbz(lhs, label);
1476 return;
1477 case lt:
1478 Tbnz(lhs, lhs.SizeInBits() - 1, label);
1479 return;
1480 case ge:
1481 Tbz(lhs, lhs.SizeInBits() - 1, label);
1482 return;
1483 case ne:
1484 case hi:
1485 Cbnz(lhs, label);
1486 return;
1487 default:
1488 break;
1489 }
1490 }
1491 Cmp(lhs, rhs);
1492 B(cond, label);
1493}
1494
1496 const Operand& rhs, Condition cond,
1497 Label* label) {
1499 CompareAndBranch(lhs.W(), rhs.ToW(), cond, label);
1500 } else {
1501 CompareAndBranch(lhs, rhs, cond, label);
1502 }
1503}
1504
1506 const uint64_t bit_pattern,
1507 Label* label) {
1508 int bits = reg.SizeInBits();
1509 DCHECK_GT(CountSetBits(bit_pattern, bits), 0);
1510 if (CountSetBits(bit_pattern, bits) == 1) {
1511 Tbnz(reg, MaskToBit(bit_pattern), label);
1512 } else {
1513 Tst(reg, bit_pattern);
1514 B(ne, label);
1515 }
1516}
1517
1519 const uint64_t bit_pattern,
1520 Label* label) {
1521 int bits = reg.SizeInBits();
1522 DCHECK_GT(CountSetBits(bit_pattern, bits), 0);
1523 if (CountSetBits(bit_pattern, bits) == 1) {
1524 Tbz(reg, MaskToBit(bit_pattern), label);
1525 } else {
1526 Tst(reg, bit_pattern);
1527 B(eq, label);
1528 }
1529}
1530
1531} // namespace internal
1532} // namespace v8
1533
1534#endif // V8_CODEGEN_ARM64_MACRO_ASSEMBLER_ARM64_INL_H_
static bool IsImmFP32(uint32_t bits)
void uxtb(Register dst, Register src, int rotate=0, Condition cond=al)
void rev32(const Register &rd, const Register &rn)
void fcvtnu(const Register &rd, const VRegister &vn)
void cset(const Register &rd, Condition cond)
void umaddl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void fcvtzs(const Register &rd, const VRegister &vn, int fbits=0)
void csetm(const Register &rd, Condition cond)
void hint(SystemHint code)
void bfi(Register dst, Register src, int lsb, int width, Condition cond=al)
void fcvtzu(const Register &rd, const VRegister &vn, int fbits=0)
void fnmsub(const VRegister &vd, const VRegister &vn, const VRegister &vm, const VRegister &va)
void csinc(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void ucvtf(const VRegister &fd, const Register &rn, int fbits=0)
void smull(Register dstL, Register dstH, Register src1, Register src2, SBit s=LeaveCC, Condition cond=al)
void br(const Register &xn)
void b(int branch_offset, Condition cond=al, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
void fmsub(const VRegister &vd, const VRegister &vn, const VRegister &vm, const VRegister &va)
void sbfiz(const Register &rd, const Register &rn, int lsb, int width)
void sxtw(const Register &rd, const Register &rn)
void mneg(const Register &rd, const Register &rn, const Register &rm)
void sxtb(Register dst, Register src, int rotate=0, Condition cond=al)
void bti(BranchTargetIdentifier id)
void mul(Register dst, Register src1, Register src2, SBit s=LeaveCC, Condition cond=al)
void asrv(const Register &rd, const Register &rn, const Register &rm)
void scvtf(const VRegister &fd, const Register &rn, int fbits=0)
void fmaxnm(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void madd(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void sbfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void bl(int branch_offset, Condition cond=al, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
void msr(SRegisterFieldMask fields, const Operand &src, Condition cond=al)
Simd128Register Simd128Register ra
void lslv(const Register &rd, const Register &rn, const Register &rm)
void fcvtms(const Register &rd, const VRegister &vn)
void sxth(Register dst, Register src, int rotate=0, Condition cond=al)
static constexpr bool IsImmAddSub(int64_t immediate)
void lsr(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void fcvtmu(const Register &rd, const VRegister &vn)
void msub(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void cneg(const Register &rd, const Register &rn, Condition cond)
void fcmp(const VRegister &vn, const VRegister &vm)
void cls(const Register &rd, const Register &rn)
void umulh(const Register &rd, const Register &rn, const Register &rm)
void str(Register src, const MemOperand &dst, Condition cond=al)
void rorv(const Register &rd, const Register &rn, const Register &rm)
void fmov(const VRegister &fd, double imm)
void smaddl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void rbit(Register dst, Register src, Condition cond=al)
void shift(Operand dst, Immediate shift_amount, int subcode, int size)
void clz(Register dst, Register src, Condition cond=al)
void extr(const Register &rd, const Register &rn, const Register &rm, int lsb)
void fcvt(const VRegister &vd, const VRegister &vn)
void dmb(BarrierOption option)
void fcvtns(const Register &rd, const VRegister &vn)
void udiv(Register dst, Register src1, Register src2, Condition cond=al)
void smsubl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void fadd(const VRegister &vd, const VRegister &vn, const VRegister &vm)
static bool IsImmFP64(uint64_t bits)
void fminnm(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void fccmp(const VRegister &vn, const VRegister &vm, StatusFlags nzcv, Condition cond)
void fmadd(const VRegister &vd, const VRegister &vn, const VRegister &vm, const VRegister &va)
void lsl(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void cinc(const Register &rd, const Register &rn, Condition cond)
void ubfiz(const Register &rd, const Register &rn, int lsb, int width)
void smulh(const Register &rd, const Register &rn, const Register &rm)
void fsub(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void bfxil(const Register &rd, const Register &rn, int lsb, int width)
void uxth(Register dst, Register src, int rotate=0, Condition cond=al)
void csel(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void ubfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void fmax(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void dsb(BarrierOption option)
void mrs(Register dst, SRegister s, Condition cond=al)
void rev(Register dst, Register src, Condition cond=al)
void fdiv(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void rev16(const Register &rd, const Register &rn)
void DataProcImmediate(const Register &rd, const Register &rn, int immediate, Instr op)
void sdiv(Register dst, Register src1, Register src2, Condition cond=al)
void movk(const Register &rd, uint64_t imm, int shift=-1)
void cinv(const Register &rd, const Register &rn, Condition cond)
void CmpPlainRegister(const Register &rn, const Register &rm)
void fnmadd(const VRegister &vd, const VRegister &vn, const VRegister &vm, const VRegister &va)
void DataProcPlainRegister(const Register &rd, const Register &rn, const Register &rm, Instr op)
void ldr(Register dst, const MemOperand &src, Condition cond=al)
void fmul(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void uxtw(const Register &rd, const Register &rn)
void fmin(const VRegister &vd, const VRegister &vn, const VRegister &vm)
void fcvtas(const Register &rd, const VRegister &vn)
void umsubl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void fcsel(const VRegister &vd, const VRegister &vn, const VRegister &vm, Condition cond)
void fcvtau(const Register &rd, const VRegister &vn)
const Register & AppropriateZeroRegFor(const CPURegister &reg) const
void csinv(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void csneg(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void lsrv(const Register &rd, const Register &rn, const Register &rm)
void CheckVeneerPool(bool force_emit, bool require_jump, size_t margin=kVeneerDistanceMargin)
static constexpr int kFixedSlotCountAboveFp
void Mul(const Register &rd, const Register &rn, const Register &rm)
void CmovX(const Register &rd, const Register &rn, Condition cond)
void Asr(const Register &rd, const Register &rn, unsigned shift)
void Msub(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void Fcvt(const VRegister &fd, const VRegister &fn)
void Sxtb(const Register &rd, const Register &rn)
void AddSubWithCarryMacro(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubWithCarryOp op)
void Umulh(const Register &rd, const Register &rn, const Register &rm)
void Cmp(const Register &rn, int imm)
void AddSubMacro(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubOp op)
void Madd(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void CcmpTagged(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond)
void Drop(int count, Condition cond=al)
void Dsb(BarrierDomain domain, BarrierType type)
void Udiv(const Register &rd, const Register &rn, const Register &rm)
void Orr(const Register &rd, const Register &rn, const Operand &operand)
void Neg(const Register &rd, const Operand &operand)
void Adcs(const Register &rd, const Register &rn, const Operand &operand)
void Add(const Register &rd, const Register &rn, const Operand &operand)
void Bics(const Register &rd, const Register &rn, const Operand &operand)
void Mneg(const Register &rd, const Register &rn, const Register &rm)
void Umaddl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void Adds(const Register &rd, const Register &rn, const Operand &operand)
void SmiUntag(Register reg, SBit s=LeaveCC)
void Fcvtau(const Register &rd, const VRegister &fn)
void Fmin(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Bind(Label *label, BranchTargetIdentifier id=BranchTargetIdentifier::kNone)
void Csneg(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void Umsubl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void Bic(const VRegister &vd, const int imm8, const int left_shift=0)
void Fmax(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Fnmadd(const VRegister &fd, const VRegister &fn, const VRegister &fm, const VRegister &fa)
void Ands(const Register &rd, const Register &rn, const Operand &operand)
void AssertPositiveOrZero(Register value) NOOP_UNLESS_DEBUG_CODE
void Uxth(const Register &rd, const Register &rn)
void Fcvtzu(const Register &rd, const VRegister &fn)
void CompareAndBranch(const Register &lhs, const Operand &rhs, Condition cond, Label *label)
void Fmov(VRegister fd, VRegister fn)
void Ngcs(const Register &rd, const Operand &operand)
void Fcvtms(const Register &rd, const VRegister &fn)
void Msr(SystemRegister sysreg, const Register &rt)
void Lsr(const Register &rd, const Register &rn, unsigned shift)
void Tst(const Register &rn, const Operand &operand)
void Smsubl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void Fminnm(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Bfxil(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void Sxth(const Register &rd, const Register &rn)
void AssertFeedbackVector(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void LoadRootRelative(Register destination, int32_t offset) final
void JumpIfSmi(Register value, Label *smi_label)
void PopHelper(int count, int size, const CPURegister &dst0, const CPURegister &dst1, const CPURegister &dst2, const CPURegister &dst3)
void Poke(const CPURegister &src, const Operand &offset)
void Fabs(const VRegister &fd, const VRegister &fn)
void Smull(const Register &rd, const Register &rn, const Register &rm)
void Cmn(const Register &rn, const Operand &operand)
void Sbc(const Register &rd, const Register &rn, const Operand &operand)
void Ror(const Register &rd, const Register &rs, unsigned shift)
void TestAndBranchIfAllClear(const Register &reg, const uint64_t bit_pattern, Label *label)
void Smaddl(const Register &rd, const Register &rn, const Register &rm, const Register &ra)
void B(Label *label, BranchType type, Register reg=NoReg, int bit=-1)
void Clz(const Register &rd, const Register &rn)
void Peek(const CPURegister &dst, const Operand &offset)
void Fmaxnm(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Uxtw(const Register &rd, const Register &rn)
void Tbz(const Register &rt, unsigned bit_pos, Label *label)
void JumpIfUnsignedLessThan(Register x, int32_t y, Label *dest)
void Scvtf(const VRegister &fd, const Register &rn, unsigned fbits=0)
void Dmb(BarrierDomain domain, BarrierType type)
void Ccmn(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond)
void Mrs(const Register &rt, SystemRegister sysreg)
void Cinc(const Register &rd, const Register &rn, Condition cond)
void Debug(const char *message, uint32_t code, Instr params=BREAK)
void CompareTaggedAndBranch(const Register &lhs, const Operand &rhs, Condition cond, Label *label)
void Lsl(const Register &rd, const Register &rn, unsigned shift)
void SmiTag(Register reg, SBit s=LeaveCC)
void Fcvtns(const Register &rd, const VRegister &fn)
void Umull(const Register &rd, const Register &rn, const Register &rm)
void Eor(const Register &rd, const Register &rn, const Operand &operand)
void Sbcs(const Register &rd, const Register &rn, const Operand &operand)
void Fdiv(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Fmadd(const VRegister &fd, const VRegister &fn, const VRegister &fm, const VRegister &fa)
void Ldr(const CPURegister &rt, const Operand &imm)
void Ubfiz(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void Uxtb(const Register &rd, const Register &rn)
void AssertSmi(Register object, AbortReason reason=AbortReason::kOperandIsNotASmi) NOOP_UNLESS_DEBUG_CODE
void Mov(const Register &rd, const Operand &operand, DiscardMoveMode discard_mode=kDontDiscardForSameWReg)
void JumpIfEqual(Register x, int32_t y, Label *dest)
void Sbfiz(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void And(Register dst, Register src1, const Operand &src2, Condition cond=al)
void Csinc(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void Sdiv(const Register &rd, const Register &rn, const Register &rm)
void Negs(const Register &rd, const Operand &operand)
void BindExceptionHandler(Label *label)
void Fadd(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Fcmp(const VRegister &fn, const VRegister &fm)
void Fccmp(const VRegister &fn, const VRegister &fm, StatusFlags nzcv, Condition cond)
void Ucvtf(const VRegister &fd, const Register &rn, unsigned fbits=0)
void Rbit(const Register &rd, const Register &rn)
void Orn(const Register &rd, const Register &rn, const Operand &operand)
void LogicalMacro(const Register &rd, const Register &rn, const Operand &operand, LogicalOp op)
void Cls(const Register &rd, const Register &rn)
void Sbfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void Tbnz(const Register &rt, unsigned bit_pos, Label *label)
void Subs(const Register &rd, const Register &rn, const Operand &operand)
void Rev(const Register &rd, const Register &rn)
void Movi(const VRegister &vd, uint64_t imm, Shift shift=LSL, int shift_amount=0)
void Cset(const Register &rd, Condition cond)
void JumpIfNotSmi(Register value, Label *not_smi_label)
void Smulh(const Register &rd, const Register &rn, const Register &rm)
void Fnmsub(const VRegister &fd, const VRegister &fn, const VRegister &fm, const VRegister &fa)
void Claim(int64_t count, uint64_t unit_size=kXRegSize)
void Cneg(const Register &rd, const Register &rn, Condition cond)
void Fsub(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Fcvtnu(const Register &rd, const VRegister &fn)
void Fcvtas(const Register &rd, const VRegister &fn)
void Bfi(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void Ubfx(Register dst, Register src, int lsb, int width, Condition cond=al)
void Cinv(const Register &rd, const Register &rn, Condition cond)
void Fcvtzs(const Register &rd, const VRegister &fn)
void CmpTagged(const Register &r1, const Register &r2)
void Check(Condition cond, AbortReason reason)
void TestAndBranchIfAnySet(const Register &reg, const uint64_t bit_pattern, Label *label)
void Fcvtmu(const Register &rd, const VRegister &fn)
void Mvn(const Register &rd, uint64_t imm)
void JumpIfLessThan(Register x, int32_t y, Label *dest)
void Ccmp(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond)
void Adc(const Register &rd, const Register &rn, const Operand &operand)
void Sub(const Register &rd, const Register &rn, const Operand &operand)
void CzeroX(const Register &rd, Condition cond)
void Rev16(const Register &rd, const Register &rn)
void Fcsel(const VRegister &fd, const VRegister &fn, const VRegister &fm, Condition cond)
void ConditionalCompareMacro(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond, ConditionalCompareOp op)
void Eon(const Register &rd, const Register &rn, const Operand &operand)
void Cbnz(const Register &rt, Label *label)
void Cbz(const Register &rt, Label *label)
void Csetm(const Register &rd, Condition cond)
void Csinv(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void Sxtw(const Register &rd, const Register &rn)
void Rev32(const Register &rd, const Register &rn)
void Movi64bitHelper(const VRegister &vd, uint64_t imm)
void PushHelper(int count, int size, const CPURegister &src0, const CPURegister &src1, const CPURegister &src2, const CPURegister &src3)
void Extr(const Register &rd, const Register &rn, const Register &rm, unsigned lsb)
void Movk(const Register &rd, uint64_t imm, int shift=-1)
void Fmul(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void Fmsub(const VRegister &fd, const VRegister &fn, const VRegister &fm, const VRegister &fa)
void Ngc(const Register &rd, const Operand &operand)
unsigned shift_amount() const
constexpr bool is_valid() const
Register AcquireSameSizeAs(const Register &reg)
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
Label label
int32_t offset
LiftoffRegister reg
EmitFn fn
int x
#define DEFINE_LOAD_FUNCTION(FN, OP)
#define DEFINE_SWP_FUNCTION(FN, OP)
#define DEFINE_FUNCTION(FN, REGTYPE, REG, OP)
#define ATOMIC_MEMORY_SIMPLE_MACRO_LIST(V, DEF, MASM_PRE, ASM_PRE)
#define STLX_MACRO_LIST(V)
#define CAS_SINGLE_MACRO_LIST(V)
#define LDA_STL_MACRO_LIST(V)
#define LSPAIR_MACRO_LIST(V)
#define ATOMIC_MEMORY_STORE_MACRO_MODES(V, MASM, ASM)
#define ATOMIC_MEMORY_LOAD_MACRO_MODES(V, MASM, ASM)
#define LS_MACRO_LIST(V)
#define CAS_PAIR_MACRO_LIST(V)
constexpr unsigned CountTrailingZeros(T value)
Definition bits.h:144
constexpr bool IsPowerOfTwo(T value)
Definition bits.h:187
V8_INLINE Dest bit_cast(Source const &source)
Definition macros.h:95
constexpr Register kRootRegister
constexpr Opcode ADD
constexpr AddrMode PreIndex
constexpr Opcode ORR
V8_EXPORT_PRIVATE int CountSetBits(uint64_t value, int width)
constexpr Opcode AND
constexpr LogicalOp EON
constexpr Opcode BIC
constexpr int kBitsPerByte
Definition globals.h:682
MemOperand ExitFrameCallerStackSlotOperand(int index)
const int kSmiTagSize
Definition v8-internal.h:87
int MaskToBit(uint64_t mask)
constexpr ShiftOp LSL
constexpr int B
constexpr ConditionalCompareOp CCMP
constexpr int L
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr Opcode SBC
constexpr LogicalOp ORN
constexpr bool SmiValuesAre31Bits()
constexpr LogicalOp ANDS
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
const int kHeapObjectTag
Definition v8-internal.h:72
const int kSmiValueSize
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool SmiValuesAre32Bits()
constexpr Opcode SUB
constexpr LogicalOp BICS
constexpr Register kPtrComprCageBaseRegister
const int kSmiTag
Definition v8-internal.h:86
constexpr Opcode ADC
constexpr Opcode EOR
V8_EXPORT_PRIVATE bool AreSameSizeAndType(const CPURegister &reg1, const CPURegister &reg2=NoCPUReg, const CPURegister &reg3=NoCPUReg, const CPURegister &reg4=NoCPUReg, const CPURegister &reg5=NoCPUReg, const CPURegister &reg6=NoCPUReg, const CPURegister &reg7=NoCPUReg, const CPURegister &reg8=NoCPUReg)
constexpr int kXRegSize
MemOperand ExitFrameStackSlotOperand(int offset)
constexpr Register padreg
constexpr ConditionalCompareOp CCMN
#define ror(value, bits)
Definition sha-256.cc:30
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387