5#if V8_TARGET_ARCH_ARM64
36#define __ ACCESS_MASM(masm)
44 return CPURegList(fp_scratch1, fp_scratch2);
51#if V8_ENABLE_WEBASSEMBLY
64 int size = registers.RegisterSizeInBytes();
70 const CPURegister& src0 = registers.PopHighestIndex();
71 const CPURegister& src1 = registers.PopHighestIndex();
72 const CPURegister& src2 = registers.PopHighestIndex();
73 const CPURegister& src3 = registers.PopHighestIndex();
74 int count = count_before - registers.Count();
75 PushHelper(count, size, src0, src1, src2, src3);
80 int size = registers.RegisterSizeInBytes();
90 const CPURegister& dst0 = registers.PopLowestIndex();
91 const CPURegister& dst1 = registers.PopLowestIndex();
92 const CPURegister& dst2 = registers.PopLowestIndex();
93 const CPURegister& dst3 = registers.PopLowestIndex();
94 int count = count_before - registers.Count();
95 PopHelper(count, size, dst0, dst1, dst2, dst3);
100 if (reglist.Count() % 2 != 0) {
101 DCHECK(!reglist.has(xzr));
106 int size = registers.RegisterSizeInBytes();
114 const CPURegister& src0 = registers.PopLowestIndex();
115 const CPURegister& src1 = registers.PopLowestIndex();
121 if (reglist.Count() % 2 != 0) {
122 DCHECK(!reglist.has(xzr));
127 int size = registers.RegisterSizeInBytes();
135 const CPURegister& dst0 = registers.PopHighestIndex();
136 const CPURegister& dst1 = registers.PopHighestIndex();
142 Register exclusion) const {
144 list.Remove(exclusion);
147 int bytes = list.TotalSizeInBytes();
152 bytes += fp_list.TotalSizeInBytes();
158 Register exclusion) {
161 list.Remove(exclusion);
166 int bytes = list.TotalSizeInBytes();
172 bytes += fp_list.TotalSizeInBytes();
184 bytes += fp_list.TotalSizeInBytes();
188 list.Remove(exclusion);
192 bytes += list.TotalSizeInBytes();
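// The caller-saved push/pop helpers above accumulate the number of bytes
// pushed or popped: the general-purpose caller-saved list (minus the excluded
// register) always contributes, and the FP/SIMD list is added only when
// floating-point registers are saved as well, so callers can account for the
// exact stack space used.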
202 if (operand.NeedsRelocation(this)) {
204 Ldr(temp, operand.immediate());
207 } else if (operand.IsImmediate()) {
208 int64_t immediate = operand.ImmediateValue();
209 unsigned reg_size = rd.SizeInBits();
214 immediate = ~immediate;
222 DCHECK(rd.Is64Bits() || is_uint32(immediate));
225 if (immediate == 0) {
240 } else if ((rd.Is64Bits() && (immediate == -1L)) ||
241 (rd.Is32Bits() && (immediate == 0xFFFFFFFFL))) {
260 unsigned n, imm_s, imm_r;
261 if (IsImmLogical(immediate, reg_size, &n, &imm_s, &imm_r)) {
266 Register temp = temps.AcquireSameSizeAs(rn);
276 Logical(temp, rn, imm_operand, op);
279 Logical(rd, rn, imm_operand, op);
283 } else if (operand.IsExtendedRegister()) {
284 DCHECK(operand.reg().SizeInBits() <= rd.SizeInBits());
288 DCHECK(operand.reg().Is64Bits() ||
289 ((operand.extend() != UXTX) && (operand.extend() != SXTX)));
290 Register temp = temps.AcquireSameSizeAs(rn);
292 operand.shift_amount());
297 DCHECK(operand.IsShiftedRegister());
303 DCHECK(allow_macro_instructions());
304 DCHECK(is_uint32(imm) || is_int32(imm) || rd.Is64Bits());
327 unsigned reg_size = rd.SizeInBits();
334 uint64_t ignored_halfword = 0;
335 bool invert_move = false;
339 ignored_halfword = 0xFFFFL;
346 Register temp = rd.IsSP() ? temps.AcquireSameSizeAs(rd) : rd;
351 bool first_mov_done = false;
352 for (int i = 0; i < (rd.SizeInBits() / 16); i++) {
353 uint64_t imm16 = (imm >> (16 * i)) & 0xFFFFL;
354 if (imm16 != ignored_halfword) {
355 if (!first_mov_done) {
357 movn(temp, (~imm16) & 0xFFFFL, 16 * i);
359 movz(temp, imm16, 16 * i);
361 first_mov_done = true;
364 movk(temp, imm16, 16 * i);
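// Taken together, the loop above materializes the immediate one 16-bit
// halfword at a time: the first halfword that differs from ignored_halfword
// is emitted with movz (or movn when invert_move is set for mostly-ones
// values), and every later halfword is merged in with movk at the matching
// 16-bit shift; halfwords equal to ignored_halfword need no instruction.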
380 if (reference.IsIsolateFieldId()) {
387 CHECK(!reference.IsIsolateFieldId());
388 Mov(rd, Operand(reference));
397 DCHECK(allow_macro_instructions());
403 Register dst = (rd.IsSP()) ? temps.AcquireSameSizeAs(rd) : rd;
405 if (operand.NeedsRelocation(this)) {
416 Handle<HeapObject> x(
417 reinterpret_cast<Address*>(operand.ImmediateValue()));
425 } else if (operand.IsImmediate()) {
427 Mov(dst, operand.ImmediateValue());
428 } else if (operand.IsShiftedRegister() && (operand.shift_amount() != 0)) {
432 EmitShift(dst, operand.reg(), operand.shift(), operand.shift_amount());
433 } else if (operand.IsExtendedRegister()) {
437 operand.shift_amount());
448 if (rd != operand.reg() ||
464 return Mov(rd, Operand(smi));
469 int byte1 = (imm & 0xFF);
470 int byte2 = ((imm >> 8) & 0xFF);
471 if (byte1 == byte2) {
472 movi(vd.Is64Bits() ? vd.V8B() : vd.V16B(), byte1);
473 } else if (byte1 == 0) {
475 } else if (byte2 == 0) {
477 } else if (byte1 == 0xFF) {
478 mvni(vd, ~byte2 & 0xFF, LSL, 8);
479 } else if (byte2 == 0xFF) {
480 mvni(vd, ~byte1 & 0xFF);
492 uint8_t bytes[sizeof(imm)];
493 memcpy(bytes, &imm, sizeof(imm));
497 bool all0orff = true;
498 for (int i = 0; i < 4; ++i) {
499 if ((bytes[i] != 0) && (bytes[i] != 0xFF)) {
505 if (all0orff == true) {
506 movi(vd.Is64Bits() ? vd.V1D() : vd.V2D(), ((imm << 32) | imm));
512 for (int i = 0; i < 4; i++) {
513 if ((imm & (0xFF << (i * 8))) == imm) {
520 for (int i = 0; i < 4; i++) {
521 uint32_t mask = ~(0xFF << (i * 8));
529 if ((imm & 0xFF00FFFF) == 0x0000FFFF) {
535 if ((imm & 0xFFFF00FF) == 0x000000FF) {
541 if ((imm & 0xFF00FFFF) == 0xFF000000) {
542 mvni(vd, ~bytes[2] & 0xFF, MSL, 16);
546 if ((imm & 0xFFFF00FF) == 0xFFFF0000) {
547 mvni(vd, ~bytes[1] & 0xFF, MSL, 8);
552 if (((imm >> 16) & 0xFFFF) == (imm & 0xFFFF)) {
569 bool all0orff = true;
570 for (int i = 0; i < 8; ++i) {
571 int byteval = (imm >> (i * 8)) & 0xFF;
572 if (byteval != 0 && byteval != 0xFF) {
577 if (all0orff == true) {
584 if (((imm >> 32) & 0xFFFFFFFF) == (imm & 0xFFFFFFFF)) {
585 Movi32bitHelper(vd.Is64Bits() ? vd.V2S() : vd.V4S(), imm & 0xFFFFFFFF);
604 DCHECK(allow_macro_instructions());
605 if (shift_amount != 0 || shift != LSL) {
607 } else if (vd.Is8B() || vd.Is16B()) {
611 } else if (vd.Is4H() || vd.Is8H()) {
614 } else if (vd.Is2S() || vd.Is4S()) {
637 Ins(vd.V2D(), 1, temp);
642 DCHECK(allow_macro_instructions());
644 if (operand.NeedsRelocation(this)) {
645 Ldr(rd, operand.immediate());
648 } else if (operand.IsImmediate()) {
650 Mov(rd, ~operand.ImmediateValue());
652 } else if (operand.IsExtendedRegister()) {
656 operand.shift_amount());
667#define HALFWORD(idx) (((imm >> ((idx)*16)) & 0xFFFF) ? 1u : 0u)
668 switch (reg_size / 16) {
672 return HALFWORD(0) + HALFWORD(1);
674 return HALFWORD(0) + HALFWORD(1) + HALFWORD(2) + HALFWORD(3);
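// The HALFWORD macro and switch above count how many 16-bit halfwords of the
// immediate are non-zero; the move-immediate helpers use this count to decide
// whether the value is cheaper to build from zero (movz) or from all-ones
// (movn).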
694 const Operand& operand,
698 if (operand.NeedsRelocation(this)) {
701 Ldr(temp, operand.immediate());
704 } else if ((operand.IsShiftedRegister() && (operand.shift_amount() == 0)) ||
705 (operand.IsImmediate() &&
715 Register temp = temps.AcquireSameSizeAs(rn);
722 const Operand& operand, Condition cond) {
723 DCHECK(allow_macro_instructions());
726 if (operand.IsImmediate()) {
729 int64_t imm = operand.ImmediateValue();
732 csel(rd, rn, zr, cond);
733 } else if (imm == 1) {
734 csinc(rd, rn, zr, cond);
735 } else if (imm == -1) {
736 csinv(rd, rn, zr, cond);
739 Register temp = temps.AcquireSameSizeAs(rn);
741 csel(rd, rn, temp, cond);
743 } else if (operand.IsShiftedRegister() && (operand.shift_amount() == 0)) {
745 csel(rd, rn, operand.reg(), cond);
749 Register temp = temps.AcquireSameSizeAs(rn);
751 csel(rd, rn, temp, cond);
757 unsigned n, imm_s, imm_r;
758 int reg_size = dst.SizeInBits();
759 if (IsImmMovz(imm, reg_size) && !dst.IsSP()) {
764 } else if (IsImmMovn(imm, reg_size) && !dst.IsSP()) {
769 } else if (IsImmLogical(imm, reg_size, &n, &imm_s, &imm_r)) {
780 int reg_size = dst.SizeInBits();
787 if (reg_size == 64) {
799 shift_low = std::min(shift_low, 4);
801 int64_t imm_low = imm >> shift_low;
809 int64_t imm_high = (imm << shift_high) | ((INT64_C(1) << shift_high) - 1);
814 return Operand(dst, LSL, shift_low);
818 return Operand(dst, LSR, shift_high);
830 if (operand.IsZero() && rd == rn && rd.Is64Bits() && rn.Is64Bits() &&
831 !operand.NeedsRelocation(this) && (S == LeaveFlags)) {
836 if (operand.NeedsRelocation(this)) {
838 Register temp = temps.AcquireSameSizeAs(rn);
840 operand.ImmediateRMode()));
841 Ldr(temp, operand.immediate());
843 } else if ((operand.IsImmediate() &&
845 (rn.IsZero() && !operand.IsShiftedRegister()) ||
846 (operand.IsShiftedRegister() && (operand.shift() == ROR))) {
848 Register temp = temps.AcquireSameSizeAs(rn);
849 if (operand.IsImmediate()) {
859 } else if (rn == sp) {
863 Operand imm_operand =
865 AddSub(rd, rn, imm_operand, S, op);
871 AddSub(rd, rn, operand, S, op);
879 DCHECK(rd.SizeInBits() == rn.SizeInBits());
882 if (operand.NeedsRelocation(this)) {
884 Ldr(temp, operand.immediate());
887 } else if (operand.IsImmediate() ||
888 (operand.IsShiftedRegister() && (operand.shift() == ROR))) {
890 Register temp = temps.AcquireSameSizeAs(rn);
894 } else if (operand.IsShiftedRegister() && (operand.shift_amount() != 0)) {
896 DCHECK(operand.reg().SizeInBits() == rd.SizeInBits());
901 Register temp = temps.AcquireSameSizeAs(rn);
902 EmitShift(temp, operand.reg(), operand.shift(), operand.shift_amount());
905 } else if (operand.IsExtendedRegister()) {
907 DCHECK(operand.reg().SizeInBits() <= rd.SizeInBits());
911 DCHECK(operand.reg().Is64Bits() ||
912 ((operand.extend() != UXTX) && (operand.extend() != SXTX)));
913 Register temp = temps.AcquireSameSizeAs(rn);
915 operand.shift_amount());
929 if (addr.IsImmediateOffset()) {
930 int64_t offset = addr.offset();
939 } else if (addr.IsRegisterOffset() && (addr.extend() == UXTW) &&
940 (addr.shift_amount() == 0)) {
952 int64_t offset = addr.offset();
954 if (addr.IsRegisterOffset() ||
955 (is_imm_unscaled && (addr.IsPostIndex() || addr.IsPreIndex()))) {
958 } else if (addr.IsImmediateOffset()) {
963 Register temp = temps.AcquireSameSizeAs(addr.base());
966 } else if (addr.IsPostIndex()) {
973 DCHECK(!is_imm_unscaled && addr.IsPreIndex());
980 const CPURegister& rt2,
983 if (addr.IsRegisterOffset()) {
986 Register temp = temps.AcquireSameSizeAs(base);
987 Add(temp, base, addr.regoffset());
992 int64_t offset = addr.offset();
1002 if (addr.IsImmediateOffset()) {
1004 Register temp = temps.AcquireSameSizeAs(base);
1007 } else if (addr.IsPostIndex()) {
1011 DCHECK(addr.IsPreIndex());
1019 DCHECK(allow_macro_instructions());
1028 if (label->is_bound()) {
1035 adr(rd, min_adr_offset);
1036 Add(rd, rd, label_offset - min_adr_offset);
1040 Register scratch = temps.AcquireX();
1042 InstructionAccurateScope scope(this,
1083 DCHECK(allow_macro_instructions());
1086 bool need_extra_instructions =
1100 DCHECK(allow_macro_instructions());
1102 bool need_extra_instructions =
1107 tbz(rt, bit_pos, &done);
1116 DCHECK(allow_macro_instructions());
1118 bool need_extra_instructions =
1123 tbnz(rt, bit_pos, &done);
1132 DCHECK(allow_macro_instructions());
1134 bool need_extra_instructions =
1148 DCHECK(allow_macro_instructions());
1150 bool need_extra_instructions =
1166 Label* is_not_representable, Label* is_representable) {
1167 DCHECK(allow_macro_instructions());
1176 if ((is_not_representable != nullptr) && (is_representable != nullptr)) {
1177 B(is_not_representable, vs);
1178 B(is_representable);
1179 } else if (is_not_representable != nullptr) {
1180 B(is_not_representable, vs);
1181 } else if (is_representable != nullptr) {
1182 B(is_representable, vc);
1187 int case_value_base, Label** labels,
1190 Label fallthrough, jump_table;
1191 if (case_value_base != 0) {
1192 Sub(value, value, case_value_base);
1194 Cmp(value, Immediate(num_labels));
1195 B(&fallthrough, hs);
1196 Adr(table, &jump_table);
1203 BlockPoolsScope no_pool_inbetween(this, jump_table_size);
1206 for (int i = 0; i < num_labels; ++i) {
1215 const CPURegister& src2, const CPURegister& src3,
1216 const CPURegister& src4, const CPURegister& src5,
1217 const CPURegister& src6, const CPURegister& src7) {
1220 int count = 5 + src5.is_valid() + src6.is_valid() + src7.is_valid();
1221 int size = src0.SizeInBytes();
1225 PushHelper(count - 4, size, src4, src5, src6, src7);
1229 const CPURegister& dst2, const CPURegister& dst3,
1230 const CPURegister& dst4, const CPURegister& dst5,
1231 const CPURegister& dst6, const CPURegister& dst7) {
1238 int count = 5 + dst5.is_valid() + dst6.is_valid() + dst7.is_valid();
1239 int size = dst0.SizeInBytes();
1242 PopHelper(4, size, dst0, dst1, dst2, dst3);
1243 PopHelper(count - 4, size, dst4, dst5, dst6, dst7);
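// Pushes and pops of up to eight registers are split into two helper calls:
// the first PushHelper/PopHelper handles the leading four registers and the
// second handles the remaining count - 4 (src4..src7 / dst4..dst7), so each
// helper only ever deals with at most four same-sized registers.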
1248 Register temp = temps.AcquireSameSizeAs(count);
1250 Label loop, leftover2, leftover1, done;
1252 Subs(temp, count, 4);
1257 Subs(temp, temp, 4);
1258 PushHelper(4, src.SizeInBytes(), src, src, src, src);
1263 Tbz(count, 1, &leftover1);
1268 Tbz(count, 0, &done);
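// PushMultipleTimes pushes `src` onto the stack `count` times: the loop body
// pushes four copies per iteration while at least four remain, then the Tbz
// tests on bit 1 and bit 0 of `count` handle the leftover two copies and the
// final single copy.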
1275 const CPURegister& src1,
1276 const CPURegister& src2,
1277 const CPURegister& src3) {
1279 InstructionAccurateScope scope(this);
1282 DCHECK(size == src0.SizeInBytes());
1288 DCHECK(src1.IsNone() && src2.IsNone() && src3.IsNone());
1292 DCHECK(src2.IsNone() && src3.IsNone());
1313 const CPURegister& dst1,
const CPURegister& dst2,
1314 const CPURegister& dst3) {
1316 InstructionAccurateScope scope(this);
1319 DCHECK(size == dst0.SizeInBytes());
1325 DCHECK(dst1.IsNone() && dst2.IsNone() && dst3.IsNone());
1329 DCHECK(dst2.IsNone() && dst3.IsNone());
1367 InstructionAccurateScope scope(this);
1386#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1401 InstructionAccurateScope scope(this);
1407#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
1427#ifndef V8_ENABLE_LEAPTIERING
1429void TailCallOptimizedCodeSlot(MacroAssembler* masm,
1430 Register optimized_code_entry,
1440 Label heal_optimized_code_slot;
1444 __ LoadWeakValue(optimized_code_entry, optimized_code_entry,
1445 &heal_optimized_code_slot);
1448 __ LoadCodePointerField(
1449 optimized_code_entry,
1454 __ AssertCode(optimized_code_entry);
1455 __ JumpIfCodeIsMarkedForDeoptimization(optimized_code_entry, scratch,
1456 &heal_optimized_code_slot);
1460 __ ReplaceClosureCodeWithOptimizedCode(optimized_code_entry, x1);
1462 __ Move(x2, optimized_code_entry);
1468 __ bind(&heal_optimized_code_slot);
1469 __ GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot);
1475#ifdef V8_ENABLE_DEBUG_CODE
1478 IsObjectType(object, scratch, scratch, FEEDBACK_CELL_TYPE);
1479 Assert(eq, AbortReason::kExpectedFeedbackCell);
1484 IsObjectType(object, scratch, scratch, FEEDBACK_VECTOR_TYPE);
1485 Assert(eq, AbortReason::kExpectedFeedbackVector);
1491 Register optimized_code, Register closure) {
1495#ifdef V8_ENABLE_LEAPTIERING
1518 FrameScope scope(this, StackFrame::INTERNAL);
1526 static_assert(kJSDispatchHandleShift > 0);
1546#ifndef V8_ENABLE_LEAPTIERING
1551 Register flags, Register feedback_vector, CodeKind current_code_kind) {
1555 uint32_t flag_mask =
1557 Ldrh(flags, FieldMemOperand(feedback_vector, FeedbackVector::kFlagsOffset));
1558 Tst(flags, flag_mask);
1563 Register flags, Register feedback_vector, CodeKind current_code_kind,
1564 Label* flags_need_processing) {
1568 flags_need_processing);
1572 Register flags, Register feedback_vector) {
1575 Label maybe_has_optimized_code, maybe_needs_logging;
1579 &maybe_needs_logging);
1582 bind(&maybe_needs_logging);
1584 &maybe_has_optimized_code);
1587 bind(&maybe_has_optimized_code);
1590 Register optimized_code_entry = x7;
1593 FeedbackVector::kMaybeOptimizedCodeOffset));
1594 TailCallOptimizedCodeSlot(this, optimized_code_entry, x4);
1605#ifdef V8_ENABLE_DEBUG_CODE
1609 HardAbortScope hard_abort(this);
1616 Check(eq, AbortReason::kUnexpectedStackPointer);
1623 if (!v8_flags.slow_debug_code) return;
1625 Label unexpected_mode, done;
1627 if (fpcr.IsNone()) {
1628 fpcr = temps.AcquireX();
1638 Tst(fpcr, RMode_mask);
1641 Bind(&unexpected_mode);
1642 Abort(AbortReason::kUnexpectedFPCRMode);
1664 if (!v8_flags.slow_debug_code) return;
1667 Check(ls, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
1679 Check(eq, AbortReason::kOperandIsNotAMap);
1691 Check(eq, AbortReason::kOperandIsNotACode);
1697 AssertNotSmi(object, AbortReason::kOperandIsASmiAndNotAConstructor);
1704 Tst(temp, Operand(Map::Bits1::IsConstructorBit::kMask));
1706 Check(ne, AbortReason::kOperandIsNotAConstructor);
1712 AssertNotSmi(object, AbortReason::kOperandIsASmiAndNotAFunction);
1718 LAST_JS_FUNCTION_TYPE);
1719 Check(ls, AbortReason::kOperandIsNotAFunction);
1725 AssertNotSmi(object, AbortReason::kOperandIsASmiAndNotAFunction);
1732 Check(ls, AbortReason::kOperandIsNotACallableFunction);
1738 AssertNotSmi(object, AbortReason::kOperandIsASmiAndNotABoundFunction);
1743 IsObjectType(object, temp, temp, JS_BOUND_FUNCTION_TYPE);
1744 Check(eq, AbortReason::kOperandIsNotABoundFunction);
1756 Mov(object, Operand(object, LSR, 32));
1770 AssertNotSmi(object, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
1779 LAST_JS_GENERATOR_OBJECT_TYPE);
1781 Check(ls, AbortReason::kOperandIsNotAGeneratorObject);
1788 Register scratch = temps.AcquireX();
1789 Label done_checking;
1791 JumpIfRoot(object, RootIndex::kUndefinedValue, &done_checking);
1794 Assert(eq, AbortReason::kExpectedUndefinedOrCell);
1795 Bind(&done_checking);
1803 Tbz(value, sign_bit, &done);
1804 Abort(AbortReason::kUnexpectedNegativeValue);
1843 Abort(abort_reason);
1850 Check(cond, reason);
1862 Register dst_reg = scope.AcquireX();
1869 Register slot_count) {
1870 DCHECK(!dst.IsZero() && !src.IsZero());
1877 CopyDoubleWordsMode mode) {
1889 Label pointer1_below_pointer2;
1890 Subs(pointer1, pointer1, pointer2);
1891 B(lt, &pointer1_below_pointer2);
1892 Cmp(pointer1, count);
1893 Check(ge, AbortReason::kOffsetOutOfRange);
1894 Bind(&pointer1_below_pointer2);
1895 Add(pointer1, pointer1, pointer2);
1898 "pointers must be the same size as doubles");
1909 VRegister temp0 = scope.AcquireD();
1910 VRegister temp1 = scope.AcquireD();
1912 Label pairs, loop, done;
1916 Sub(count, count, 1);
1931 Sub(count, count, 2);
1956 const VRegister& src) {
1962 Fsub(dst, src, fp_zero);
2000 if (dst == src) return;
2010 } else if (dst1 != src0) {
2021 DCHECK(lhs.IsSameSizeAndType(rhs));
2031 DCHECK(lhs.IsSameSizeAndType(rhs));
2036 temp = temps.AcquireS();
2037 } else if (lhs.IsD()) {
2038 temp = temps.AcquireD();
2041 temp = temps.AcquireQ();
2049 int num_arguments) {
2056 CHECK(f->nargs < 0 || f->nargs == num_arguments);
2059 Mov(x0, num_arguments);
2062 bool switch_to_central = options().is_wasm;
2067 bool builtin_exit_frame) {
2077 if (function->nargs >= 0) {
2082 Mov(x0, function->nargs);
2088#if V8_HOST_ARCH_ARM64
2099 return v8_flags.sim_stack_alignment;
2104 int num_of_reg_args,
2106 Label* return_location) {
2107 return CallCFunction(function, num_of_reg_args, 0, set_isolate_data_slots,
2112 int num_of_reg_args, int num_of_double_args,
2114 Label* return_location) {
2119 Mov(temp, function);
2120 return CallCFunction(temp, num_of_reg_args, num_of_double_args,
2121 set_isolate_data_slots, return_location);
2125 int num_of_double_args,
2127 Label* return_location) {
2139 temps.Include(CPURegList(64, {x8, x9, x10, function}));
2140 temps.Exclude(function);
2146 Register pc_scratch = temps.AcquireX();
2148 Adr(pc_scratch, &get_pc);
2167 if (return_location) bind(return_location);
2187 return call_pc_offset;
2191 int constant_index) {
2218 ExternalReference reference, Register scratch) {
2220 if (reference.IsIsolateFieldId()) {
2223 if (options().enable_root_relative_access) {
2230 if (options().isolate_independent_code) {
2248 Mov(scratch, reference);
2253 if (cond == nv) return;
2262 if (cond == nv) return;
2272 Mov(temp, Immediate(imm, rmode));
2292 offset -= reinterpret_cast<int64_t>(pc);
2311 if (isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
2319 JumpHelper(static_cast<int64_t>(index), rmode, cond);
2321 Jump(code.address(), rmode, cond);
2327 Register scratch = temps.AcquireX();
2328 Mov(scratch, reference);
2333 BlockPoolsScope scope(this);
2338 BlockPoolsScope scope(this);
2351 BlockPoolsScope scope(this);
2354 if (isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
2383 Add(target, target, IsolateData::builtin_entry_table_offset());
2394 Ldr(target, MemOperand(target, IsolateData::builtin_entry_table_offset()));
2419 switch (options().builtin_call_jump_mode) {
2422 Register scratch = temps.AcquireX();
2432 Register scratch = temps.AcquireX();
2438 if (options().use_pc_relative_calls_and_jumps_for_mksnapshot) {
2445 Register scratch = temps.AcquireX();
2470 switch (options().builtin_call_jump_mode) {
2491 if (options().use_pc_relative_calls_and_jumps_for_mksnapshot) {
2506 Register code_object,
2509#ifdef V8_ENABLE_SANDBOX
2510 LoadCodeEntrypointViaCodePointer(
2536 if (code_object != x17) {
2537 Mov(x17, code_object);
2543 uint16_t argument_count) {
2545#ifdef V8_ENABLE_LEAPTIERING
2550 Ldr(dispatch_handle.W(),
2552 LoadEntrypointAndParameterCountFromJSDispatchTable(code, parameter_count,
2553 dispatch_handle, scratch);
2556 SbxCheck(le, AbortReason::kJSSignatureMismatch);
2566#if V8_ENABLE_LEAPTIERING
2568 uint16_t argument_count) {
2579 static_assert(!JSDispatchTable::kSupportsCompaction);
2580 LoadEntrypointFromJSDispatchTable(code, dispatch_handle, scratch);
2591#ifdef V8_ENABLE_LEAPTIERING
2604#ifdef V8_ENABLE_WEBASSEMBLY
2606void MacroAssembler::ResolveWasmCodePointer(Register target,
2607 uint64_t signature_hash) {
2609 ExternalReference global_jump_table =
2610 ExternalReference::wasm_code_pointer_table();
2612 Register scratch = temps.AcquireX();
2613 Mov(scratch, global_jump_table);
2614#ifdef V8_ENABLE_SANDBOX
2615 static_assert(sizeof(wasm::WasmCodePointerTableEntry) == 16);
2616 Add(target, scratch, Operand(target, LSL, 4));
2618 MemOperand(target, wasm::WasmCodePointerTable::kOffsetOfSignatureHash));
2619 bool has_second_tmp = temps.CanAcquire();
2620 Register signature_hash_register = has_second_tmp ? temps.AcquireX() : target;
2621 if (!has_second_tmp) {
2624 Mov(signature_hash_register, signature_hash);
2625 Cmp(scratch, signature_hash_register);
2626 SbxCheck(Condition::kEqual, AbortReason::kWasmSignatureMismatch);
2627 if (!has_second_tmp) {
2631 static_assert(sizeof(wasm::WasmCodePointerTableEntry) == 8);
2632 Add(target, scratch, Operand(target, LSL, 3));
2638void MacroAssembler::CallWasmCodePointer(Register target,
2639 uint64_t signature_hash,
2641 ResolveWasmCodePointer(target, signature_hash);
2649void MacroAssembler::CallWasmCodePointerNoSignatureCheck(Register target) {
2650 ExternalReference global_jump_table =
2651 ExternalReference::wasm_code_pointer_table();
2653 Register scratch = temps.AcquireX();
2654 Mov(scratch, global_jump_table);
2655 constexpr unsigned int kEntrySizeLog2 =
2656 std::bit_width(sizeof(wasm::WasmCodePointerTableEntry)) - 1;
2657 Add(target, scratch, Operand(target, LSL, kEntrySizeLog2));
2663void MacroAssembler::LoadWasmCodePointer(Register dst, MemOperand src) {
2664 static_assert(sizeof(WasmCodePointer) == 4);
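// In both configurations above, the code-pointer handle in `target` is scaled
// by the table entry size (16 bytes with the sandbox, where each entry also
// carries a signature hash to check; 8 bytes otherwise) and added to the
// table base to form the address of the entry to load.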
2680 temps.Exclude(x16, x17);
2683 Label return_location;
2684 Adr(x17, &return_location);
2685#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
2698 Check(eq, AbortReason::kReturnAddressNotFoundInFrame);
2702 Bind(&return_location);
2709 Mov(temp, Immediate(target, rmode));
2728 Register scratch = temps.AcquireX();
2736#ifdef V8_ENABLE_LEAPTIERING
2738 Label not_deoptimized;
2740 Abort(AbortReason::kInvalidDeoptimizedCode);
2741 Bind(¬_deoptimized);
2744 Label not_deoptimized;
2747 Bind(¬_deoptimized);
2753 Label* jump_deoptimization_entry_label) {
2755 BlockPoolsScope scope(this);
2756 bl(jump_deoptimization_entry_label);
2767 : IsolateData::jslimit_offset();
2773 Label* stack_overflow) {
2776 Register scratch = temps.AcquireX();
2785 Sub(scratch, sp, scratch);
2788 B(le, stack_overflow);
2792 Register actual_argument_count,
2799 Label regular_invoke;
2805 Register extra_argument_count = x2;
2806 Subs(extra_argument_count, formal_parameter_count, actual_argument_count);
2807 B(le, &regular_invoke);
2814 Label even_extra_count, skip_move;
2818 Mov(slots_to_copy, actual_argument_count);
2819 Mov(slots_to_claim, extra_argument_count);
2820 Tbz(extra_argument_count, 0, &even_extra_count);
2830 Add(slots_to_claim, extra_argument_count, 1);
2831 And(scratch, actual_argument_count, 1);
2832 Sub(slots_to_claim, slots_to_claim, Operand(scratch, LSL, 1));
2835 Bind(&even_extra_count);
2836 Cbz(slots_to_claim, &skip_move);
2838 Label stack_overflow;
2840 Claim(slots_to_claim);
2859 LoadRoot(undefined_value, RootIndex::kUndefinedValue);
2860 SlotAddress(pointer_next_value, actual_argument_count);
2861 Mov(count, extra_argument_count);
2863 Str(undefined_value,
2865 Subs(count, count, 1);
2873 Add(total_args_slots, actual_argument_count, extra_argument_count);
2874 Tbz(total_args_slots, 0, &skip);
2880 bind(&stack_overflow);
2888 Bind(®ular_invoke);
2893 Register expected_parameter_count_or_dispatch_handle,
2894 Register actual_parameter_count) {
2897 expected_parameter_count_or_dispatch_handle,
2898 actual_parameter_count));
2907 SmiTag(expected_parameter_count_or_dispatch_handle);
2908 SmiTag(actual_parameter_count);
2909 Push(expected_parameter_count_or_dispatch_handle, actual_parameter_count,
2916 expected_parameter_count_or_dispatch_handle);
2918 SmiUntag(expected_parameter_count_or_dispatch_handle);
2921#ifdef V8_ENABLE_LEAPTIERING
2923 Register function, Register actual_parameter_count, InvokeType type,
2937 argument_adaption_mode);
2941 Register function, Register new_target, Register actual_parameter_count,
2957 Register function, Register new_target, Register actual_parameter_count,
2966 Ldr(dispatch_handle.W(),
2970 Label debug_hook, continue_after_hook;
2972 Mov(x5, ExternalReference::debug_hook_on_function_call_address(isolate()));
2974 Cbnz(x5, &debug_hook);
2976 bind(&continue_after_hook);
2980 LoadRoot(x3, RootIndex::kUndefinedValue);
2985 Register expected_parameter_count = x2;
2986 LoadParameterCountFromJSDispatchTable(expected_parameter_count,
2987 dispatch_handle, scratch);
2988 InvokePrologue(expected_parameter_count, actual_parameter_count, type);
2995 dispatch_handle, scratch);
3014 actual_parameter_count);
3015 B(&continue_after_hook);
3021 Register expected_parameter_count,
3022 Register actual_parameter_count,
3031 Label debug_hook, continue_after_hook;
3033 Mov(x5, ExternalReference::debug_hook_on_function_call_address(isolate()));
3035 Cbnz(x5, &debug_hook);
3037 bind(&continue_after_hook);
3041 LoadRoot(x3, RootIndex::kUndefinedValue);
3044 InvokePrologue(expected_parameter_count, actual_parameter_count, type);
3050 constexpr int unused_argument_count = 0;
3065 actual_parameter_count);
3066 B(&continue_after_hook);
3072 Register function, Register new_target, Register actual_parameter_count,
3082 Register expected_parameter_count = x2;
3089 expected_parameter_count,
3091 Ldrh(expected_parameter_count,
3093 SharedFunctionInfo::kFormalParameterCountOffset));
3096 actual_parameter_count, type);
3100 Register expected_parameter_count,
3101 Register actual_parameter_count,
3115 actual_parameter_count, type);
3120 Register code, Register scratch, Label* if_marked_for_deoptimization) {
3123 if_marked_for_deoptimization);
3127 Label* if_turbofanned) {
3188#if V8_ENABLE_WEBASSEMBLY
3189 if (stub_mode == StubCallMode::kCallWasmRuntimeStub) {
3200 DCHECK_EQ(xzr.SizeInBytes(), double_input.SizeInBytes());
3233 Register type_reg = temps.AcquireX();
3236 if (type == StackFrame::CONSTRUCT || type == StackFrame::FAST_CONSTRUCT) {
3239#if V8_ENABLE_WEBASSEMBLY
3240 if (type == StackFrame::WASM || type == StackFrame::WASM_LIFTOFF_SETUP ||
3241 type == StackFrame::WASM_EXIT) {
3247 Add(fp, sp, kSPToFPDelta);
3266 DCHECK(frame_type == StackFrame::EXIT ||
3267 frame_type == StackFrame::BUILTIN_EXIT ||
3268 frame_type == StackFrame::API_ACCESSOR_EXIT ||
3269 frame_type == StackFrame::API_CALLBACK_EXIT);
3300 int slots_to_claim = RoundUp(extra_space + 1, 2);
3323 const Register& scratch2) {
3357 Label* target_if_cleared) {
3367 Register scratch2) {
3370 if (v8_flags.native_code_counters && counter->Enabled()) {
3376 Add(scratch1.W(), scratch1.W(), value);
3386 B(cond, if_cond_pass);
3390 Register scratch, Label* target,
3393 CHECK(cc == Condition::kUnsignedLessThan ||
3394 cc == Condition::kUnsignedGreaterThanEqual);
3398 LoadMap(scratch, heap_object);
3400 LAST_JS_RECEIVER_TYPE);
3401 B(Condition::kUnsignedLessThanEqual, &ok);
3402 LoadMap(scratch, heap_object);
3404 LAST_PRIMITIVE_HEAP_OBJECT_TYPE);
3405 B(Condition::kUnsignedLessThanEqual, &ok);
3406 Abort(AbortReason::kInvalidReceiver);
3416 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3422#if V8_STATIC_ROOTS_BOOL
3423void MacroAssembler::CompareInstanceTypeWithUniqueCompressedMap(
3425 std::optional<RootIndex> expected =
3432 temps.CanAcquire());
3435 scratch = temps.AcquireX();
3438 Operand imm_operand =
3442 CmpTagged(map, Immediate(expected_ptr));
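// With static roots, an instance type with a unique (read-only) map has a
// known compressed pointer value, so the check above compares the object's
// compressed map word directly against that constant instead of loading the
// map and reading its instance-type field.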
3446void MacroAssembler::IsObjectTypeFast(Register object,
3447 Register compressed_map_scratch,
3452 CompareInstanceTypeWithUniqueCompressedMap(compressed_map_scratch,
3462#if V8_STATIC_ROOTS_BOOL
3465 CompareInstanceTypeWithUniqueCompressedMap(
3479#if V8_STATIC_ROOTS_BOOL
3480 if (auto range = InstanceTypeChecker::UniqueMapRangeOfInstanceTypeRange(
3481 lower_limit, higher_limit)) {
3483 CompareRange(scratch.W(), scratch.W(), range->first, range->second);
3487 LoadMap(scratch, heap_object);
3500 unsigned lower_limit, unsigned higher_limit) {
3503 if (lower_limit != 0) {
3504 Sub(scratch.W(), value.W(), Operand(lower_limit));
3505 Cmp(scratch.W(), Operand(higher_limit - lower_limit));
3507 Cmp(value.W(), Immediate(higher_limit));
3512 unsigned lower_limit,
3513 unsigned higher_limit,
3514 Label* on_in_range) {
3515 CompareRange(value, scratch, lower_limit, higher_limit);
3530 Register scratch, Label* fbv_undef) {
3541 Cmp(scratch, FEEDBACK_VECTOR_TYPE);
3545 LoadRoot(dst, RootIndex::kUndefinedValue);
3556 Cmp(type_reg, type);
3566 Register scratch = temps.AcquireX();
3568 CompareRange(type_reg, scratch, lower_limit, higher_limit);
3620 Label* if_not_equal) {
3622 B(ne, if_not_equal);
3626 unsigned lower_limit,
3627 unsigned higher_limit,
3628 Label* on_in_range) {
3630 if (lower_limit != 0) {
3632 Register scratch = temps.AcquireW();
3633 Sub(scratch, value, Operand(lower_limit));
3676 Stp(value.W(), value.W(), dst_field_operand);
3678 Stp(value, value, dst_field_operand);
3685 Str(value.W(), dst_field_operand);
3687 Str(value, dst_field_operand);
3692 const Register& dst_base,
3693 const Register& dst_index,
3694 const Register& temp) {
3695 Add(temp, dst_base, dst_index);
3697 Stlr(value.W(), temp);
3722 const Register& source) {
3737 Operand imm_operand =
3745#if V8_ENABLE_SANDBOX
3748 Register scratch = temps.AcquireX();
3759 const Register& base,
3760 const Register& index,
3761 const Register& temp) {
3763 Add(temp, base, index);
3773 const Register& base,
3774 const Register& index,
3775 const Register& temp) {
3777 Add(temp, base, index);
3786 Register scratch = temps.AcquireX();
3800 Register scratch = temps.AcquireX();
3803 Cbnz(scratch, is_marking);
3809 Register scratch = temps.AcquireX();
3812 Cbz(scratch, not_marking);
3818 SlotDescriptor slot) {
3825#if V8_STATIC_ROOTS_BOOL
3846 Register scratch = temps.AcquireX();
3851 Abort(AbortReason::kUnalignedCellInWriteBarrier);
3863#ifdef V8_ENABLE_SANDBOX
3865 Operand(value, LSR, kSandboxedPointerShift));
3873#ifdef V8_ENABLE_SANDBOX
3884#ifdef V8_ENABLE_SANDBOX
3887 Register scratch = temps.AcquireX();
3889 Mov(scratch, Operand(scratch, LSL, kSandboxedPointerShift));
3890 Str(scratch, dst_field_operand);
3899 Register isolate_root) {
3902#ifdef V8_ENABLE_SANDBOX
3903 DCHECK(!tag_range.IsEmpty());
3906 Register external_table = temps.AcquireX();
3907 if (isolate_root == no_reg) {
3913 IsolateData::external_pointer_table_offset() +
3918 kExternalPointerTableEntrySizeLog2));
3929 if (tag_range.Size() == 1) {
3933 Cmp(scratch, Immediate(tag_range.first));
3934 SbxCheck(eq, AbortReason::kExternalPointerTagMismatch);
3949#ifdef V8_ENABLE_SANDBOX
3958#ifdef V8_ENABLE_SANDBOX
3968#ifdef V8_ENABLE_SANDBOX
3982#ifdef V8_ENABLE_SANDBOX
3985 Register scratch = temps.AcquireX();
3987 FieldMemOperand(value, ExposedTrustedObject::kSelfIndirectPointerOffset));
3988 Str(scratch.W(), dst_field_operand);
3994#ifdef V8_ENABLE_SANDBOX
3995void MacroAssembler::ResolveIndirectPointerHandle(Register destination,
4002 Label is_trusted_pointer_handle, done;
4003 constexpr int kCodePointerHandleMarkerBit = 0;
4004 static_assert((1 << kCodePointerHandleMarkerBit) ==
4006 Tbz(handle, kCodePointerHandleMarkerBit, &is_trusted_pointer_handle);
4009 Bind(&is_trusted_pointer_handle);
4013 } else if (tag == kCodeIndirectPointerTag) {
4020void MacroAssembler::ResolveTrustedPointerHandle(Register destination,
4023 DCHECK_NE(tag, kCodeIndirectPointerTag);
4039void MacroAssembler::ResolveCodePointerHandle(Register destination,
4044 LoadCodePointerTableBase(table);
4055void MacroAssembler::LoadCodeEntrypointViaCodePointer(Register destination,
4061 Register scratch = temps.AcquireX();
4062 LoadCodePointerTableBase(scratch);
4069 Mov(scratch, Immediate(tag));
4074void MacroAssembler::LoadCodePointerTableBase(Register destination) {
4075#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
4079 ExternalReference::code_pointer_table_base_address(isolate()));
4090 Mov(destination, ExternalReference::global_code_pointer_table_base_address());
4095#ifdef V8_ENABLE_LEAPTIERING
4096void MacroAssembler::LoadEntrypointFromJSDispatchTable(Register destination,
4097 Register dispatch_handle,
4103 Mov(scratch, ExternalReference::js_dispatch_table_address());
4104 Mov(index, Operand(dispatch_handle, LSR, kJSDispatchHandleShift));
4109void MacroAssembler::LoadEntrypointFromJSDispatchTable(
4114 Mov(scratch, ExternalReference::js_dispatch_table_address());
4121 static_assert(!JSDispatchTable::kSupportsCompaction);
4122 int offset = JSDispatchTable::OffsetOfEntry(dispatch_handle) +
4123 JSDispatchEntry::kEntrypointOffset;
4127void MacroAssembler::LoadParameterCountFromJSDispatchTable(
4128 Register destination, Register dispatch_handle, Register scratch) {
4133 Mov(scratch, ExternalReference::js_dispatch_table_address());
4134 Mov(index, Operand(dispatch_handle, LSR, kJSDispatchHandleShift));
4136 static_assert(JSDispatchEntry::kParameterCountMask == 0xffff);
4140void MacroAssembler::LoadEntrypointAndParameterCountFromJSDispatchTable(
4141 Register entrypoint, Register parameter_count, Register dispatch_handle,
4147 Mov(scratch, ExternalReference::js_dispatch_table_address());
4148 Mov(index, Operand(dispatch_handle, LSR, kJSDispatchHandleShift));
4150 Ldr(entrypoint, MemOperand(scratch, JSDispatchEntry::kEntrypointOffset));
4151 static_assert(JSDispatchEntry::kParameterCountMask == 0xffff);
4151 static_assert(JSDispatchEntry::kParameterCountMask == 0xffff);
4153 MemOperand(scratch, JSDispatchEntry::kCodeObjectOffset));
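// All of the dispatch-table loaders above follow the same pattern: shift the
// dispatch handle right by kJSDispatchHandleShift to get the entry index,
// scale it by the entry size to address the entry in the js_dispatch_table,
// and read the entrypoint and/or the 16-bit parameter count (masked via
// kParameterCountMask) from that entry.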
4160#ifdef V8_ENABLE_SANDBOX
4172 DCHECK(!regs.IncludesAliasOf(lr));
4182 DCHECK(!regs.IncludesAliasOf(lr));
4244#if V8_ENABLE_WEBASSEMBLY
4245 if (mode == StubCallMode::kCallWasmRuntimeStub) {
4258 Register object, Operand offset) {
4266 if (dst_slot != object) {
4268 Mov(dst_object, object);
4276 if (offset.IsImmediate() || (offset.reg() != dst_object)) {
4277 Mov(dst_object, dst_slot);
4287 Add(dst_slot, dst_slot, dst_object);
4288 Sub(dst_object, dst_slot, dst_object);
4309 if (slot.contains_indirect_pointer()) {
4311 slot.indirect_pointer_tag());
4313 DCHECK(slot.contains_direct_pointer());
4317 Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
4320 if (v8_flags.disable_write_barriers) {
4329#if V8_STATIC_ROOTS_BOOL
4342 if (slot.contains_indirect_pointer()) {
4359 if (slot.contains_direct_pointer()) {
4366 DCHECK(slot.contains_indirect_pointer());
4368 slot.indirect_pointer_tag());
4414 Mov(w0, static_cast<int>(reason));
4415 Call(ExternalReference::abort_with_reason());
4420 HardAbortScope hard_aborts(this);
4434 Register scratch = temps.AcquireX();
4449 dst, Map::kConstructorOrBackPointerOrNativeContextOffset));
4455 Register feedback_vector,
4459 Label fallthrough, clear_slot;
4464 LoadWeakValue(scratch_and_result, scratch_and_result, &fallthrough);
4477 if (min_opt_level == CodeKind::TURBOFAN_JS) {
4493 Mov(scratch_and_result, 0);
4499 const CPURegister& arg0,
4500 const CPURegister& arg1,
4501 const CPURegister& arg2,
4502 const CPURegister& arg3) {
4516 static const CPURegList kPCSVarargs =
4518 static const CPURegList kPCSVarargsFP =
4524 tmp_list.Remove(x0);
4525 tmp_list.Remove(kPCSVarargs);
4526 tmp_list.Remove(arg0, arg1, arg2, arg3);
4529 fp_tmp_list.Remove(kPCSVarargsFP);
4530 fp_tmp_list.Remove(arg0, arg1, arg2, arg3);
4539 CPURegList pcs_varargs = kPCSVarargs;
4541 CPURegList pcs_varargs_fp = kPCSVarargsFP;
4550 if (args[i].IsRegister()) {
4551 pcs[i] = pcs_varargs.PopLowestIndex().X();
4554 if (args[i].Is32Bits()) pcs[i] = pcs[i].W();
4555 } else if (args[i].IsVRegister()) {
4561 pcs[i] = pcs_varargs.PopLowestIndex().X();
4563 pcs[i] = pcs_varargs_fp.PopLowestIndex().D();
4572 if (args[i].Aliases(pcs[i])) continue;
4576 if (kPCSVarargs.IncludesAliasOf(args[i]) ||
4577 kPCSVarargsFP.IncludesAliasOf(args[i])) {
4578 if (args[i].IsRegister()) {
4580 Register new_arg = temps.AcquireSameSizeAs(old_arg);
4581 Mov(new_arg, old_arg);
4584 VRegister old_arg = args[i].VReg();
4585 VRegister new_arg = temps.AcquireSameSizeAs(old_arg);
4586 Fmov(new_arg, old_arg);
4594 for (int i = 0; i < arg_count; i++) {
4596 if (args[i].IsVRegister()) {
4597 if (pcs[i].SizeInBytes() != args[i].SizeInBytes()) {
4601 VRegister temp0 = temps.AcquireD();
4603 Fmov(pcs[i].Reg(), temp0);
4612 if (pcs[i].IsRegister()) {
4616 if (pcs[i].SizeInBytes() == args[i].SizeInBytes()) {
4631 Label format_address;
4632 Adr(x0, &format_address);
4636 BlockPoolsScope scope(this);
4639 Bind(&format_address);
4653 if (options().enable_simulator_code) {
4659 uint32_t arg_pattern_list = 0;
4660 for (int i = 0; i < arg_count; i++) {
4661 uint32_t arg_pattern;
4662 if (args[i].IsRegister()) {
4671 dc32(arg_pattern_list);
4675 Call(ExternalReference::printf_function());
4679 CPURegister arg1, CPURegister arg2,
4690 saved_registers.Align();
4699 CPURegList tmp_list = saved_registers;
4701 tmp_list.Remove(arg0, arg1, arg2, arg3);
4702 fp_tmp_list.Remove(arg0, arg1, arg2, arg3);
4711 bool arg0_sp = arg0.is_valid() && sp.Aliases(arg0);
4712 bool arg1_sp = arg1.is_valid() && sp.Aliases(arg1);
4713 bool arg2_sp = arg2.is_valid() && sp.Aliases(arg2);
4714 bool arg3_sp = arg3.is_valid() && sp.Aliases(arg3);
4715 if (arg0_sp || arg1_sp || arg2_sp || arg3_sp) {
4718 Register arg_sp = temps.AcquireX();
4720 saved_registers.TotalSizeInBytes() +
4762 "Offsets must be consecutive for ldp!");
4763#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
4766 temps.Exclude(x16, x17);
4777#if V8_ENABLE_WEBASSEMBLY
4778void MacroAssembler::StoreReturnAddressInWasmExitFrame(Label* return_location) {
4780 temps.Exclude(x16, x17);
4781 Adr(x17, return_location);
4782#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
4786 Str(x17, MemOperand(fp, WasmExitFrameConstants::kCallingPCOffset));
4792 VRegister scratch = temps.AcquireV(kFormat8B);
4793 VRegister tmp = src.Is32Bits() ? scratch.S() : scratch.D();
4795 Cnt(scratch, scratch);
4796 Addv(scratch.B(), scratch);
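// PopcntHelper computes a scalar population count via NEON: the source value
// is moved into an S or D lane, Cnt produces per-byte bit counts, and Addv
// sums those bytes into a single lane that is then moved back to `dst`.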
4803 VRegister tmp = temps.AcquireQ();
4804 VRegister mask = temps.AcquireQ();
4807 CpuFeatureScope scope(this, PMULL1Q);
4809 Movi(mask.V2D(), 0x0102'0408'1020'4080);
4811 Ushr(tmp.V16B(), src.V16B(), 7);
4814 Pmull2(temp.V1Q(), mask.V2D(), tmp.V2D());
4815 Pmull(tmp.V1Q(), mask.V1D(), tmp.V1D());
4817 Trn2(tmp.V8B(), tmp.V8B(), temp.V8B());
4818 Mov(dst.W(), tmp.V8H(), 3);
4822 Sshr(tmp.V16B(), src.V16B(), 7);
4823 Movi(mask.V2D(), 0x8040'2010'0804'0201);
4824 And(tmp.V16B(), mask.V16B(), tmp.V16B());
4825 Ext(mask.V16B(), tmp.V16B(), tmp.V16B(), 8);
4826 Zip1(tmp.V16B(), tmp.V16B(), mask.V16B());
4827 Addv(tmp.H(), tmp.V8H());
4828 Mov(dst.W(), tmp.V8H(), 0);
4835 VRegister tmp = temps.AcquireQ();
4836 VRegister mask = temps.AcquireQ();
4839 CpuFeatureScope scope(this, PMULL1Q);
4842 Ushr(tmp.V8H(), src.V8H(), 15);
4843 Movi(mask.V1D(), 0x0102'0408'1020'4080);
4846 Xtn(tmp.V8B(), tmp.V8H());
4848 Pmull(tmp.V1Q(), tmp.V1D(), mask.V1D());
4849 Mov(dst.W(), tmp.V16B(), 7);
4851 Sshr(tmp.V8H(), src.V8H(), 15);
4854 Movi(mask.V2D(), 0x0080'0040'0020'0010, 0x0008'0004'0002'0001);
4855 And(tmp.V16B(), mask.V16B(), tmp.V16B());
4856 Addv(tmp.H(), tmp.V8H());
4857 Mov(dst.W(), tmp.V8H(), 0);
4865 Mov(dst.X(), src.D(), 1);
4866 Fmov(tmp.X(), src.D());
4867 And(dst.X(), dst.X(), 0x80000000'80000000);
4868 And(tmp.X(), tmp.X(), 0x80000000'80000000);
4869 Orr(dst.X(), dst.X(), Operand(dst.X(), LSL, 31));
4870 Orr(tmp.X(), tmp.X(), Operand(tmp.X(), LSL, 31));
4871 Lsr(dst.X(), dst.X(), 60);
4872 Bfxil(dst.X(), tmp.X(), 62, 2);
4879 Mov(dst.X(), src.D(), 1);
4880 Fmov(tmp.X(), src.D());
4881 Lsr(dst.X(), dst.X(), 62);
4882 Bfxil(dst.X(), tmp.X(), 63, 1);
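// The sequence above builds a 2-bit lane mask from the sign bits of both
// 64-bit lanes: lane 1 is moved to `dst` and lane 0 to `tmp`, Lsr brings
// lane 1's sign bit down to bit 1, and Bfxil inserts lane 0's sign bit
// (bit 63) as bit 0.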
4888 VRegister tmp = scope.AcquireV(kFormat2D);
4889 Cmeq(tmp.V2D(), src.V2D(), 0);
4891 Fcmp(tmp.D(), tmp.D());
4902 Register function_address,
4903 ExternalReference thunk_ref, Register thunk_arg,
4904 int slots_to_drop_on_return,
4910 using ER = ExternalReference;
4912 Isolate* isolate = masm->isolate();
4914 ER::handle_scope_next_address(isolate), no_reg);
4916 ER::handle_scope_limit_address(isolate), no_reg);
4918 ER::handle_scope_level_address(isolate), no_reg);
4927 Register prev_next_address_reg = x19;
4936 scratch, scratch2, prev_next_address_reg, prev_limit_reg));
4941 scratch, scratch2, prev_next_address_reg, prev_limit_reg));
4943 scratch, scratch2, prev_next_address_reg, prev_limit_reg));
4947 fix_temps.Include(x16, x17);
4951 "Allocate HandleScope in callee-save registers.");
4952 __ Ldr(prev_next_address_reg, next_mem_op);
4953 __ Ldr(prev_limit_reg, limit_mem_op);
4954 __ Ldr(prev_level_reg, level_mem_op);
4955 __ Add(scratch.W(), prev_level_reg, 1);
4956 __ Str(scratch.W(), level_mem_op);
4959 Label profiler_or_side_effects_check_enabled, done_api_call;
4960 if (with_profiling) {
4961 __ RecordComment("Check if profiler or side effects check is enabled");
4962 __ Ldrb(scratch.W(),
4964 __ Cbnz(scratch.W(), &profiler_or_side_effects_check_enabled);
4965#ifdef V8_RUNTIME_CALL_STATS
4967 __ Mov(scratch, ER::address_of_runtime_stats_flag());
4969 __ Cbnz(scratch.W(), &profiler_or_side_effects_check_enabled);
4977 Label propagate_exception;
4978 Label delete_allocated_handles;
4979 Label leave_exit_frame;
4982 __ Ldr(return_value, return_value_operand);
4987 "No more valid handles (the result handle was the last one)."
4988 "Restore previous handle scope.");
4989 __ Str(prev_next_address_reg, next_mem_op);
4991 __ Ldr(scratch.W(), level_mem_op);
4992 __ Sub(scratch.W(), scratch.W(), 1);
4993 __ Cmp(scratch.W(), prev_level_reg);
4994 __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
4996 __ Str(prev_level_reg, level_mem_op);
4998 __ Ldr(scratch, limit_mem_op);
4999 __ Cmp(prev_limit_reg, scratch);
5000 __ B(ne, &delete_allocated_handles);
5004 __ Bind(&leave_exit_frame);
5006 Register argc_reg = prev_limit_reg;
5007 if (argc_operand != nullptr) {
5009 __ Ldr(argc_reg, *argc_operand);
5016 "Check if the function scheduled an exception.");
5017 __ Mov(scratch, ER::exception_address(isolate));
5019 __ JumpIfNotRoot(scratch, RootIndex::kTheHoleValue, &propagate_exception);
5023 AbortReason::kAPICallReturnedInvalidObject);
5025 if (argc_operand == nullptr) {
5034 if (with_profiling) {
5036 __ Bind(&profiler_or_side_effects_check_enabled);
5038 if (thunk_arg.is_valid()) {
5040 IsolateFieldId::kApiCallbackThunkArgument);
5041 __ Str(thunk_arg, thunk_arg_mem_op);
5043 __ Mov(scratch, thunk_ref);
5045 __ B(&done_api_call);
5049 __ Bind(&propagate_exception);
5054 masm, "HandleScope limit has changed. Delete allocated extensions.");
5055 __ Bind(&delete_allocated_handles);
5056 __ Str(prev_limit_reg, limit_mem_op);
5058 Register saved_result = prev_limit_reg;
5059 __ Mov(saved_result, x0);
5063 __ B(&leave_exit_frame);
constexpr int kRegularPageSize
#define Assert(condition)
static int ActivationFrameAlignment()
EmbeddedObjectIndex AddEmbeddedObject(IndirectHandle< HeapObject > object)
size_t EmbeddedObjectIndex
V8_INLINE void RecordComment(const char *comment, const SourceLocation &loc=SourceLocation::Current())
const AssemblerOptions & options() const
void LogicalImmediate(const Register &rd, const Register &rn, unsigned n, unsigned imm_s, unsigned imm_r, LogicalOp op)
static Instr RnSP(Register rn)
void mvni(const VRegister &vd, const int imm8, Shift shift=LSL, const int shift_amount=0)
void dup(const VRegister &vd, const VRegister &vn, int vn_index)
static constexpr bool IsImmLSScaled(int64_t offset, unsigned size_log2)
void LoadStoreWRegOffset(Instr memop, const Register ®offset)
void and_(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void bit(const VRegister &vd, const VRegister &vn, const VRegister &vm)
static bool IsImmLSPair(int64_t offset, unsigned size)
void movn(const Register &rd, uint64_t imm, int shift=-1)
void hint(SystemHint code)
void LoadStoreScaledImmOffset(Instr memop, int offset, unsigned size)
void csinc(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void AddSubWithCarry(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubWithCarryOp op)
void b(int branch_offset, Condition cond=al, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
void add(Register dst, Register src1, const Operand &src2, SBit s=LeaveCC, Condition cond=al)
void bl(int branch_offset, Condition cond=al, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
void tbz(const Register &rt, unsigned bit_pos, Label *label)
static Instr Rt(CPURegister rt)
void EmitExtendShift(const Register &rd, const Register &rn, Extend extend, unsigned left_shift)
static constexpr bool IsImmAddSub(int64_t immediate)
friend class UseScratchRegisterScope
void str(Register src, const MemOperand &dst, Condition cond=al)
void ConditionalCompare(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond, ConditionalCompareOp op)
void fmov(const VRegister &fd, double imm)
void shift(Operand dst, Immediate shift_amount, int subcode, int size)
void ldp(const CPURegister &rt, const CPURegister &rt2, const MemOperand &src)
void LoadStore(const CPURegister &rt, const MemOperand &addr, LoadStoreOp op)
void adr(const Register &rd, Label *label)
static unsigned CalcLSDataSizeLog2(LoadStoreOp op)
void mvn(Register dst, const Operand &src, SBit s=LeaveCC, Condition cond=al)
void mov(Register dst, const Operand &src, SBit s=LeaveCC, Condition cond=al)
void csel(const Register &rd, const Register &rn, const Register &rm, Condition cond)
static constexpr bool IsImmConditionalCompare(int64_t immediate)
void movz(const Register &rd, uint64_t imm, int shift=-1)
void tbnz(const Register &rt, unsigned bit_pos, Label *label)
void EmitShift(const Register &rd, const Register &rn, Shift shift, unsigned amount)
void movi(const VRegister &vd, const uint64_t imm, Shift shift=LSL, const int shift_amount=0)
void movk(const Register &rd, uint64_t imm, int shift=-1)
static constexpr bool IsImmLSUnscaled(int64_t offset)
void EmitStringData(const char *string)
void LoadStoreUnscaledImmOffset(Instr memop, int offset)
void ldr(Register dst, const MemOperand &src, Condition cond=al)
void LoadStorePair(const CPURegister &rt, const CPURegister &rt2, const MemOperand &addr, LoadStorePairOp op)
void stp(const CPURegister &rt, const CPURegister &rt2, const MemOperand &dst)
void cbnz(const Register &rt, Label *label)
void AddSub(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubOp op)
static bool IsImmLogical(uint64_t value, unsigned width, unsigned *n, unsigned *imm_s, unsigned *imm_r)
void cbz(const Register &rt, Label *label)
int SizeOfCodeGeneratedSince(Label *label)
void Logical(const Register &rd, const Register &rn, const Operand &operand, LogicalOp op)
const Register & AppropriateZeroRegFor(const CPURegister ®) const
void csinv(const Register &rd, const Register &rn, const Register &rm, Condition cond)
void CheckVeneerPool(bool force_emit, bool require_jump, size_t margin=kVeneerDistanceMargin)
static constexpr Builtin RecordWrite(SaveFPRegsMode fp_mode)
static bool IsIsolateIndependentBuiltin(Tagged< Code > code)
V8_EXPORT_PRIVATE Handle< Code > code_handle(Builtin builtin)
static constexpr Builtin RuntimeCEntry(int result_size, bool switch_to_central_stack=false)
static constexpr Builtin EphemeronKeyBarrier(SaveFPRegsMode fp_mode)
static constexpr Builtin IndirectPointerBarrier(SaveFPRegsMode fp_mode)
static constexpr Builtin CEntry(int result_size, ArgvMode argv_mode, bool builtin_exit_frame=false, bool switch_to_central_stack=false)
static CPURegList GetCallerSavedV(int size=kDRegSizeInBits)
void set_bits(uint64_t new_bits)
void Combine(const CPURegList &other)
static const int kIsTurbofannedBit
static const int kMarkedForDeoptimizationBit
static constexpr int kCallerSPOffset
static constexpr int kCallerFPOffset
static constexpr int kCallerPCOffset
static const int kInvalidContext
static V8_INLINE constexpr int SlotOffset(int index)
static bool IsSupported(CpuFeature f)
static V8_EXPORT_PRIVATE const int kEagerDeoptExitSize
static V8_EXPORT_PRIVATE const int kLazyDeoptExitSize
static constexpr int kCalleeSavedRegisterBytesPushedAfterFpLrPair
static constexpr int kCalleeSavedRegisterBytesPushedBeforeFpLrPair
static constexpr int kSPOffset
static constexpr int kLastExitFrameField
static V8_EXPORT_PRIVATE ExternalReference address_of_code_pointer_table_base_address()
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr uint32_t kFlagsTieringStateIsAnyRequested
static constexpr uint32_t FlagMaskForNeedsProcessingCheckFrom(CodeKind code_kind)
static constexpr int OffsetOfElementAt(int index)
static constexpr int kHeaderSize
static constexpr int kMapOffset
static constexpr RegList ComputeSavedRegisters(Register object, Register slot_address=no_reg)
static constexpr Register IndirectPointerTagRegister()
static constexpr Register ObjectRegister()
static constexpr Register SlotAddressRegister()
static bool IsValidPCRelOffset(ptrdiff_t offset)
static const int ImmPCRelRangeBitwidth
static const int kExternalPointerTableBasePointerOffset
static constexpr int BuiltinEntrySlotOffset(Builtin id)
static constexpr int real_jslimit_offset()
static constexpr intptr_t GetOffset(IsolateFieldId id)
static IsolateGroup * current()
Address BuiltinEntry(Builtin builtin)
bool root_array_available_
static bool IsAddressableThroughRootRegister(Isolate *isolate, const ExternalReference &reference)
static constexpr bool CanBeImmediate(RootIndex index)
V8_INLINE std::string CommentForOffHeapTrampoline(const char *prefix, Builtin builtin)
static int32_t RootRegisterOffsetForExternalReferenceTableEntry(Isolate *isolate, const ExternalReference &reference)
static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index)
Isolate * isolate() const
Tagged_t ReadOnlyRootPtr(RootIndex index)
bool root_array_available() const
void IndirectLoadConstant(Register destination, Handle< HeapObject > object)
static intptr_t RootRegisterOffsetForExternalReference(Isolate *isolate, const ExternalReference &reference)
bool should_abort_hard() const
void IndirectLoadExternalReference(Register destination, ExternalReference reference)
bool NeedExtraInstructionsOrRegisterBranch(Label *label)
void JumpIfRoot(Register with, RootIndex index, Label *if_equal)
void PushAll(RegList registers)
void Asr(const Register &rd, const Register &rn, unsigned shift)
void Abort(AbortReason msg)
void LoadStackLimit(Register destination, StackLimitKind kind)
void Fcvt(const VRegister &fd, const VRegister &fn)
void Call(Register target, Condition cond=al)
void CallJSFunction(Register function_object, uint16_t argument_count)
void AddSubWithCarryMacro(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubWithCarryOp op)
void CallDebugOnFunctionCall(Register fun, Register new_target, Register expected_parameter_count, Register actual_parameter_count)
void Cmp(const Register &rn, int imm)
void AssertMap(Register object) NOOP_UNLESS_DEBUG_CODE
void JumpIfIsInRange(Register value, Register scratch, unsigned lower_limit, unsigned higher_limit, Label *on_in_range)
void AddSubMacro(const Register &rd, const Register &rn, const Operand &operand, FlagsUpdate S, AddSubOp op)
void DecompressTaggedSigned(const Register &destination, const MemOperand &field_operand)
void Drop(int count, Condition cond=al)
void CompareInstanceType(Register map, Register type_reg, InstanceType type)
void Orr(const Register &rd, const Register &rn, const Operand &operand)
void mov(Register rd, Register rj)
void Add(const Register &rd, const Register &rn, const Operand &operand)
void PushArgument(const Register &arg)
void IsObjectType(Register heap_object, Register scratch1, Register scratch2, InstanceType type)
void SmiUntag(Register reg, SBit s=LeaveCC)
void DecodeField(Register dst, Register src)
void AssertFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void SlotAddress(Register dst, int slot_offset)
void Bind(Label *label, BranchTargetIdentifier id=BranchTargetIdentifier::kNone)
void AssertNotSmi(Register object, AbortReason reason=AbortReason::kOperandIsASmi) NOOP_UNLESS_DEBUG_CODE
void Ext(const VRegister &vd, const VRegister &vn, const VRegister &vm, int index)
void LoadExternalPointerField(Register destination, MemOperand field_operand, ExternalPointerTagRange tag_range, Register isolate_root=Register::no_reg())
void AssertGeneratorObject(Register object) NOOP_UNLESS_DEBUG_CODE
void PushMultipleTimes(CPURegister src, Register count)
static CPURegList DefaultTmpList()
void JumpIfNotRoot(Register with, RootIndex index, Label *if_not_equal)
void near_call(int offset, RelocInfo::Mode rmode)
void AssertPositiveOrZero(Register value) NOOP_UNLESS_DEBUG_CODE
void LoadStoreMacroComplex(const CPURegister &rt, const MemOperand &addr, LoadStoreOp op)
void LoadEntryFromBuiltin(Builtin builtin, Register destination)
void CompareAndBranch(const Register &lhs, const Operand &rhs, Condition cond, Label *label)
void Fmov(VRegister fd, VRegister fn)
void I64x2AllTrue(Register dst, QwNeonRegister src)
void CompareRoot(Register obj, RootIndex index)
void CompareObjectType(Register heap_object, Register map, Register type_reg, InstanceType type)
void Msr(SystemRegister sysreg, const Register &rt)
void Move(Register dst, Tagged< Smi > smi)
void I32x4BitMask(Register dst, VRegister src)
void Lsr(const Register &rd, const Register &rn, unsigned shift)
void Assert(Condition cond, AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void Tst(const Register &rn, const Operand &operand)
void AtomicDecompressTaggedSigned(const Register &destination, const Register &base, const Register &index, const Register &temp)
void Bfxil(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void StoreReturnAddressAndCall(Register target)
void CopyDoubleWords(Register dst, Register src, Register count, CopyDoubleWordsMode mode=kDstLessThanSrc)
void StackOverflowCheck(Register num_args, Register scratch, Label *stack_overflow)
void AssertFeedbackVector(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void CallBuiltinByIndex(Register builtin_index, Register target)
void LoadTrustedPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag)
void LoadRootRelative(Register destination, int32_t offset) final
void JumpIfSmi(Register value, Label *smi_label)
void PopHelper(int count, int size, const CPURegister &dst0, const CPURegister &dst1, const CPURegister &dst2, const CPURegister &dst3)
void JumpIfObjectType(Register object, Register map, Register type_reg, InstanceType type, Label *if_cond_pass, Condition cond=eq)
void CallCodeObject(Register code_object)
void LoadSandboxedPointerField(Register destination, MemOperand field_operand)
void AssertUnreachable(AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void LoadRootRegisterOffset(Register destination, intptr_t offset) final
void PushCalleeSavedRegisters()
void Movi32bitHelper(const VRegister &vd, uint64_t imm)
void LoadCodeInstructionStart(Register destination, Register code_object, CodeEntrypointTag tag=kDefaultCodeEntrypointTag)
void TestAndBranchIfAllClear(const Register ®, const uint64_t bit_pattern, Label *label)
void B(Label *label, BranchType type, Register reg=NoReg, int bit=-1)
void AtomicDecompressTagged(const Register &destination, const Register &base, const Register &index, const Register &temp)
void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(Register flags, Register feedback_vector, CodeKind current_code_kind, Label *flags_need_processing)
void CallPrintf(int arg_count=0, const CPURegister *args=nullptr)
void LoadFeedbackVector(Register dst, Register closure, Register scratch, Label *fbv_undef)
void EmitIncrementCounter(StatsCounter *counter, int value, Register scratch1, Register scratch2)
void Peek(const CPURegister &dst, const Operand &offset)
void AssertCode(Register object) NOOP_UNLESS_DEBUG_CODE
void TryConvertDoubleToInt64(Register result, DoubleRegister input, Label *done)
void near_jump(int offset, RelocInfo::Mode rmode)
void Uxtw(const Register &rd, const Register &rn)
void Tbz(const Register &rt, unsigned bit_pos, Label *label)
void InvokeFunctionCode(Register function, Register new_target, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void JumpIfUnsignedLessThan(Register x, int32_t y, Label *dest)
void PopcntHelper(Register dst, Register src)
void IndirectCall(Address target, RelocInfo::Mode rmode)
void BailoutIfDeoptimized()
void DecodeSandboxedPointer(Register value)
int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg) const
void Mrs(const Register &rt, SystemRegister sysreg)
void CompareTaggedRoot(Register with, RootIndex index)
void JumpIfJSAnyIsNotPrimitive(Register heap_object, Register scratch, Label *target, Label::Distance distance=Label::kFar, Condition condition=Condition::kUnsignedGreaterThanEqual)
void Debug(const char *message, uint32_t code, Instr params=BREAK)
bool CanUseNearCallOrJump(RelocInfo::Mode rmode)
void InvokePrologue(Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
void SmiTag(Register smi)
void SbxCheck(Condition cc, AbortReason reason)
void Eor(const Register &rd, const Register &rn, const Operand &operand)
void Cmeq(const VRegister &vd, const VRegister &vn, int imm)
void Fjcvtzs(const Register &rd, const VRegister &vn)
void EnterExitFrame(Register scratch, int stack_space, StackFrame::Type frame_type)
void RecordWriteField(Register object, int offset, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline)
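// Usage sketch (illustrative): a tagged store followed by its write barrier.
// x1 is the host object, x0 the stored value; JSObject::kElementsOffset is
// just an example field offset, and the barrier skips Smi values internally.
__ StoreTaggedField(x0, FieldMemOperand(x1, JSObject::kElementsOffset));
__ RecordWriteField(x1, JSObject::kElementsOffset, x0, kLRHasNotBeenSaved,
                    SaveFPRegsMode::kIgnore);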
void Ins(const VRegister &vd, int vd_index, const VRegister &vn, int vn_index)
MemOperand ExternalReferenceAsOperand(ExternalReference reference, Register scratch)
void Ldr(const CPURegister &rt, const Operand &imm)
void DropSlots(int64_t count)
void AssertSmi(Register object, AbortReason reason=AbortReason::kOperandIsNotASmi) NOOP_UNLESS_DEBUG_CODE
void Blr(const Register &xn)
void Mov(const Register &rd, const Operand &operand, DiscardMoveMode discard_mode=kDontDiscardForSameWReg)
void LoadIndirectPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag)
void CallIndirectPointerBarrier(Register object, Operand offset, SaveFPRegsMode fp_mode, IndirectPointerTag tag)
int LeaveFrame(StackFrame::Type type)
int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
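// Usage sketch (illustrative): preserve caller-saved registers around code
// that may clobber them, keeping x0 live across the region by excluding it.
__ PushCallerSaved(SaveFPRegsMode::kIgnore, x0);
// ... code that may clobber caller-saved registers ...
__ PopCallerSaved(SaveFPRegsMode::kIgnore, x0);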
void LoadGlobalProxy(Register dst)
void And(const Register &rd, const Register &rn, const Operand &operand)
static bool IsImmMovz(uint64_t imm, unsigned reg_size)
void JumpIfMarking(Label *is_marking, Label::Distance condition_met_distance=Label::kFar)
void JumpToExternalReference(const ExternalReference &builtin, bool builtin_exit_frame=false)
Operand ClearedValue() const
void StoreCodePointerField(Register value, MemOperand dst_field_operand)
void IsObjectTypeInRange(Register heap_object, Register scratch, InstanceType lower_limit, InstanceType higher_limit)
void InvokeFunctionWithNewTarget(Register function, Register new_target, Register actual_parameter_count, InvokeType type)
void Jump(Register target, Condition cond=al)
void LoadRoot(Register destination, RootIndex index) final
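// Usage sketch (illustrative): materialize a root constant and guard against
// the hole value; the register and label choices are arbitrary.
Label not_the_hole;
__ LoadRoot(x1, RootIndex::kUndefinedValue);
__ JumpIfNotRoot(x0, RootIndex::kTheHoleValue, &not_the_hole);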
void Fcmp(const VRegister &fn, const VRegister &fm)
void RecordWrite(Register object, Operand offset, Register value, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, SmiCheck smi_check=SmiCheck::kInline)
void DecompressProtected(const Register &destination, const MemOperand &field_operand)
static constexpr int kExtraSlotClaimedByPrologue
void PushRoot(RootIndex index)
void StoreTaggedField(const Register &value, const MemOperand &dst_field_operand)
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg=false)
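// Usage sketch (illustrative): bracket a runtime call with an internal frame.
// The runtime function id and its arity (0 here) are only examples.
__ EnterFrame(StackFrame::INTERNAL);
__ CallRuntime(Runtime::FunctionForId(Runtime::kStackGuard), 0);
__ LeaveFrame(StackFrame::INTERNAL);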
void InvokeFunction(Register function, Register expected_parameter_count, Register actual_parameter_count, InvokeType type)
static bool IsImmMovn(uint64_t imm, unsigned reg_size)
void CanonicalizeNaN(const VRegister &dst, const VRegister &src)
void Csel(const Register &rd, const Register &rn, const Operand &operand, Condition cond)
void LoadTaggedField(const Register &destination, const MemOperand &field_operand)
void CompareRange(Register value, Register scratch, unsigned lower_limit, unsigned higher_limit)
void JumpCodeObject(Register code_object, JumpMode jump_mode=JumpMode::kJump)
void JumpIfCodeIsTurbofanned(Register code, Register scratch, Label *if_turbofanned)
static int ActivationFrameAlignment()
void LoadFromConstantsTable(Register destination, int constant_index) final
void LoadProtectedPointerField(Register destination, MemOperand field_operand)
MemOperand EntryFromBuiltinAsOperand(Builtin builtin)
void LogicalMacro(const Register &rd, const Register &rn, const Operand &operand, LogicalOp op)
void PokePair(const CPURegister &src1, const CPURegister &src2, int offset)
void ComputeCodeStartAddress(Register dst)
void MaybeSaveRegisters(RegList registers)
void CheckPageFlag(Register object, int mask, Condition cc, Label *condition_met)
void LoadStorePairMacro(const CPURegister &rt, const CPURegister &rt2, const MemOperand &addr, LoadStorePairOp op)
void Tbnz(const Register &rt, unsigned bit_pos, Label *label)
void LoadTaggedRoot(Register destination, RootIndex index)
void PushCPURegList(CPURegList registers)
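// Usage sketch (illustrative): spill and reload a register list; an even
// register count keeps sp 16-byte aligned. The callee-saved pair chosen here
// is arbitrary.
CPURegList saved(x19, x20);
__ PushCPURegList(saved);
// ... code that clobbers x19 and x20 ...
__ PopCPURegList(saved);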
void Subs(const Register &rd, const Register &rn, const Operand &operand)
void Movi(const VRegister &vd, uint64_t imm, Shift shift=LSL, int shift_amount=0)
void I16x8BitMask(Register dst, VRegister src)
void Cset(const Register &rd, Condition cond)
void Printf(const char *format, CPURegister arg0=NoCPUReg, CPURegister arg1=NoCPUReg, CPURegister arg2=NoCPUReg, CPURegister arg3=NoCPUReg)
void StoreIndirectPointerField(Register value, MemOperand dst_field_operand)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_label=nullptr)
void MovePair(Register dst0, Register src0, Register dst1, Register src1)
void JumpJSFunction(Register function_object, JumpMode jump_mode=JumpMode::kJump)
void PrintfNoPreserve(const char *format, const CPURegister &arg0=NoCPUReg, const CPURegister &arg1=NoCPUReg, const CPURegister &arg2=NoCPUReg, const CPURegister &arg3=NoCPUReg)
void LoadStoreMacro(const CPURegister &rt, const MemOperand &addr, LoadStoreOp op)
void JumpIfNotMarking(Label *not_marking, Label::Distance condition_met_distance=Label::kFar)
void I64x2BitMask(Register dst, VRegister src)
void MoveObjectAndSlot(Register dst_object, Register dst_slot, Register object, Operand offset)
void Claim(int64_t count, uint64_t unit_size=kXRegSize)
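// Usage sketch (illustrative): claim two x-sized slots, use them as spill
// space via the pair Peek/Poke helpers, then release them again.
__ Claim(2);
__ PokePair(x0, x1, 0);   // x0 -> [sp], x1 -> [sp + 8]
// ... code that clobbers x0 and x1 ...
__ PeekPair(x0, x1, 0);
__ DropSlots(2);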
void AssertConstructor(Register object) NOOP_UNLESS_DEBUG_CODE
void LoadCompressedMap(Register dst, Register object)
Condition LoadFeedbackVectorFlagsAndCheckIfNeedsProcessing(Register flags, Register feedback_vector, CodeKind current_code_kind)
void CallRuntime(const Runtime::Function *f, int num_arguments)
void LoadWeakValue(Register out, Register in, Label *target_if_cleared)
void Cneg(const Register &rd, const Register &rn, Condition cond)
void Fsub(const VRegister &fd, const VRegister &fn, const VRegister &fm)
void CallBuiltin(Builtin builtin, Condition cond=al)
void PopCPURegList(CPURegList registers)
void Ubfx(const Register &rd, const Register &rn, unsigned lsb, unsigned width)
void PeekPair(const CPURegister &dst1, const CPURegister &dst2, int offset)
void GenerateTailCallToReturnedCode(Runtime::FunctionId function_id)
void TruncateDoubleToI(Isolate *isolate, Zone *zone, Register result, DoubleRegister double_input, StubCallMode stub_mode)
void Fcvtzs(const Register &rd, const VRegister &fn)
CopyDoubleWordsMode kDstLessThanSrcAndReverse
void LoadCodePointerField(Register destination, MemOperand field_operand)
void AssertJSAny(Register object, Register map_tmp, Register tmp, AbortReason abort_reason) NOOP_UNLESS_DEBUG_CODE
void CallEphemeronKeyBarrier(Register object, Operand offset, SaveFPRegsMode fp_mode)
void CmpTagged(const Register &r1, const Register &r2)
void Check(Condition cond, AbortReason reason)
void AssertFeedbackCell(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void TestAndBranchIfAnySet(const Register &reg, const uint64_t bit_pattern, Label *label)
void CallForDeoptimization(Builtin target, int deopt_id, Label *exit, DeoptimizeKind kind, Label *ret, Label *jump_deoptimization_entry_label)
void StoreTrustedPointerField(Register value, MemOperand dst_field_operand)
static int64_t CalculateTargetOffset(Address target, RelocInfo::Mode rmode, uint8_t *pc)
void CopySlots(int dst, Register src, Register slot_count)
void StoreSandboxedPointerField(Register value, MemOperand dst_field_operand)
void JumpIfCodeIsMarkedForDeoptimization(Register code, Register scratch, Label *if_marked_for_deoptimization)
void LoadEntryFromBuiltinIndex(Register builtin_index, Register target)
void AssertZeroExtended(Register int32_register)
void Mvn(const Register &rd, uint64_t imm)
static bool IsNearCallOffset(int64_t offset)
void ReplaceClosureCodeWithOptimizedCode(Register optimized_code, Register closure)
void Adr(const Register &rd, Label *label, AdrHint=kAdrNear)
void Ccmp(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond)
void AssertBoundFunction(Register object) NOOP_UNLESS_DEBUG_CODE
void Sub(const Register &rd, const Register &rn, const Operand &operand)
void CallRecordWriteStubSaveRegisters(Register object, Operand offset, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void I8x16BitMask(Register dst, VRegister src, VRegister temp=NoVReg)
void CompareInstanceTypeRange(Register map, Register type_reg, Register scratch, InstanceType lower_limit, InstanceType higher_limit)
Operand MoveImmediateForShiftedOp(const Register &dst, int64_t imm, PreShiftImmMode mode)
void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, Register feedback_vector)
void LoadElementsKindFromMap(Register result, Register map)
void ConditionalCompareMacro(const Register &rn, const Operand &operand, StatusFlags nzcv, Condition cond, ConditionalCompareOp op)
void LoadIsolateField(Register dst, IsolateFieldId id)
void Cbnz(const Register &rt, Label *label)
void MaybeRestoreRegisters(RegList registers)
void CallRecordWriteStub(Register object, Register slot_address, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void Cbz(const Register &rt, Label *label)
void PopCalleeSavedRegisters()
void TryLoadOptimizedOsrCode(Register scratch_and_result, CodeKind min_opt_level, Register feedback_vector, FeedbackSlot slot, Label *on_result, Label::Distance distance)
void Abs(const Register &rd, const Register &rm, Label *is_not_representable=nullptr, Label *is_representable=nullptr)
int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1=no_reg, Register exclusion2=no_reg, Register exclusion3=no_reg)
void AssertSpAligned() NOOP_UNLESS_DEBUG_CODE
void AssertUndefinedOrAllocationSite(Register object, Register scratch) NOOP_UNLESS_DEBUG_CODE
void Movi16bitHelper(const VRegister &vd, uint64_t imm)
bool TryOneInstrMoveImmediate(const Register &dst, int64_t imm)
void StoreTwoTaggedFields(const Register &value, const MemOperand &dst_field_operand)
void SmiUntagField(Register dst, const MemOperand &src)
void PopAll(RegList registers)
void Movi64bitHelper(const VRegister &vd, uint64_t imm)
void DecompressTagged(const Register &destination, const MemOperand &field_operand)
Condition CheckSmi(Register src)
void StoreRootRelative(int32_t offset, Register value) final
MemOperand ReceiverOperand()
void LoadTaggedSignedField(const Register &destination, const MemOperand &field_operand)
void LoadMap(Register destination, Register object)
void AtomicStoreTaggedField(const Register &value, const Register &dst_base, const Register &dst_index, const Register &temp)
void AssertFPCRState(Register fpcr=NoReg) NOOP_UNLESS_DEBUG_CODE
void TailCallRuntime(Runtime::FunctionId fid)
void PushHelper(int count, int size, const CPURegister &src0, const CPURegister &src1, const CPURegister &src2, const CPURegister &src3)
static unsigned CountSetHalfWords(uint64_t imm, unsigned reg_size)
void Swap(Register srcdst0, Register srcdst1)
void JumpHelper(int64_t offset, RelocInfo::Mode rmode, Condition cond=al)
void LoadNativeContextSlot(Register dst, int index)
static CPURegList DefaultFPTmpList()
static const int kSmiShift
void LoadTaggedFieldWithoutDecompressing(const Register &destination, const MemOperand &field_operand)
void TailCallBuiltin(Builtin builtin, Condition cond=al)
void Br(const Register &xn)
void Switch(Register scratch, Register value, int case_value_base, Label **labels, int num_labels)
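// Usage sketch (illustrative): a dense jump table over the value in x0, with
// x10 used as the scratch register that Switch requires.
Label case0, case1, case2;
Label *cases[] = {&case0, &case1, &case2};
__ Switch(x10, x0, 0, cases, 3);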
void AssertSmiOrHeapObjectInMainCompressionCage(Register object) NOOP_UNLESS_DEBUG_CODE
void DropArguments(Register count)
void AssertCallableFunction(Register object) NOOP_UNLESS_DEBUG_CODE
static constexpr MainThreadFlags kPointersToHereAreInterestingMask
static constexpr intptr_t FlagsOffset()
static constexpr MainThreadFlags kPointersFromHereAreInterestingMask
static constexpr intptr_t GetAlignmentMaskForAssembler()
static constexpr int kAdrFarPatchableNNops
static constexpr int kAdrFarPatchableNInstrs
static constexpr Register Create(int code, int size)
static constexpr Register no_reg()
static constexpr bool IsCompressedEmbeddedObject(Mode mode)
static constexpr bool IsCodeTarget(Mode mode)
static constexpr bool IsEmbeddedObjectMode(Mode mode)
static constexpr bool IsReadOnly(RootIndex root_index)
static constexpr bool IsImmortalImmovable(RootIndex root_index)
static V8_EXPORT_PRIVATE const Function * FunctionForId(FunctionId id)
static SlotDescriptor ForCodePointerSlot()
static constexpr Tagged< Smi > FromInt(int value)
static constexpr int32_t TypeToMarker(Type type)
static bool IsJavaScript(Type t)
static constexpr int OffsetOfElementAt(int index)
static constexpr VRegister no_reg()
static constexpr Register ObjectRegister()
static constexpr RegList ComputeSavedRegisters(Register object, Register slot_address=no_reg)
static constexpr Register SlotAddressRegister()
static constexpr Builtin GetRecordWriteBuiltin(SaveFPRegsMode fp_mode)
#define ASM_CODE_COMMENT_STRING(asm,...)
#define ASM_CODE_COMMENT(asm)
#define V8_ENABLE_LEAPTIERING_BOOL
#define COMPRESS_POINTERS_BOOL
#define V8_ENABLE_SANDBOX_BOOL
#define ASM_LOCATION_IN_ASSEMBLER(message)
#define ASM_LOCATION(message)
constexpr unsigned CountTrailingZeros(T value)
V8_INLINE Dest bit_cast(Source const &source)
constexpr bool IsInRange(T value, U lower_limit, U higher_limit)
constexpr Tagged_t kNonJsReceiverMapLimit
V8_INLINE constexpr std::optional< RootIndex > UniqueMapOfInstanceType(InstanceType type)
constexpr Register no_reg
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
constexpr Register kRootRegister
constexpr int kCodePointerTableEntrySizeLog2
constexpr AddrMode PreIndex
uint32_t AddSubWithCarryOp
constexpr uint64_t kExternalPointerTagShift
constexpr int kTaggedSize
constexpr int64_t kXSignBit
bool IsNone(Tagged< FieldType > obj)
Condition kUnsignedGreaterThanEqual
constexpr int kFPRegisterPassedArguments
constexpr int64_t kWRegMask
VRegister DoubleRegister
static V8_INLINE constexpr bool IsSharedExternalPointerType(ExternalPointerTagRange tag_range)
constexpr int64_t kWSignBit
constexpr uint64_t kExternalPointerPayloadMask
IndirectPointerTag kUnknownIndirectPointerTag
static const unsigned kPrintfArgPatternBits
uint32_t ConditionalCompareOp
RegListBase< Register > RegList
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(kFirstExternalPointerTag, kLastExternalPointerTag)
constexpr bool CodeKindCanTierUp(CodeKind kind)
constexpr Register kJavaScriptCallTargetRegister
constexpr int kCodePointerTableEntryCodeObjectOffset
constexpr int kTrustedPointerTableEntrySizeLog2
constexpr int kWRegSizeInBits
const unsigned kPrintfLength
const Address kWeakHeapObjectMask
constexpr Register kJavaScriptCallArgCountRegister
constexpr int kSystemPointerSizeLog2
base::StrongAlias< JSDispatchHandleAliasTag, uint32_t > JSDispatchHandle
TagRange< ExternalPointerTag > ExternalPointerTagRange
static const int kRegisterPassedArguments
const unsigned kPrintfMaxArgCount
MemOperand FieldMemOperand(Register object, int offset)
const Instr kImmExceptionIsPrintf
constexpr int kSystemPointerSize
const RegList kCallerSaved
constexpr int kXRegSizeInBitsLog2
const char * GetAbortReason(AbortReason reason)
static constexpr int kMaxCParameters
constexpr uint32_t kDebugZapValue
constexpr uint32_t kZapValue
constexpr bool SmiValuesAre31Bits()
Condition NegateCondition(Condition cond)
constexpr int kWRegSizeInBitsLog2
DiscardMoveMode kDontDiscardForSameWReg
InstanceType LAST_CALLABLE_JS_FUNCTION_TYPE
InstanceType FIRST_CALLABLE_JS_FUNCTION_TYPE
constexpr Register kWasmImplicitArgRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
constexpr uint32_t kTrustedPointerHandleShift
constexpr uint32_t kCodePointerHandleShift
Tagged< ClearedWeakValue > ClearedValue(PtrComprCageBase cage_base)
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool SmiValuesAre32Bits()
constexpr Register kJavaScriptCallCodeStartRegister
constexpr int kJSDispatchTableEntrySizeLog2
constexpr AddrMode PostIndex
constexpr Register kPtrComprCageBaseRegister
const intptr_t kSmiTagMask
constexpr int kXRegSizeInBits
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
constexpr uint8_t kInstrSize
constexpr int kQRegSizeInBits
constexpr uint64_t kTrustedPointerTableMarkBit
constexpr Register kCArgRegs[]
static int CountLeadingZeros(uint64_t value, int width)
constexpr bool is_uintn(int64_t x, unsigned n)
V8_EXPORT_PRIVATE bool AreSameSizeAndType(const CPURegister &reg1, const CPURegister &reg2=NoCPUReg, const CPURegister &reg3=NoCPUReg, const CPURegister &reg4=NoCPUReg, const CPURegister &reg5=NoCPUReg, const CPURegister &reg6=NoCPUReg, const CPURegister &reg7=NoCPUReg, const CPURegister &reg8=NoCPUReg)
BranchType kBranchTypeLastCondition
BranchType kBranchTypeFirstUsingReg
BranchType kBranchTypeFirstUsingBit
BranchType kBranchTypeFirstCondition
constexpr Register kJavaScriptCallDispatchHandleRegister
constexpr uint32_t kCodePointerHandleMarker
const uint32_t kClearedWeakHeapObjectLower32
RootIndex kFirstStrongOrReadOnlyRoot
RootIndex kLastStrongOrReadOnlyRoot
constexpr uint32_t kMaxUInt32
constexpr Register kJavaScriptCallNewTargetRegister
constexpr Register kJSFunctionRegister
constexpr uint64_t kExternalPointerShiftedTagMask
static V8_INLINE constexpr bool ExternalPointerCanBeEmpty(ExternalPointerTagRange tag_range)
unsigned CalcLSPairDataSize(LoadStorePairOp op)
constexpr Register padreg
constexpr bool PointerCompressionIsEnabled()
constexpr int kDRegSizeInBits
#define DCHECK_LE(v1, v2)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
constexpr T RoundUp(T x, intptr_t m)
constexpr bool IsAligned(T value, U alignment)
#define V8_STATIC_ROOTS_BOOL
#define V8_UNLIKELY(condition)