5#ifndef V8_WASM_BASELINE_X64_LIFTOFF_ASSEMBLER_X64_INL_H_
6#define V8_WASM_BASELINE_X64_LIFTOFF_ASSEMBLER_X64_INL_H_
// Guard macro for emitter functions that report support via a bool return:
// if the required CPU feature `name` is not available, bail out of the
// enclosing function with `false`; otherwise open a CpuFeatureScope on the
// current assembler (`this`) so the instructions that follow are allowed to
// use that feature's encodings. NOTE: expands to an unbraced `if` plus a
// scope object, so it must be used as a statement at the top of a function
// body, not inside a conditional.
29#define RETURN_FALSE_IF_MISSING_CPU_FEATURE(name) \
30 if (!CpuFeatures::IsSupported(name)) return false; \
31 CpuFeatureScope feature(this, name);
40 "scratch registers must not be used as cache registers");
47 "scratch registers must not be used as cache registers");
61 Register offset_reg, uintptr_t offset_imm,
63 if (is_uint31(offset_imm)) {
64 int32_t offset_imm32 =
static_cast<int32_t
>(offset_imm);
65 return offset_reg ==
no_reg
67 :
Operand(addr, offset_reg, scale_factor, offset_imm32);
71 assm->MacroAssembler::Move(scratch, offset_imm);
72 if (offset_reg !=
no_reg) assm->addq(scratch, offset_reg);
73 return Operand(addr, scratch, scale_factor, 0);
92 assm->Movss(dst.
fp(), src);
95 assm->Movsd(dst.
fp(), src);
98 assm->Movdqu(dst.
fp(), src);
109 assm->
movw(dst, src.gp());
112 assm->
movl(dst, src.gp());
118 assm->
movq(dst, src.gp());
121 assm->Movss(dst, src.fp());
124 assm->Movsd(dst, src.fp());
127 assm->Movdqu(dst, src.fp());
138 }
else if (src.is_const()) {
139 if (src.kind() ==
kI32) {
142 assm->MacroAssembler::Move(dst,
static_cast<int64_t
>(src.i32_const()));
204 kLiftoffFrameSetupFunctionReg) ==
208 LoadConstant(LiftoffRegister(kLiftoffFrameSetupFunctionReg),
214 int stack_param_delta) {
216 pushq(Operand(rbp, 8));
217 pushq(Operand(rbp, 0));
220 const int slot_count = num_callee_stack_params + 2;
221 for (
int i = slot_count - 1;
i >= 0; --
i) {
227 leaq(rsp, Operand(rbp, -stack_param_delta * 8));
236 int offset, SafepointTableBuilder* safepoint_table_builder,
237 bool feedback_vector_slot,
size_t stack_param_slots) {
243 if (feedback_vector_slot) {
250 constexpr int kAvailableSpace = 64;
258 patching_assembler.sub_sp_32(frame_size);
287 if (frame_size <
v8_flags.stack_size * 1024) {
295 if (
v8_flags.experimental_wasm_growable_stacks) {
298 regs_to_save.set(WasmHandleStackOverflowDescriptor::FrameBaseRegister());
303 Immediate(frame_size));
304 movq(WasmHandleStackOverflowDescriptor::FrameBaseRegister(), rbp);
305 addq(WasmHandleStackOverflowDescriptor::FrameBaseRegister(),
306 Immediate(
static_cast<int32_t>(
310 safepoint_table_builder->DefineSafepoint(
this);
313 near_call(
static_cast<intptr_t
>(Builtin::kWasmStackOverflow),
316 safepoint_table_builder->DefineSafepoint(
this);
339 return kOSRTargetOffset;
352 const FreezeCacheState& frozen) {
361 WasmTrustedInstanceData::kTieringBudgetArrayOffset);
365 subl(Operand{budget_array,
offset}, Immediate(budget_used));
370 if (!
v8_flags.experimental_wasm_growable_stacks) {
373 Label done, call_runtime;
378 movq(old_fp.gp(), rbp);
416 switch (value.type().kind()) {
418 if (value.to_i32() == 0) {
421 movl(
reg.gp(), Immediate(value.to_i32()));
451 Operand src{instance,
offset};
485 uint32_t* protected_load_pc,
494 static_cast<uint32_t
>(offset_imm), scale_factor);
495 if (protected_load_pc) *protected_load_pc =
pc_offset();
500 int32_t offset_imm) {
506 int32_t offset_imm) {
508 static_cast<uint32_t
>(offset_imm));
512#ifdef V8_ENABLE_SANDBOX
513void LiftoffAssembler::LoadCodeEntrypointViaCodePointer(Register dst,
517 static_cast<uint32_t
>(offset_imm));
518 MacroAssembler::LoadCodeEntrypointViaCodePointer(dst, src_op,
525 int32_t offset_imm, Register src,
526 LiftoffRegList pinned,
527 uint32_t* protected_store_pc,
528 SkipWriteBarrier skip_write_barrier) {
531 static_cast<uint32_t
>(offset_imm));
532 if (protected_store_pc) *protected_store_pc =
pc_offset();
535 if (skip_write_barrier ||
v8_flags.disable_write_barriers)
return;
552 StubCallMode::kCallWasmRuntimeStub);
557 Register offset_reg, uintptr_t offset_imm,
558 LoadType type, LiftoffRegList ,
560 Load(dst, src_addr, offset_reg, offset_imm, type,
nullptr,
true, i64_offset);
564 Register offset_reg, uintptr_t offset_imm,
565 LoadType type, uint32_t* protected_load_pc,
566 bool ,
bool i64_offset,
574 if (protected_load_pc) *protected_load_pc =
pc_offset();
575 switch (type.value()) {
576 case LoadType::kI32Load8U:
577 case LoadType::kI64Load8U:
578 movzxbl(dst.gp(), src_op);
580 case LoadType::kI32Load8S:
583 case LoadType::kI64Load8S:
586 case LoadType::kI32Load16U:
587 case LoadType::kI64Load16U:
588 movzxwl(dst.gp(), src_op);
590 case LoadType::kI32Load16S:
593 case LoadType::kI64Load16S:
596 case LoadType::kI32Load:
597 case LoadType::kI64Load32U:
598 movl(dst.gp(), src_op);
600 case LoadType::kI64Load32S:
603 case LoadType::kI64Load:
604 movq(dst.gp(), src_op);
606 case LoadType::kF32Load:
607 Movss(dst.fp(), src_op);
609 case LoadType::kF32LoadF16: {
610 CpuFeatureScope f16c_scope(
this, F16C);
611 CpuFeatureScope avx2_scope(
this, AVX2);
612 vpbroadcastw(dst.fp(), src_op);
616 case LoadType::kF64Load:
617 Movsd(dst.fp(), src_op);
619 case LoadType::kS128Load:
620 Movdqu(dst.fp(), src_op);
626 uintptr_t offset_imm, LiftoffRegister src,
627 StoreType type, LiftoffRegList ,
628 uint32_t* protected_store_pc,
629 bool ,
bool i64_offset) {
632 if (protected_store_pc) *protected_store_pc =
pc_offset();
633 switch (type.value()) {
634 case StoreType::kI32Store8:
635 case StoreType::kI64Store8:
636 movb(dst_op, src.gp());
638 case StoreType::kI32Store16:
639 case StoreType::kI64Store16:
640 movw(dst_op, src.gp());
642 case StoreType::kI32Store:
643 case StoreType::kI64Store32:
644 movl(dst_op, src.gp());
646 case StoreType::kI64Store:
647 movq(dst_op, src.gp());
649 case StoreType::kF32Store:
650 Movss(dst_op, src.fp());
652 case StoreType::kF32StoreF16: {
653 CpuFeatureScope fscope(
this, F16C);
658 case StoreType::kF64Store:
659 Movsd(dst_op, src.fp());
661 case StoreType::kS128Store:
662 Movdqu(dst_op, src.fp());
668 uintptr_t offset_imm, LiftoffRegister src,
669 StoreType type, LiftoffRegList ,
678 switch (type.value()) {
679 case StoreType::kI32Store8:
680 case StoreType::kI64Store8:
681 xchgb(src_reg, dst_op);
683 case StoreType::kI32Store16:
684 case StoreType::kI64Store16:
685 xchgw(src_reg, dst_op);
687 case StoreType::kI32Store:
688 case StoreType::kI64Store32:
689 xchgl(src_reg, dst_op);
691 case StoreType::kI64Store:
692 xchgq(src_reg, dst_op);
700 uintptr_t offset_imm, LiftoffRegister value,
701 LiftoffRegister
result, StoreType type,
714 switch (type.value()) {
715 case StoreType::kI32Store8:
716 case StoreType::kI64Store8:
717 xaddb(dst_op, value.gp());
718 movzxbq(
result.gp(), value.gp());
720 case StoreType::kI32Store16:
721 case StoreType::kI64Store16:
722 xaddw(dst_op, value.gp());
723 movzxwq(
result.gp(), value.gp());
725 case StoreType::kI32Store:
726 case StoreType::kI64Store32:
727 xaddl(dst_op, value.gp());
732 case StoreType::kI64Store:
733 xaddq(dst_op, value.gp());
744 uintptr_t offset_imm, LiftoffRegister value,
745 LiftoffRegister
result, StoreType type,
748 LiftoffRegList dont_overwrite =
750 if (offset_reg !=
no_reg) dont_overwrite.
set(offset_reg);
752 if (dont_overwrite.has(value)) {
760 switch (type.value()) {
761 case StoreType::kI32Store8:
762 case StoreType::kI64Store8:
765 xaddb(dst_op, value.gp());
766 movzxbq(
result.gp(), value.gp());
768 case StoreType::kI32Store16:
769 case StoreType::kI64Store16:
772 xaddw(dst_op, value.gp());
773 movzxwq(
result.gp(), value.gp());
775 case StoreType::kI32Store:
776 case StoreType::kI64Store32:
779 xaddl(dst_op, value.gp());
784 case StoreType::kI64Store:
787 xaddq(dst_op, value.gp());
807 if (offset_reg !=
no_reg && !i64_offset)
__ AssertZeroExtended(offset_reg);
814 if (offset_reg !=
no_reg) pinned.
set(offset_reg);
815 __ ClearRegister(rax, {&dst_addr, &offset_reg, &value_reg}, pinned);
818 switch (type.value()) {
819 case StoreType::kI32Store8:
820 case StoreType::kI64Store8: {
823 __ movb(rax, dst_op);
832 case StoreType::kI32Store16:
833 case StoreType::kI64Store16: {
836 __ movw(rax, dst_op);
845 case StoreType::kI32Store:
846 case StoreType::kI64Store32: {
848 __ movl(rax, dst_op);
857 case StoreType::kI64Store: {
859 __ movq(rax, dst_op);
880 uintptr_t offset_imm, LiftoffRegister value,
881 LiftoffRegister
result, StoreType type,
884 offset_reg, offset_imm, value,
result, type, i64_offset);
888 uintptr_t offset_imm, LiftoffRegister value,
889 LiftoffRegister
result, StoreType type,
892 offset_reg, offset_imm, value,
result, type, i64_offset);
896 uintptr_t offset_imm, LiftoffRegister value,
897 LiftoffRegister
result, StoreType type,
900 offset_reg, offset_imm, value,
result, type, i64_offset);
904 uintptr_t offset_imm,
905 LiftoffRegister value,
906 LiftoffRegister
result, StoreType type,
918 switch (type.value()) {
919 case StoreType::kI32Store8:
920 case StoreType::kI64Store8:
921 xchgb(value.gp(), dst_op);
922 movzxbq(
result.gp(), value.gp());
924 case StoreType::kI32Store16:
925 case StoreType::kI64Store16:
926 xchgw(value.gp(), dst_op);
927 movzxwq(
result.gp(), value.gp());
929 case StoreType::kI32Store:
930 case StoreType::kI64Store32:
931 xchgl(value.gp(), dst_op);
936 case StoreType::kI64Store:
937 xchgq(value.gp(), dst_op);
948 Register dst_addr, Register offset_reg, uintptr_t offset_imm,
949 LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister
result,
950 StoreType type,
bool i64_offset) {
952 Register value_reg = new_value.gp();
956 LiftoffRegList pinned = LiftoffRegList{dst_addr, expected, value_reg};
957 if (offset_reg !=
no_reg) pinned.set(offset_reg);
958 ClearRegister(rax, {&dst_addr, &offset_reg, &value_reg}, pinned);
959 if (expected.gp() != rax) {
960 movq(rax, expected.gp());
966 switch (type.value()) {
967 case StoreType::kI32Store8:
968 case StoreType::kI64Store8: {
970 movzxbq(
result.gp(), rax);
973 case StoreType::kI32Store16:
974 case StoreType::kI64Store16: {
976 movzxwq(
result.gp(), rax);
979 case StoreType::kI32Store: {
980 cmpxchgl(dst_op, value_reg);
986 case StoreType::kI64Store32: {
987 cmpxchgl(dst_op, value_reg);
992 case StoreType::kI64Store: {
993 cmpxchgq(dst_op, value_reg);
1007 uint32_t caller_slot_idx,
1014 uint32_t caller_slot_idx,
1016 Register frame_pointer) {
1023 Operand src(rsp,
offset);
1086 Movss(dst,
reg.fp());
1089 Movsd(dst,
reg.fp());
1092 Movdqu(dst,
reg.fp());
1102 switch (value.type().kind()) {
1104 movl(dst, Immediate(value.to_i32()));
1107 if (is_int32(value.to_i64())) {
1109 movq(dst, Immediate(
static_cast<int32_t>(value.to_i64())));
1110 }
else if (is_uint32(value.to_i64())) {
1160 movl(rcx, Immediate(size / 4));
1187 leal(dst, Operand(lhs, imm));
1189 addl(dst, Immediate(imm));
1196 if (dst != lhs)
movl(dst, lhs);
1198 }
else if (lhs == rhs) {
1212 leal(dst, Operand(lhs, -imm));
1214 subl(dst, Immediate(imm));
1224 (assm->*op)(dst, lhs);
1226 if (dst != lhs) (assm->*mov)(dst, lhs);
1227 (assm->*op)(dst, rhs);
1235 if (dst != lhs) (assm->*mov)(dst, lhs);
1248template <
typename type, DivOrRem div_or_rem>
1251 Label* trap_div_unrepresentable) {
1252 constexpr bool needs_unrepresentable_check =
1253 std::is_signed<type>::value && div_or_rem == DivOrRem::kDiv;
1254 constexpr bool special_case_minus_1 =
1255 std::is_signed<type>::value && div_or_rem == DivOrRem::kRem;
1256 DCHECK_EQ(needs_unrepresentable_check, trap_div_unrepresentable !=
nullptr);
1258#define iop(name, ...) \
1260 if (sizeof(type) == 4) { \
1261 assm->name##l(__VA_ARGS__); \
1263 assm->name##q(__VA_ARGS__); \
1273 if (rhs == rax || rhs == rdx) {
1279 iop(test, rhs, rhs);
1280 assm->
j(zero, trap_div_by_zero);
1283 if (needs_unrepresentable_check) {
1290 assm->
j(overflow, trap_div_unrepresentable);
1291 assm->
bind(&do_div);
1292 }
else if (special_case_minus_1) {
1303 assm->
bind(&do_rem);
1308 if (lhs != rax)
iop(mov, rax, lhs);
1309 if (std::is_same<int32_t, type>::value) {
1312 }
else if (std::is_same<uint32_t, type>::value) {
1313 assm->xorl(rdx, rdx);
1315 }
else if (std::is_same<int64_t, type>::value) {
1319 assm->xorq(rdx, rdx);
1324 constexpr Register kResultReg = div_or_rem == DivOrRem::kDiv ? rax : rdx;
1325 if (dst != kResultReg) {
1326 iop(mov, dst, kResultReg);
1328 if (special_case_minus_1) assm->
bind(&done);
1333 Label* trap_div_by_zero,
1334 Label* trap_div_unrepresentable) {
1336 this, dst, lhs, rhs, trap_div_by_zero, trap_div_unrepresentable);
1340 Label* trap_div_by_zero) {
1342 this, dst, lhs, rhs, trap_div_by_zero,
nullptr);
1346 Label* trap_div_by_zero) {
1348 this, dst, lhs, rhs, trap_div_by_zero,
nullptr);
1352 Label* trap_div_by_zero) {
1354 this, dst, lhs, rhs, trap_div_by_zero,
nullptr);
1364 this, dst, lhs, imm);
1384 this, dst, lhs, imm);
1388template <ValueKind kind>
1395 if (amount != rcx) assm->
Move(rcx, amount,
kind);
1403 bool use_scratch =
false;
1404 if (amount != rcx) {
1413 if (dst != src) assm->
Move(dst, src,
kind);
1414 (assm->*emit_shift)(dst);
1424 &Assembler::shll_cl);
1429 if (dst != src)
movl(dst, src);
1430 shll(dst, Immediate(amount & 31));
1436 &Assembler::sarl_cl);
1441 if (dst != src)
movl(dst, src);
1442 sarl(dst, Immediate(amount & 31));
1448 &Assembler::shrl_cl);
1453 if (dst != src)
movl(dst, src);
1454 shrl(dst, Immediate(amount & 31));
1467 CpuFeatureScope scope(
this, POPCNT);
1473 LiftoffRegister rhs) {
1474 if (lhs.gp() != dst.gp()) {
1475 leaq(dst.gp(), Operand(lhs.gp(), rhs.gp(),
times_1, 0));
1477 addq(dst.gp(), rhs.gp());
1483 if (!is_int32(imm)) {
1485 if (lhs.gp() == dst.gp()) {
1490 }
else if (lhs.gp() == dst.gp()) {
1491 addq(dst.gp(), Immediate(
static_cast<int32_t>(imm)));
1493 leaq(dst.gp(), Operand(lhs.gp(),
static_cast<int32_t>(imm)));
1498 LiftoffRegister rhs) {
1499 if (lhs.gp() == rhs.gp()) {
1500 xorq(dst.gp(), dst.gp());
1501 }
else if (dst.gp() == rhs.gp()) {
1503 addq(dst.gp(), lhs.gp());
1505 if (dst.gp() != lhs.gp())
movq(dst.gp(), lhs.gp());
1506 subq(dst.gp(), rhs.gp());
1511 LiftoffRegister rhs) {
1513 this, dst.gp(), lhs.gp(), rhs.gp());
1521 imulq(dst.gp(), lhs.gp(), Immediate{imm});
1526 LiftoffRegister rhs,
1527 Label* trap_div_by_zero,
1528 Label* trap_div_unrepresentable) {
1530 this, dst.gp(), lhs.gp(), rhs.gp(), trap_div_by_zero,
1531 trap_div_unrepresentable);
1536 LiftoffRegister rhs,
1537 Label* trap_div_by_zero) {
1539 this, dst.gp(), lhs.gp(), rhs.gp(), trap_div_by_zero,
nullptr);
1544 LiftoffRegister rhs,
1545 Label* trap_div_by_zero) {
1547 this, dst.gp(), lhs.gp(), rhs.gp(), trap_div_by_zero,
nullptr);
1552 LiftoffRegister rhs,
1553 Label* trap_div_by_zero) {
1555 this, dst.gp(), lhs.gp(), rhs.gp(), trap_div_by_zero,
nullptr);
1560 LiftoffRegister rhs) {
1562 this, dst.gp(), lhs.gp(), rhs.gp());
1568 this, dst.gp(), lhs.gp(), imm);
1572 LiftoffRegister rhs) {
1574 this, dst.gp(), lhs.gp(), rhs.gp());
1580 this, dst.gp(), lhs.gp(), imm);
1584 LiftoffRegister rhs) {
1586 this, dst.gp(), lhs.gp(), rhs.gp());
1592 this, dst.gp(), lhs.gp(), imm);
1598 &Assembler::shlq_cl);
1603 if (dst.gp() != src.gp())
movq(dst.gp(), src.gp());
1604 shlq(dst.gp(), Immediate(amount & 63));
1610 &Assembler::sarq_cl);
1615 if (dst.gp() != src.gp())
movq(dst.gp(), src.gp());
1616 sarq(dst.gp(), Immediate(amount & 63));
1622 &Assembler::shrq_cl);
1627 if (dst != src)
movq(dst.gp(), src.gp());
1628 shrq(dst.gp(), Immediate(amount & 63));
1632 Lzcntq(dst.gp(), src.gp());
1636 Tzcntq(dst.gp(), src.gp());
1640 LiftoffRegister src) {
1642 CpuFeatureScope scope(
this, POPCNT);
1653 if (dst != src)
movl(dst, src);
1661 CpuFeatureScope scope(
this, AVX);
1663 }
else if (dst == rhs) {
1666 if (dst != lhs)
movss(dst, lhs);
1674 CpuFeatureScope scope(
this, AVX);
1676 }
else if (dst == rhs) {
1681 if (dst != lhs)
movss(dst, lhs);
1689 CpuFeatureScope scope(
this, AVX);
1691 }
else if (dst == rhs) {
1694 if (dst != lhs)
movss(dst, lhs);
1702 CpuFeatureScope scope(
this, AVX);
1704 }
else if (dst == rhs) {
1709 if (dst != lhs)
movss(dst, lhs);
1716template <
typename type>
1719 MinOrMax min_or_max) {
1721 Label lhs_below_rhs;
1722 Label lhs_above_rhs;
1725#define dop(name, ...) \
1727 if (sizeof(type) == 4) { \
1728 assm->name##s(__VA_ARGS__); \
1730 assm->name##d(__VA_ARGS__); \
1736 dop(Ucomis, lhs, rhs);
1752 assm->
bind(&is_nan);
1754 dop(Xorp, dst, dst);
1755 dop(Divs, dst, dst);
1758 assm->
bind(&lhs_below_rhs);
1759 DoubleRegister lhs_below_rhs_src = min_or_max == MinOrMax::kMin ? lhs : rhs;
1760 if (dst != lhs_below_rhs_src)
dop(Movs, dst, lhs_below_rhs_src);
1763 assm->
bind(&lhs_above_rhs);
1764 DoubleRegister lhs_above_rhs_src = min_or_max == MinOrMax::kMin ? rhs : lhs;
1765 if (dst != lhs_above_rhs_src)
dop(Movs, dst, lhs_above_rhs_src);
1785 static constexpr int kF32SignBit = 1 << 31;
1795 static constexpr uint32_t
kSignBit = uint32_t{1} << 31;
1806 static constexpr uint32_t
kSignBit = uint32_t{1} << 31;
1848 CpuFeatureScope scope(
this, AVX);
1849 vaddsd(dst, lhs, rhs);
1850 }
else if (dst == rhs) {
1853 if (dst != lhs)
movsd(dst, lhs);
1861 CpuFeatureScope scope(
this, AVX);
1862 vsubsd(dst, lhs, rhs);
1863 }
else if (dst == rhs) {
1868 if (dst != lhs)
movsd(dst, lhs);
1876 CpuFeatureScope scope(
this, AVX);
1877 vmulsd(dst, lhs, rhs);
1878 }
else if (dst == rhs) {
1881 if (dst != lhs)
movsd(dst, lhs);
1889 CpuFeatureScope scope(
this, AVX);
1890 vdivsd(dst, lhs, rhs);
1891 }
else if (dst == rhs) {
1896 if (dst != lhs)
movsd(dst, lhs);
1928 static constexpr uint64_t
kSignBit = uint64_t{1} << 63;
1939 static constexpr uint64_t
kSignBit = uint64_t{1} << 63;
1982template <
typename dst_type,
typename src_type>
1986 if (std::is_same<double, src_type>::value) {
1987 if (std::is_same<int32_t, dst_type>::value) {
1988 __ Cvttsd2si(dst, src);
1989 __ Cvtlsi2sd(converted_back, dst);
1990 }
else if (std::is_same<uint32_t, dst_type>::value) {
1991 __ Cvttsd2siq(dst, src);
1993 __ Cvtqsi2sd(converted_back, dst);
1994 }
else if (std::is_same<int64_t, dst_type>::value) {
1995 __ Cvttsd2siq(dst, src);
1996 __ Cvtqsi2sd(converted_back, dst);
2001 if (std::is_same<int32_t, dst_type>::value) {
2002 __ Cvttss2si(dst, src);
2003 __ Cvtlsi2ss(converted_back, dst);
2004 }
else if (std::is_same<uint32_t, dst_type>::value) {
2005 __ Cvttss2siq(dst, src);
2007 __ Cvtqsi2ss(converted_back, dst);
2008 }
else if (std::is_same<int64_t, dst_type>::value) {
2009 __ Cvttss2siq(dst, src);
2010 __ Cvtqsi2ss(converted_back, dst);
2017template <
typename dst_type,
typename src_type>
2029 if (std::is_same<double, src_type>::value) {
2034 ConvertFloatToIntAndBack<dst_type, src_type>(assm, dst, rounded,
2036 if (std::is_same<double, src_type>::value) {
2037 __ Ucomisd(converted_back, rounded);
2039 __ Ucomiss(converted_back, rounded);
2048template <
typename dst_type,
typename src_type>
2055 CpuFeatureScope feature(assm, SSE4_1);
2065 if (std::is_same<double, src_type>::value) {
2071 ConvertFloatToIntAndBack<dst_type, src_type>(assm, dst, rounded,
2073 if (std::is_same<double, src_type>::value) {
2074 __ Ucomisd(converted_back, rounded);
2076 __ Ucomiss(converted_back, rounded);
2088 __ xorpd(zero_reg, zero_reg);
2091 if (std::is_same<double, src_type>::value) {
2092 __ Ucomisd(src, zero_reg);
2094 __ Ucomiss(src, zero_reg);
2097 if (std::is_same<int32_t, dst_type>::value ||
2098 std::is_same<uint32_t, dst_type>::value) {
2101 Immediate(
static_cast<int32_t>(std::numeric_limits<dst_type>::min())));
2102 }
else if (std::is_same<int64_t, dst_type>::value) {
2103 __ movq(dst, Immediate64(std::numeric_limits<dst_type>::min()));
2109 __ bind(&src_positive);
2110 if (std::is_same<int32_t, dst_type>::value ||
2111 std::is_same<uint32_t, dst_type>::value) {
2114 Immediate(
static_cast<int32_t>(std::numeric_limits<dst_type>::max())));
2115 }
else if (std::is_same<int64_t, dst_type>::value) {
2116 __ movq(dst, Immediate64(std::numeric_limits<dst_type>::max()));
2124template <
typename src_type>
2139 __ xorpd(zero_reg, zero_reg);
2140 if (std::is_same<double, src_type>::value) {
2141 __ Ucomisd(src, zero_reg);
2143 __ Ucomiss(src, zero_reg);
2148 if (std::is_same<double, src_type>::value) {
2149 __ Cvttsd2uiq(dst, src, &overflow);
2151 __ Cvttss2uiq(dst, src, &overflow);
2155 __ bind(&neg_or_nan);
2156 __ movq(dst, zero_reg);
2160 __ movq(dst,
Immediate64(std::numeric_limits<uint64_t>::max()));
2167 LiftoffRegister dst,
2170 case kExprI32ConvertI64:
2171 movl(dst.gp(), src.gp());
2173 case kExprI32SConvertF32:
2177 case kExprI32UConvertF32:
2181 case kExprI32SConvertF64:
2185 case kExprI32UConvertF64:
2189 case kExprI32SConvertSatF32:
2193 case kExprI32UConvertSatF32:
2197 case kExprI32SConvertSatF64:
2201 case kExprI32UConvertSatF64:
2205 case kExprI32ReinterpretF32:
2206 Movd(dst.gp(), src.fp());
2208 case kExprI64SConvertI32:
2211 case kExprI64SConvertF32:
2215 case kExprI64UConvertF32: {
2220 case kExprI64SConvertF64:
2224 case kExprI64UConvertF64: {
2229 case kExprI64SConvertSatF32:
2233 case kExprI64UConvertSatF32: {
2237 case kExprI64SConvertSatF64:
2241 case kExprI64UConvertSatF64: {
2245 case kExprI64UConvertI32:
2248 case kExprI64ReinterpretF64:
2249 Movq(dst.gp(), src.fp());
2251 case kExprF32SConvertI32:
2254 case kExprF32UConvertI32:
2258 case kExprF32SConvertI64:
2261 case kExprF32UConvertI64:
2264 case kExprF32ConvertF64:
2267 case kExprF32ReinterpretI32:
2268 Movd(dst.fp(), src.gp());
2270 case kExprF64SConvertI32:
2273 case kExprF64UConvertI32:
2277 case kExprF64SConvertI64:
2280 case kExprF64UConvertI64:
2283 case kExprF64ConvertF32:
2286 case kExprF64ReinterpretI64:
2287 Movq(dst.fp(), src.gp());
2304 LiftoffRegister src) {
2309 LiftoffRegister src) {
2314 LiftoffRegister src) {
2325 const FreezeCacheState& frozen) {
2334#if defined(V8_COMPRESS_POINTERS)
2358 Register lhs,
int imm,
2359 const FreezeCacheState& frozen) {
2360 cmpl(lhs, Immediate(imm));
2365 Register lhs, int32_t imm,
2366 const FreezeCacheState& frozen) {
2367 cmpq(lhs, Immediate(imm));
2378 Register lhs, Register rhs) {
2385 testq(src.gp(), src.gp());
2391 LiftoffRegister lhs,
2392 LiftoffRegister rhs) {
2393 cmpq(lhs.gp(), rhs.gp());
2406 (assm->*cmp_op)(lhs, rhs);
2411 assm->movl(dst, Immediate(1));
2413 assm->xorl(dst, dst);
2416 assm->bind(¬_nan);
2418 assm->setcc(cond, dst);
2419 assm->movzxbl(dst, dst);
2439 LiftoffRegister true_value,
2440 LiftoffRegister false_value,
2447 if (dst == false_value) {
2450 if (dst != true_value)
movl(dst.gp(), true_value.gp());
2451 cmovl(zero, dst.gp(), false_value.gp());
2454 if (dst == false_value) {
2457 if (dst != true_value)
movq(dst.gp(), true_value.gp());
2458 cmovq(zero, dst.gp(), false_value.gp());
2467 const FreezeCacheState& frozen) {
2475template <
void (Assembler::*avx_op)(XMMRegister, XMMRegister, XMMRegister),
2476 void (Assembler::*sse_op)(XMMRegister, XMMRegister)>
2478 LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs,
2479 LiftoffRegister rhs, std::optional<CpuFeature> feature = std::nullopt) {
2481 CpuFeatureScope scope(assm, AVX);
2482 (assm->*avx_op)(dst.fp(), lhs.fp(), rhs.fp());
2486 std::optional<CpuFeatureScope> sse_scope;
2487 if (feature.has_value()) sse_scope.emplace(assm, *feature);
2489 if (dst.fp() == rhs.fp()) {
2490 (assm->*sse_op)(dst.fp(), lhs.fp());
2492 if (dst.fp() != lhs.fp()) (assm->movaps)(dst.fp(), lhs.fp());
2493 (assm->*sse_op)(dst.fp(), rhs.fp());
2497template <
void (Assembler::*avx_op)(XMMRegister, XMMRegister, XMMRegister),
2498 void (Assembler::*sse_op)(XMMRegister, XMMRegister)>
2500 LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister lhs,
2501 LiftoffRegister rhs, std::optional<CpuFeature> feature = std::nullopt) {
2503 CpuFeatureScope scope(assm, AVX);
2504 (assm->*avx_op)(dst.fp(), lhs.fp(), rhs.fp());
2508 std::optional<CpuFeatureScope> sse_scope;
2509 if (feature.has_value()) sse_scope.emplace(assm, *feature);
2511 if (dst.fp() == rhs.fp()) {
2513 assm->movaps(dst.fp(), lhs.fp());
2516 if (dst.fp() != lhs.fp()) assm->movaps(dst.fp(), lhs.fp());
2517 (assm->*sse_op)(dst.fp(), rhs.fp());
2521template <
void (Assembler::*avx_op)(XMMRegister, XMMRegister, XMMRegister),
2522 void (Assembler::*sse_op)(XMMRegister, XMMRegister), uint8_t width>
2524 LiftoffRegister operand, LiftoffRegister count) {
2525 constexpr int mask = (1 << width) - 1;
2530 CpuFeatureScope scope(assm, AVX);
2533 if (dst.fp() != operand.fp()) assm->movaps(dst.fp(), operand.fp());
2538template <
void (Assembler::*avx_op)(XMMRegister, XMMRegister, uint8_t),
2539 void (Assembler::*sse_op)(XMMRegister, uint8_t), uint8_t width>
2541 LiftoffRegister operand, int32_t count) {
2542 constexpr int mask = (1 << width) - 1;
2543 uint8_t shift =
static_cast<uint8_t
>(count &
mask);
2545 CpuFeatureScope scope(assm, AVX);
2546 (assm->*avx_op)(dst.fp(), operand.fp(), shift);
2548 if (dst.fp() != operand.fp()) assm->movaps(dst.fp(), operand.fp());
2549 (assm->*sse_op)(dst.fp(), shift);
2553inline void EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst,
2554 LiftoffRegister src) {
2555 assm->xorq(dst.gp(), dst.gp());
2556 assm->Ptest(src.fp(), src.fp());
2560template <
void (SharedMacroAssemblerBase::*pcmp)(XMMRegister, XMMRegister)>
2561inline void EmitAllTrue(LiftoffAssembler* assm, LiftoffRegister dst,
2562 LiftoffRegister src,
2563 std::optional<CpuFeature> feature = std::nullopt) {
2564 std::optional<CpuFeatureScope> sse_scope;
2565 if (feature.has_value()) sse_scope.emplace(assm, *feature);
2568 assm->xorq(dst.gp(), dst.gp());
2569 assm->Pxor(tmp, tmp);
2570 (assm->*pcmp)(tmp, src.fp());
2571 assm->Ptest(tmp, tmp);
2572 assm->setcc(
equal, dst.gp());
2578 Register offset_reg, uintptr_t offset_imm,
2581 uint32_t* protected_load_pc,
2585 MachineType memtype = type.mem_type();
2588 Pmovsxbw(dst.fp(), src_op);
2590 Pmovzxbw(dst.fp(), src_op);
2592 Pmovsxwd(dst.fp(), src_op);
2594 Pmovzxwd(dst.fp(), src_op);
2596 Pmovsxdq(dst.fp(), src_op);
2598 Pmovzxdq(dst.fp(), src_op);
2602 Movss(dst.fp(), src_op);
2605 Movsd(dst.fp(), src_op);
2616 Movddup(dst.fp(), src_op);
2622 Register addr, Register offset_reg,
2623 uintptr_t offset_imm, LoadType type,
2624 uint8_t laneidx, uint32_t* protected_load_pc,
2629 MachineType mem_type = type.mem_type();
2631 Pinsrb(dst.fp(), src.fp(), src_op, laneidx, protected_load_pc);
2633 Pinsrw(dst.fp(), src.fp(), src_op, laneidx, protected_load_pc);
2635 Pinsrd(dst.fp(), src.fp(), src_op, laneidx, protected_load_pc);
2638 Pinsrq(dst.fp(), src.fp(), src_op, laneidx, protected_load_pc);
2643 uintptr_t offset_imm, LiftoffRegister src,
2644 StoreType type, uint8_t lane,
2645 uint32_t* protected_store_pc,
2649 if (protected_store_pc) *protected_store_pc =
pc_offset();
2652 Pextrb(dst_op, src.fp(), lane);
2654 Pextrw(dst_op, src.fp(), lane);
2664 LiftoffRegister lhs,
2665 LiftoffRegister rhs,
2666 const uint8_t shuffle[16],
2678 uint64_t mask1[2] = {};
2679 for (
int i = 15;
i >= 0;
i--) {
2680 uint8_t lane = shuffle[
i];
2688 uint64_t mask2[2] = {};
2689 for (
int i = 15;
i >= 0;
i--) {
2690 uint8_t lane = shuffle[
i];
2702 LiftoffRegister lhs,
2703 LiftoffRegister rhs) {
2709 LiftoffRegister lhs,
2710 LiftoffRegister rhs) {
2716 LiftoffRegister src) {
2717 Cvttps2dq(dst.fp(), src.fp());
2721 LiftoffRegister src) {
2726 LiftoffRegister dst, LiftoffRegister src) {
2727 Cvttpd2dq(dst.fp(), src.fp());
2731 LiftoffRegister dst, LiftoffRegister src) {
2736 LiftoffRegister src1,
2737 LiftoffRegister src2,
2738 LiftoffRegister
mask,
2742 if (lane_width == 8) {
2744 }
else if (lane_width == 32) {
2746 }
else if (lane_width == 64) {
2754 LiftoffRegister src) {
2760 LiftoffRegister src) {
2765 LiftoffRegister src) {
2770 LiftoffRegister src) {
2771 Movd(dst.fp(), src.gp());
2772 Pshufd(dst.fp(), dst.fp(),
static_cast<uint8_t
>(0));
2776 LiftoffRegister src) {
2777 Movq(dst.fp(), src.gp());
2778 Movddup(dst.fp(), dst.fp());
2782 LiftoffRegister src) {
2787 LiftoffRegister src) {
2788 Movddup(dst.fp(), src.fp());
2792 LiftoffRegister rhs) {
2794 this, dst, lhs, rhs);
2798 LiftoffRegister rhs) {
2800 this, dst, lhs, rhs);
2806 LiftoffRegister rhs) {
2808 &Assembler::pcmpgtb>(
this, dst, lhs,
2813 LiftoffRegister rhs) {
2820 this, dst, lhs, rhs, SSE4_1);
2821 Pcmpeqb(dst.fp(), ref);
2827 LiftoffRegister rhs) {
2834 this, dst, lhs, rhs, SSE4_1);
2835 Pcmpeqb(dst.fp(), ref);
2839 LiftoffRegister rhs) {
2846 this, dst, lhs, rhs);
2847 Pcmpeqb(dst.fp(), ref);
2851 LiftoffRegister rhs) {
2853 this, dst, lhs, rhs);
2857 LiftoffRegister rhs) {
2859 this, dst, lhs, rhs);
2865 LiftoffRegister rhs) {
2867 &Assembler::pcmpgtw>(
this, dst, lhs,
2872 LiftoffRegister rhs) {
2879 this, dst, lhs, rhs, SSE4_1);
2880 Pcmpeqw(dst.fp(), ref);
2886 LiftoffRegister rhs) {
2893 this, dst, lhs, rhs);
2894 Pcmpeqw(dst.fp(), ref);
2898 LiftoffRegister rhs) {
2905 this, dst, lhs, rhs, SSE4_1);
2906 Pcmpeqw(dst.fp(), ref);
2910 LiftoffRegister rhs) {
2912 this, dst, lhs, rhs);
2916 LiftoffRegister rhs) {
2918 this, dst, lhs, rhs);
2924 LiftoffRegister rhs) {
2926 &Assembler::pcmpgtd>(
this, dst, lhs,
2931 LiftoffRegister rhs) {
2938 this, dst, lhs, rhs, SSE4_1);
2939 Pcmpeqd(dst.fp(), ref);
2945 LiftoffRegister rhs) {
2952 this, dst, lhs, rhs, SSE4_1);
2953 Pcmpeqd(dst.fp(), ref);
2957 LiftoffRegister rhs) {
2964 this, dst, lhs, rhs, SSE4_1);
2965 Pcmpeqd(dst.fp(), ref);
2969 LiftoffRegister rhs) {
2971 this, dst, lhs, rhs, SSE4_1);
2975 LiftoffRegister rhs) {
2977 this, dst, lhs, rhs, SSE4_1);
2983 LiftoffRegister rhs) {
2990 if (dst == lhs || dst == rhs) {
3001 LiftoffRegister rhs) {
3017 if (dst == lhs || dst == rhs) {
3028 LiftoffRegister rhs) {
3030 this, dst, lhs, rhs);
3034 LiftoffRegister rhs) {
3036 &Assembler::cmpneqps>(
this, dst, lhs, rhs);
3040 LiftoffRegister rhs) {
3042 &Assembler::cmpltps>(
this, dst, lhs,
3047 LiftoffRegister rhs) {
3049 &Assembler::cmpleps>(
this, dst, lhs,
3054 LiftoffRegister rhs) {
3056 this, dst, lhs, rhs);
3060 LiftoffRegister rhs) {
3062 &Assembler::cmpneqpd>(
this, dst, lhs, rhs);
3066 LiftoffRegister rhs) {
3068 &Assembler::cmpltpd>(
this, dst, lhs,
3073 LiftoffRegister rhs) {
3075 &Assembler::cmplepd>(
this, dst, lhs,
3080 const uint8_t imms[16]) {
3082 memcpy(vals, imms,
sizeof(vals));
3091 LiftoffRegister rhs) {
3093 this, dst, lhs, rhs);
3097 LiftoffRegister rhs) {
3099 this, dst, lhs, rhs);
3103 LiftoffRegister rhs) {
3105 this, dst, lhs, rhs);
3109 LiftoffRegister src1,
3110 LiftoffRegister src2,
3111 LiftoffRegister
mask) {
3124 LiftoffRegister src) {
3125 if (dst.fp() == src.fp()) {
3129 Pxor(dst.fp(), dst.fp());
3130 Psubb(dst.fp(), src.fp());
3135 LiftoffRegister src) {
3140 LiftoffRegister src) {
3145 LiftoffRegister src) {
3146 Pmovmskb(dst.gp(), src.fp());
3150 LiftoffRegister rhs) {
3161 LiftoffRegister lhs,
3162 LiftoffRegister rhs) {
3168 LiftoffRegister lhs, int32_t rhs) {
3173 LiftoffRegister lhs,
3174 LiftoffRegister rhs) {
3180 LiftoffRegister lhs, int32_t rhs) {
3185 LiftoffRegister rhs) {
3187 this, dst, lhs, rhs);
3191 LiftoffRegister lhs,
3192 LiftoffRegister rhs) {
3194 this, dst, lhs, rhs);
3198 LiftoffRegister lhs,
3199 LiftoffRegister rhs) {
3201 this, dst, lhs, rhs);
3205 LiftoffRegister rhs) {
3207 this, dst, lhs, rhs);
3211 LiftoffRegister lhs,
3212 LiftoffRegister rhs) {
3214 this, dst, lhs, rhs);
3218 LiftoffRegister lhs,
3219 LiftoffRegister rhs) {
3221 &Assembler::psubusb>(
this, dst, lhs,
3226 LiftoffRegister lhs,
3227 LiftoffRegister rhs) {
3229 this, dst, lhs, rhs, SSE4_1);
3233 LiftoffRegister lhs,
3234 LiftoffRegister rhs) {
3236 this, dst, lhs, rhs);
3240 LiftoffRegister lhs,
3241 LiftoffRegister rhs) {
3243 this, dst, lhs, rhs, SSE4_1);
3247 LiftoffRegister lhs,
3248 LiftoffRegister rhs) {
3250 this, dst, lhs, rhs);
3254 LiftoffRegister src) {
3255 if (dst.fp() == src.fp()) {
3259 Pxor(dst.fp(), dst.fp());
3260 Psubw(dst.fp(), src.fp());
3265 LiftoffRegister src) {
3270 LiftoffRegister src) {
3272 Packsswb(tmp, src.fp());
3273 Pmovmskb(dst.gp(), tmp);
3274 shrq(dst.gp(), Immediate(8));
3278 LiftoffRegister rhs) {
3286 this, dst, lhs, rhs);
3290 LiftoffRegister lhs,
3291 LiftoffRegister rhs) {
3297 LiftoffRegister lhs, int32_t rhs) {
3299 this, dst, lhs, rhs);
3303 LiftoffRegister lhs,
3304 LiftoffRegister rhs) {
3310 LiftoffRegister lhs, int32_t rhs) {
3312 this, dst, lhs, rhs);
3316 LiftoffRegister rhs) {
3318 this, dst, lhs, rhs);
3322 LiftoffRegister lhs,
3323 LiftoffRegister rhs) {
3325 this, dst, lhs, rhs);
3329 LiftoffRegister lhs,
3330 LiftoffRegister rhs) {
3332 this, dst, lhs, rhs);
3336 LiftoffRegister rhs) {
3338 this, dst, lhs, rhs);
3342 LiftoffRegister lhs,
3343 LiftoffRegister rhs) {
3345 this, dst, lhs, rhs);
3349 LiftoffRegister lhs,
3350 LiftoffRegister rhs) {
3352 &Assembler::psubusw>(
this, dst, lhs,
3357 LiftoffRegister rhs) {
3359 this, dst, lhs, rhs);
3363 LiftoffRegister lhs,
3364 LiftoffRegister rhs) {
3366 this, dst, lhs, rhs);
3370 LiftoffRegister lhs,
3371 LiftoffRegister rhs) {
3373 this, dst, lhs, rhs, SSE4_1);
3377 LiftoffRegister lhs,
3378 LiftoffRegister rhs) {
3380 this, dst, lhs, rhs);
3384 LiftoffRegister lhs,
3385 LiftoffRegister rhs) {
3387 this, dst, lhs, rhs, SSE4_1);
3391 LiftoffRegister src) {
3397 LiftoffRegister src) {
3402 LiftoffRegister src1,
3403 LiftoffRegister src2) {
3409 LiftoffRegister src1,
3410 LiftoffRegister src2) {
3416 LiftoffRegister src1,
3417 LiftoffRegister src2) {
3422 LiftoffRegister src1,
3423 LiftoffRegister src2) {
3428 LiftoffRegister src1,
3429 LiftoffRegister src2) {
3434 LiftoffRegister src1,
3435 LiftoffRegister src2) {
3437 Pmulhrsw(dst.fp(), src1.fp(), src2.fp());
3439 movdqa(dst.fp(), src1.fp());
3440 pmulhrsw(dst.fp(), src2.fp());
3445 LiftoffRegister lhs,
3446 LiftoffRegister rhs) {
3451 LiftoffRegister lhs,
3452 LiftoffRegister rhs,
3453 LiftoffRegister acc) {
3460 LiftoffRegister tmp1 =
3462 LiftoffRegister tmp2 =
3470 LiftoffRegister src) {
3471 if (dst.fp() == src.fp()) {
3475 Pxor(dst.fp(), dst.fp());
3476 Psubd(dst.fp(), src.fp());
3481 LiftoffRegister src) {
3486 LiftoffRegister src) {
3487 Movmskps(dst.gp(), src.fp());
3491 LiftoffRegister rhs) {
3499 this, dst, lhs, rhs);
3503 LiftoffRegister lhs,
3504 LiftoffRegister rhs) {
3510 LiftoffRegister lhs, int32_t rhs) {
3512 this, dst, lhs, rhs);
3516 LiftoffRegister lhs,
3517 LiftoffRegister rhs) {
3523 LiftoffRegister lhs, int32_t rhs) {
3525 this, dst, lhs, rhs);
3529 LiftoffRegister rhs) {
3531 this, dst, lhs, rhs);
3535 LiftoffRegister rhs) {
3537 this, dst, lhs, rhs);
3541 LiftoffRegister rhs) {
3543 this, dst, lhs, rhs, SSE4_1);
3547 LiftoffRegister lhs,
3548 LiftoffRegister rhs) {
3550 this, dst, lhs, rhs, SSE4_1);
3554 LiftoffRegister lhs,
3555 LiftoffRegister rhs) {
3557 this, dst, lhs, rhs, SSE4_1);
3561 LiftoffRegister lhs,
3562 LiftoffRegister rhs) {
3564 this, dst, lhs, rhs, SSE4_1);
3568 LiftoffRegister lhs,
3569 LiftoffRegister rhs) {
3571 this, dst, lhs, rhs, SSE4_1);
3575 LiftoffRegister lhs,
3576 LiftoffRegister rhs) {
3578 this, dst, lhs, rhs);
3582 LiftoffRegister src) {
3587 LiftoffRegister src) {
3595 XMMRegister src1, XMMRegister src2,
bool low,
3600 }
else if (dst != src2) {
3602 assm->movaps(dst, src1);
3607 assm->movaps(dst, src2);
3614 LiftoffRegister src1,
3615 LiftoffRegister src2) {
3621 LiftoffRegister src1,
3622 LiftoffRegister src2) {
3628 LiftoffRegister src1,
3629 LiftoffRegister src2) {
3636 LiftoffRegister src1,
3637 LiftoffRegister src2) {
3644 LiftoffRegister src) {
3649 LiftoffRegister src) {
3654 LiftoffRegister rhs) {
3662 this, dst, lhs, rhs);
3666 LiftoffRegister lhs,
3667 LiftoffRegister rhs) {
3673 LiftoffRegister lhs, int32_t rhs) {
3678 LiftoffRegister lhs,
3679 LiftoffRegister rhs) {
3685 LiftoffRegister lhs, int32_t rhs) {
3687 this, dst, lhs, rhs);
3691 LiftoffRegister rhs) {
3693 this, dst, lhs, rhs);
3697 LiftoffRegister rhs) {
3699 this, dst, lhs, rhs);
3703 LiftoffRegister rhs) {
3705 LiftoffRegister tmp1 =
3707 LiftoffRegister tmp2 =
3709 I64x2Mul(dst.fp(), lhs.fp(), rhs.fp(), tmp1.fp(), tmp2.fp());
3713 LiftoffRegister src1,
3714 LiftoffRegister src2) {
3720 LiftoffRegister src1,
3721 LiftoffRegister src2) {
3727 LiftoffRegister src1,
3728 LiftoffRegister src2) {
3734 LiftoffRegister src1,
3735 LiftoffRegister src2) {
3741 LiftoffRegister src) {
3742 Movmskpd(dst.gp(), src.fp());
3746 LiftoffRegister src) {
3747 Pmovsxdq(dst.fp(), src.fp());
3751 LiftoffRegister src) {
3756 LiftoffRegister src) {
3757 Pmovzxdq(dst.fp(), src.fp());
3761 LiftoffRegister src) {
3766 LiftoffRegister src) {
3771 LiftoffRegister src) {
3776 LiftoffRegister src) {
3777 Sqrtps(dst.fp(), src.fp());
3781 LiftoffRegister src) {
3783 Roundps(dst.fp(), src.fp(),
kRoundUp);
3788 LiftoffRegister src) {
3795 LiftoffRegister src) {
3802 LiftoffRegister src) {
3809 LiftoffRegister rhs) {
3812 this, dst, lhs, rhs);
3816 LiftoffRegister rhs) {
3818 this, dst, lhs, rhs);
3822 LiftoffRegister rhs) {
3824 this, dst, lhs, rhs);
3828 LiftoffRegister rhs) {
3830 this, dst, lhs, rhs);
3834 LiftoffRegister rhs) {
3839 LiftoffRegister rhs) {
3844 LiftoffRegister rhs) {
3847 this, dst, rhs, lhs);
3851 LiftoffRegister rhs) {
3854 this, dst, rhs, lhs);
3858 LiftoffRegister lhs,
3859 LiftoffRegister rhs) {
3861 this, dst, lhs, rhs);
3865 LiftoffRegister lhs,
3866 LiftoffRegister rhs) {
3868 this, dst, lhs, rhs);
3872 LiftoffRegister src) {
3877 LiftoffRegister src) {
3882 LiftoffRegister src) {
3883 Sqrtpd(dst.fp(), src.fp());
3887 LiftoffRegister src) {
3889 Roundpd(dst.fp(), src.fp(),
kRoundUp);
3894 LiftoffRegister src) {
3901 LiftoffRegister src) {
3908 LiftoffRegister src) {
3915 LiftoffRegister rhs) {
3917 this, dst, lhs, rhs);
3921 LiftoffRegister rhs) {
3923 this, dst, lhs, rhs);
3927 LiftoffRegister rhs) {
3929 this, dst, lhs, rhs);
3933 LiftoffRegister rhs) {
3935 this, dst, lhs, rhs);
3939 LiftoffRegister rhs) {
3944 LiftoffRegister rhs) {
3949 LiftoffRegister lhs,
3950 LiftoffRegister rhs) {
3952 this, dst, lhs, rhs);
3956 LiftoffRegister lhs,
3957 LiftoffRegister rhs) {
3959 this, dst, lhs, rhs);
3963 LiftoffRegister rhs) {
3966 this, dst, rhs, lhs);
3970 LiftoffRegister rhs) {
3973 this, dst, rhs, lhs);
3977 LiftoffRegister src) {
3978 Cvtdq2pd(dst.fp(), src.fp());
3982 LiftoffRegister src) {
3987 LiftoffRegister src) {
3988 Cvtps2pd(dst.fp(), src.fp());
3992 LiftoffRegister src) {
3997 LiftoffRegister src) {
4003 LiftoffRegister src) {
4004 Cvtdq2ps(dst.fp(), src.fp());
4008 LiftoffRegister src) {
4012 CpuFeatureScope scope(
this, AVX);
4015 if (dst.fp() != src.fp())
movaps(dst.fp(), src.fp());
4019 Psrld(dst.fp(), uint8_t{1});
4020 Cvtdq2ps(dst.fp(), dst.fp());
4021 Addps(dst.fp(), dst.fp());
4026 LiftoffRegister src) {
4027 Cvtpd2ps(dst.fp(), src.fp());
4031 LiftoffRegister lhs,
4032 LiftoffRegister rhs) {
4034 &Assembler::packsswb>(
this, dst, lhs,
4039 LiftoffRegister lhs,
4040 LiftoffRegister rhs) {
4042 &Assembler::packuswb>(
this, dst, lhs,
4047 LiftoffRegister lhs,
4048 LiftoffRegister rhs) {
4050 &Assembler::packssdw>(
this, dst, lhs,
4055 LiftoffRegister lhs,
4056 LiftoffRegister rhs) {
4058 &Assembler::packusdw>(
this, dst, lhs,
4063 LiftoffRegister src) {
4064 Pmovsxbw(dst.fp(), src.fp());
4068 LiftoffRegister src) {
4073 LiftoffRegister src) {
4074 Pmovzxbw(dst.fp(), src.fp());
4078 LiftoffRegister src) {
4083 LiftoffRegister src) {
4084 Pmovsxwd(dst.fp(), src.fp());
4088 LiftoffRegister src) {
4093 LiftoffRegister src) {
4094 Pmovzxwd(dst.fp(), src.fp());
4098 LiftoffRegister src) {
4103 LiftoffRegister src) {
4109 LiftoffRegister src) {
4115 LiftoffRegister lhs,
4116 LiftoffRegister rhs) {
4118 this, dst, rhs, lhs);
4122 LiftoffRegister lhs,
4123 LiftoffRegister rhs) {
4125 this, dst, lhs, rhs);
4129 LiftoffRegister lhs,
4130 LiftoffRegister rhs) {
4132 this, dst, lhs, rhs);
4136 LiftoffRegister src) {
4137 Pabsb(dst.fp(), src.fp());
4141 LiftoffRegister src) {
4142 Pabsw(dst.fp(), src.fp());
4146 LiftoffRegister src) {
4147 Pabsd(dst.fp(), src.fp());
4151 LiftoffRegister src) {
4156 LiftoffRegister lhs,
4157 uint8_t imm_lane_idx) {
4158 Pextrb(dst.gp(), lhs.fp(), imm_lane_idx);
4163 LiftoffRegister lhs,
4164 uint8_t imm_lane_idx) {
4165 Pextrb(dst.gp(), lhs.fp(), imm_lane_idx);
4169 LiftoffRegister lhs,
4170 uint8_t imm_lane_idx) {
4171 Pextrw(dst.gp(), lhs.fp(), imm_lane_idx);
4176 LiftoffRegister lhs,
4177 uint8_t imm_lane_idx) {
4178 Pextrw(dst.gp(), lhs.fp(), imm_lane_idx);
4182 LiftoffRegister lhs,
4183 uint8_t imm_lane_idx) {
4184 Pextrd(dst.gp(), lhs.fp(), imm_lane_idx);
4188 LiftoffRegister lhs,
4189 uint8_t imm_lane_idx) {
4190 Pextrq(dst.gp(), lhs.fp(),
static_cast<int8_t
>(imm_lane_idx));
4194 LiftoffRegister lhs,
4195 uint8_t imm_lane_idx) {
4200 LiftoffRegister lhs,
4201 uint8_t imm_lane_idx) {
4206 LiftoffRegister src1,
4207 LiftoffRegister src2,
4208 uint8_t imm_lane_idx) {
4210 CpuFeatureScope scope(
this, AVX);
4211 vpinsrb(dst.fp(), src1.fp(), src2.gp(), imm_lane_idx);
4213 CpuFeatureScope scope(
this, SSE4_1);
4214 if (dst.fp() != src1.fp())
movaps(dst.fp(), src1.fp());
4215 pinsrb(dst.fp(), src2.gp(), imm_lane_idx);
4220 LiftoffRegister src1,
4221 LiftoffRegister src2,
4222 uint8_t imm_lane_idx) {
4224 CpuFeatureScope scope(
this, AVX);
4225 vpinsrw(dst.fp(), src1.fp(), src2.gp(), imm_lane_idx);
4227 if (dst.fp() != src1.fp())
movaps(dst.fp(), src1.fp());
4228 pinsrw(dst.fp(), src2.gp(), imm_lane_idx);
4233 LiftoffRegister src1,
4234 LiftoffRegister src2,
4235 uint8_t imm_lane_idx) {
4237 CpuFeatureScope scope(
this, AVX);
4238 vpinsrd(dst.fp(), src1.fp(), src2.gp(), imm_lane_idx);
4240 CpuFeatureScope scope(
this, SSE4_1);
4241 if (dst.fp() != src1.fp())
movaps(dst.fp(), src1.fp());
4242 pinsrd(dst.fp(), src2.gp(), imm_lane_idx);
4247 LiftoffRegister src1,
4248 LiftoffRegister src2,
4249 uint8_t imm_lane_idx) {
4251 CpuFeatureScope scope(
this, AVX);
4252 vpinsrq(dst.fp(), src1.fp(), src2.gp(), imm_lane_idx);
4254 CpuFeatureScope scope(
this, SSE4_1);
4255 if (dst.fp() != src1.fp())
movaps(dst.fp(), src1.fp());
4256 pinsrq(dst.fp(), src2.gp(), imm_lane_idx);
4261 LiftoffRegister src1,
4262 LiftoffRegister src2,
4263 uint8_t imm_lane_idx) {
4265 CpuFeatureScope scope(
this, AVX);
4266 vinsertps(dst.fp(), src1.fp(), src2.fp(), (imm_lane_idx << 4) & 0x30);
4268 CpuFeatureScope scope(
this, SSE4_1);
4269 if (dst.fp() != src1.fp())
movaps(dst.fp(), src1.fp());
4270 insertps(dst.fp(), src2.fp(), (imm_lane_idx << 4) & 0x30);
4275 LiftoffRegister src1,
4276 LiftoffRegister src2,
4277 uint8_t imm_lane_idx) {
4282 LiftoffRegister src1,
4283 LiftoffRegister src2,
4284 LiftoffRegister src3) {
4289 LiftoffRegister src1,
4290 LiftoffRegister src2,
4291 LiftoffRegister src3) {
4296 LiftoffRegister src1,
4297 LiftoffRegister src2,
4298 LiftoffRegister src3) {
4303 LiftoffRegister src1,
4304 LiftoffRegister src2,
4305 LiftoffRegister src3) {
4310 LiftoffRegister src) {
4314 CpuFeatureScope f16c_scope(
this, F16C);
4315 CpuFeatureScope avx2_scope(
this, AVX2);
4317 vpbroadcastw(dst.fp(), dst.fp());
4322 LiftoffRegister lhs,
4323 uint8_t imm_lane_idx) {
4327 CpuFeatureScope f16c_scope(
this, F16C);
4328 CpuFeatureScope avx_scope(
this, AVX);
4336 LiftoffRegister src1,
4337 LiftoffRegister src2,
4338 uint8_t imm_lane_idx) {
4342 CpuFeatureScope f16c_scope(
this, F16C);
4343 CpuFeatureScope avx_scope(
this, AVX);
4351 LiftoffRegister src) {
4355 CpuFeatureScope avx_scope(
this, AVX);
4361 LiftoffRegister src) {
4365 CpuFeatureScope avx_scope(
this, AVX);
4371 LiftoffRegister src) {
4375 CpuFeatureScope f16c_scope(
this, F16C);
4376 CpuFeatureScope avx_scope(
this, AVX);
4379 vsqrtps(ydst, ydst);
4385 LiftoffRegister src) {
4389 CpuFeatureScope f16c_scope(
this, F16C);
4390 CpuFeatureScope avx_scope(
this, AVX);
4399 LiftoffRegister src) {
4403 CpuFeatureScope f16c_scope(
this, F16C);
4404 CpuFeatureScope avx_scope(
this, AVX);
4413 LiftoffRegister src) {
4417 CpuFeatureScope f16c_scope(
this, F16C);
4418 CpuFeatureScope avx_scope(
this, AVX);
4427 LiftoffRegister src) {
4431 CpuFeatureScope f16c_scope(
this, F16C);
4432 CpuFeatureScope avx_scope(
this, AVX);
4440template <
void (Assembler::*avx_op)(YMMRegister, YMMRegister, YMMRegister)>
4454 assm->vpackssdw(ydst, ydst, ydst);
4459 LiftoffRegister rhs) {
4464 LiftoffRegister rhs) {
4469 LiftoffRegister rhs) {
4474 LiftoffRegister rhs) {
4478template <
void (Assembler::*avx_op)(YMMRegister, YMMRegister, YMMRegister)>
4500 LiftoffRegister rhs) {
4505 LiftoffRegister rhs) {
4510 LiftoffRegister rhs) {
4515 LiftoffRegister rhs) {
4520 LiftoffRegister rhs) {
4525 LiftoffRegister tmp =
4534 LiftoffRegister rhs) {
4539 LiftoffRegister tmp =
4548 LiftoffRegister rhs) {
4554 LiftoffRegister rhs) {
4560 LiftoffRegister src) {
4566 CpuFeatureScope f16c_scope(
this, F16C);
4567 CpuFeatureScope avx_scope(
this, AVX);
4568 CpuFeatureScope avx2_scope(
this, AVX2);
4576 LiftoffRegister src) {
4582 CpuFeatureScope f16c_scope(
this, F16C);
4583 CpuFeatureScope avx_scope(
this, AVX);
4584 CpuFeatureScope avx2_scope(
this, AVX2);
4592 LiftoffRegister src) {
4598 CpuFeatureScope f16c_scope(
this, F16C);
4599 CpuFeatureScope avx_scope(
this, AVX);
4600 CpuFeatureScope avx2_scope(
this, AVX2);
4602 vpmovsxwd(ydst, src.fp());
4603 vcvtdq2ps(ydst, ydst);
4609 LiftoffRegister src) {
4615 CpuFeatureScope f16c_scope(
this, F16C);
4616 CpuFeatureScope avx_scope(
this, AVX);
4617 CpuFeatureScope avx2_scope(
this, AVX2);
4619 vpmovzxwd(ydst, src.fp());
4620 vcvtdq2ps(ydst, ydst);
4626 LiftoffRegister src) {
4630 CpuFeatureScope f16c_scope(
this, F16C);
4637 LiftoffRegister src) {
4642 CpuFeatureScope avx_scope(
this, AVX);
4643 CpuFeatureScope f16c_scope(
this, F16C);
4645 LiftoffRegister ftmp =
4647 LiftoffRegister ftmp2 =
4650 Cvtpd2ph(ftmp2.fp(), ftmp.fp(), tmp.gp());
4654 Cvtpd2ph(dst.fp(), ftmp.fp(), tmp.gp());
4656 Cvtpd2ph(dst.fp(), src.fp(), tmp.gp());
4658 vmovd(tmp.gp(), ftmp2.fp());
4659 vpinsrw(dst.fp(), dst.fp(), tmp.gp(), 1);
4661 pxor(ftmp.fp(), ftmp.fp());
4664 vinsertps(dst.fp(), dst.fp(), ftmp.fp(), (1 << 4) & 0x30);
4669 LiftoffRegister src) {
4673 CpuFeatureScope f16c_scope(
this, F16C);
4680 LiftoffRegister src1,
4681 LiftoffRegister src2,
4682 LiftoffRegister src3) {
4690 F16x8Qfma(ydst, src1.fp(), src2.fp(), src3.fp(), tmp, tmp2);
4695 LiftoffRegister src1,
4696 LiftoffRegister src2,
4697 LiftoffRegister src3) {
4705 F16x8Qfms(ydst, src1.fp(), src2.fp(), src3.fp(), tmp, tmp2);
4714 Label* trap_label) {
4715 if (is_uint31(max_index)) {
4716 cmpq(index, Immediate(
static_cast<int32_t>(max_index)));
4735 while (!gp_regs.is_empty()) {
4736 LiftoffRegister
reg = gp_regs.GetFirstRegSet();
4745 while (!fp_regs.is_empty()) {
4746 LiftoffRegister
reg = fp_regs.GetFirstRegSet();
4757 unsigned fp_offset = 0;
4758 while (!fp_regs.is_empty()) {
4759 LiftoffRegister
reg = fp_regs.GetFirstRegSet();
4760 Movdqu(
reg.fp(), Operand(rsp, fp_offset));
4764 if (fp_offset) addq(rsp, Immediate(fp_offset));
4766 while (!gp_regs.is_empty()) {
4767 LiftoffRegister
reg = gp_regs.GetLastRegSet();
4774 SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
4775 LiftoffRegList ref_spills,
int spill_offset) {
4779 while (!gp_spills.is_empty()) {
4780 LiftoffRegister
reg = gp_spills.GetFirstRegSet();
4781 if (ref_spills.has(
reg)) {
4782 safepoint.DefineTaggedStackSlot(spill_offset);
4784 gp_spills.clear(
reg);
4799 const std::initializer_list<VarState>
args,
const LiftoffRegister* rets,
4801 ExternalReference ext_ref) {
4806 Operand dst{rsp, arg_offset};
4815 constexpr int kNumCCallArgs = 1;
4822 const LiftoffRegister* next_result_reg = rets;
4823 if (return_kind != kVoid) {
4824 constexpr Register kReturnReg = rax;
4825 if (kReturnReg != next_result_reg->gp()) {
4826 Move(*next_result_reg, LiftoffRegister(kReturnReg), return_kind);
4832 if (out_argument_kind != kVoid) {
4837 addq(rsp, Immediate(stack_bytes));
4841 ExternalReference ext_ref) {
4843 int num_args =
static_cast<int>(
args.size());
4849#ifdef V8_TARGET_OS_WIN
4851 int stack_args = kWindowsHomeStackSlots;
4855 ParallelMove parallel_move{
this};
4858 parallel_move.LoadIntoRegister(LiftoffRegister{
kCArgRegs[reg_args]}, arg);
4866 parallel_move.Execute();
4881 compiler::CallDescriptor* call_descriptor,
4887 CallWasmCodePointer(target, call_descriptor->signature_hash());
4891 compiler::CallDescriptor* call_descriptor, Register target) {
4896 CallWasmCodePointer(target, call_descriptor->signature_hash(),
4912 addq(rsp, Immediate(size));
4932 movl(Operand(dst, 0), Immediate(1));
4937 LiftoffRegister src,
4939 LiftoffRegister tmp_s128,
4941 if (lane_kind ==
kF32) {
4942 movaps(tmp_s128.fp(), src.fp());
4943 cmpunordps(tmp_s128.fp(), tmp_s128.fp());
4946 movapd(tmp_s128.fp(), src.fp());
4947 cmpunordpd(tmp_s128.fp(), tmp_s128.fp());
4950 orl(Operand(dst, 0), tmp_gp);
4954 movl(Operand(dst, 0), Immediate(1));
4960 int last_stack_slot = param_slots;
4961 for (
auto& slot :
slots_) {
4962 const int stack_slot = slot.dst_slot_;
4964 last_stack_slot = stack_slot;
4967 switch (src.loc()) {
4969 if (src.kind() ==
kI32) {
4975 }
else if (src.kind() ==
kS128) {
4997 asm_->
pushq(Immediate(src.i32_const()));
5003#undef RETURN_FALSE_IF_MISSING_CPU_FEATURE
V8_INLINE void RecordComment(const char *comment, const SourceLocation &loc=SourceLocation::Current())
void emit_trace_instruction(Immediate markid)
void divss(XMMRegister dst, XMMRegister src)
void movsxwq(Register dst, Register src)
void movapd(XMMRegister dst, XMMRegister src)
void cmpxchgb(Operand dst, Register src)
void shll(const VRegister &vd, const VRegister &vn, int shift)
void movss(XMMRegister dst, Operand src)
void subss(XMMRegister dst, XMMRegister src)
void vmovd(XMMRegister dst, Register src)
void vroundps(XMMRegister dst, XMMRegister src, RoundingMode mode)
void vpinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8)
void vaddss(XMMRegister dst, XMMRegister src1, XMMRegister src2)
void popcntl(Register dst, Register src)
void mulss(XMMRegister dst, XMMRegister src)
void j(Condition cc, Label *L, Label::Distance distance=Label::kFar)
void xchgb(Register reg, Operand op)
void vmulss(XMMRegister dst, XMMRegister src1, XMMRegister src2)
void pinsrq(XMMRegister dst, Register src, uint8_t imm8)
void cmovq(Condition cc, Register dst, Register src)
void vpinsrb(XMMRegister dst, XMMRegister src1, Register src2, uint8_t offset)
void vpinsrw(XMMRegister dst, XMMRegister src1, Register src2, uint8_t offset)
void movdqa(XMMRegister dst, Operand src)
void pinsrw(XMMRegister dst, Register src, uint8_t offset)
void vsubss(XMMRegister dst, XMMRegister src1, XMMRegister src2)
void vdivss(XMMRegister dst, XMMRegister src1, XMMRegister src2)
void setcc(Condition cc, Register reg)
void pushq(Immediate value)
void movb(Register dst, Operand src)
void pinsrb(XMMRegister dst, Register src, uint8_t offset)
void pinsrd(XMMRegister dst, Register src, uint8_t offset)
void movsxwl(Register dst, Register src)
void xaddl(Operand dst, Register src)
void btrq(Register dst, Immediate imm8)
void movw(Register reg, uint32_t immediate, Condition cond=al)
void movsd(XMMRegister dst, XMMRegister src)
void movaps(XMMRegister dst, XMMRegister src)
void movsxbl(Register dst, Register src)
void xaddb(Operand dst, Register src)
void movsxbq(Register dst, Register src)
void cmovl(Condition cc, Register dst, Register src)
void xaddw(Operand dst, Register src)
void vcvtph2ps(XMMRegister dst, XMMRegister src)
void cmpxchgw(Operand dst, Register src)
void near_jmp(intptr_t disp, RelocInfo::Mode rmode)
void popcntq(Register dst, Register src)
void xchgw(Register reg, Operand op)
void movsxlq(Register dst, Register src)
void vinsertps(XMMRegister dst, XMMRegister src1, XMMRegister src2, uint8_t offset)
void pmovmskb(Register dst, XMMRegister src)
void movl(Operand dst, Label *src)
void testb(Register reg, Operand op)
void sub_sp_32(uint32_t imm)
Assembler(const AssemblerOptions &, std::unique_ptr< AssemblerBuffer >={})
void vpinsrd(XMMRegister dst, XMMRegister src1, Register src2, uint8_t offset)
void vcvtps2ph(XMMRegister dst, XMMRegister src, uint8_t imm8)
void xaddq(Operand dst, Register src)
void insertps(XMMRegister dst, XMMRegister src, uint8_t offset)
void addss(XMMRegister dst, XMMRegister src)
void movq(XMMRegister dst, Operand src)
static constexpr int kFixedFrameSizeAboveFp
static bool IsSupported(CpuFeature f)
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static constexpr MachineType Uint8()
static constexpr MachineType Int32()
static constexpr MachineType Uint32()
static constexpr MachineType Uint16()
static constexpr MachineType Int16()
static constexpr MachineType Int64()
static constexpr MachineType Int8()
void F32x4ExtractLane(DoubleRegister dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch1, Register scratch2, Register scratch3)
void I32x4TruncSatF64x2SZero(Simd128Register dst, Simd128Register src, Simd128Register scratch)
void Cvtqui2sd(XMMRegister dst, Register src)
void S128Select(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register mask)
void I64x2UConvertI32x4High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void Cvttsd2uiq(Register dst, Operand src, Label *fail=nullptr)
void Cvttss2uiq(Register dst, Operand src, Label *fail=nullptr)
void LoadAddress(Register destination, ExternalReference source)
void I16x8Q15MulRSatS(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2, Simd128Register scratch3)
void Pextrq(Register dst, XMMRegister src, int8_t imm8)
void I64x2GtS(QwNeonRegister dst, QwNeonRegister src1, QwNeonRegister src2)
void near_call(int offset, RelocInfo::Mode rmode)
void Move(Register dst, Tagged< Smi > smi)
void Cvtqsi2sd(XMMRegister dst, Register src)
void F16x8Max(YMMRegister dst, XMMRegister lhs, XMMRegister rhs, YMMRegister scratch, YMMRegister scratch2)
void LoadTrustedPointerField(Register destination, MemOperand field_operand, IndirectPointerTag tag)
void I16x8Splat(Simd128Register dst, Register src)
void JumpIfSmi(Register value, Label *smi_label)
void AssertUnreachable(AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void Cvtlsi2ss(XMMRegister dst, Register src)
void Cvtqui2ss(XMMRegister dst, Register src)
void Cvtqsi2ss(XMMRegister dst, Register src)
void F16x8Qfma(YMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, YMMRegister tmp, YMMRegister tmp2)
void I64x2Mul(Simd128Register dst, Simd128Register src1, Simd128Register src2, Register scratch1, Register scrahc2, Register scratch3, Simd128Register scratch4)
void I32x4SConvertF32x4(Simd128Register dst, Simd128Register src, Simd128Register scratch1, Register scratch2)
void F64x2Max(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2)
void Movq(XMMRegister dst, Register src)
void Tzcntq(Register dst, Register src)
void Cvtsd2ss(XMMRegister dst, XMMRegister src)
void I16x8SConvertF16x8(YMMRegister dst, XMMRegister src, YMMRegister tmp, Register scratch)
void I64x2GeS(QwNeonRegister dst, QwNeonRegister src1, QwNeonRegister src2)
void StoreTaggedField(const Register &value, const MemOperand &dst_field_operand)
void F64x2ConvertLowI32x4U(QwNeonRegister dst, QwNeonRegister src)
void LoadTaggedField(const Register &destination, const MemOperand &field_operand)
void Pinsrq(XMMRegister dst, XMMRegister src1, Register src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void Cvtlsi2sd(XMMRegister dst, Register src)
void I32x4UConvertI16x8High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void LoadProtectedPointerField(Register destination, MemOperand field_operand)
void F64x2Min(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2)
void CheckPageFlag(Register object, int mask, Condition cc, Label *condition_met)
void Lzcntl(Register dst, Register src)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_label=nullptr)
void I16x8TruncF16x8U(YMMRegister dst, XMMRegister src, YMMRegister tmp)
void I64x2Abs(QwNeonRegister dst, QwNeonRegister src)
Operand StackLimitAsOperand(StackLimitKind kind)
void Cvtpd2ph(XMMRegister dst, XMMRegister src, Register tmp)
void SmiAddConstant(Operand dst, Tagged< Smi > constant)
void F64x2ExtractLane(DoubleRegister dst, Simd128Register src, uint8_t imm_lane_idx, Simd128Register scratch1, Register scratch2)
void AllocateStackSpace(Register bytes)
void AssertZeroExtended(Register int32_register)
void I16x8UConvertI8x16High(Simd128Register dst, Simd128Register src, Register scratch1, Simd128Register scratch2)
void CallRecordWriteStubSaveRegisters(Register object, Operand offset, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void F16x8Qfms(YMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, YMMRegister tmp, YMMRegister tmp2)
void Cvtss2sd(XMMRegister dst, XMMRegister src)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers=0, Register scratch=no_reg)
void I8x16Splat(Simd128Register dst, Register src)
void F32x4Splat(Simd128Register dst, DoubleRegister src, DoubleRegister scratch1, Register scratch2)
void Lzcntq(Register dst, Register src)
void F16x8Min(YMMRegister dst, XMMRegister lhs, XMMRegister rhs, YMMRegister scratch, YMMRegister scratch2)
void F64x2ReplaceLane(Simd128Register dst, Simd128Register src1, DoubleRegister src2, uint8_t imm_lane_idx, Register scratch1, Simd128Register scratch2)
void Tzcntl(Register dst, Register src)
void I32x4TruncSatF64x2UZero(Simd128Register dst, Simd128Register src, Simd128Register scratch)
void I8x16Swizzle(Simd128Register dst, Simd128Register src1, Simd128Register src2, Register scratch1, Register scratch2, Simd128Register scratch3)
static constexpr MainThreadFlags kPointersToHereAreInterestingMask
static constexpr MainThreadFlags kPointersFromHereAreInterestingMask
constexpr int8_t code() const
void S128Load16Splat(XMMRegister dst, Operand src, XMMRegister scratch)
void F32x4Qfms(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, XMMRegister tmp)
void F32x4Max(XMMRegister dst, XMMRegister lhs, XMMRegister rhs, XMMRegister scratch)
void I16x8ExtMulLow(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister scrat, bool is_signed)
void F64x2Qfms(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, XMMRegister tmp)
void Pblendvb(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister mask)
void S128Not(XMMRegister dst, XMMRegister src, XMMRegister scratch)
void I16x8SConvertI8x16High(XMMRegister dst, XMMRegister src)
void I16x8ExtMulHighS(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister scratch)
void S128Store64Lane(Operand dst, XMMRegister src, uint8_t laneidx)
void Pinsrb(XMMRegister dst, XMMRegister src1, Op src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void I32x4DotI8x16I7x16AddS(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, XMMRegister scratch, XMMRegister splat_reg)
void Blendvpd(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister mask)
void Blendvps(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister mask)
void S128Load8Splat(XMMRegister dst, Operand src, XMMRegister scratch)
void I8x16ShrU(XMMRegister dst, XMMRegister src1, uint8_t src2, Register tmp1, XMMRegister tmp2)
void F32x4Min(XMMRegister dst, XMMRegister lhs, XMMRegister rhs, XMMRegister scratch)
void I32x4SConvertI16x8High(XMMRegister dst, XMMRegister src)
void I64x2ShrS(XMMRegister dst, XMMRegister src, uint8_t shift, XMMRegister xmm_tmp)
void I64x2ExtMul(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister scratch, bool low, bool is_signed)
void Pshufb(XMMRegister dst, XMMRegister src, Op mask)
void I64x2SConvertI32x4High(XMMRegister dst, XMMRegister src)
void F64x2Qfma(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, XMMRegister tmp)
void I8x16Shl(XMMRegister dst, XMMRegister src1, uint8_t src2, Register tmp1, XMMRegister tmp2)
void I16x8DotI8x16I7x16S(XMMRegister dst, XMMRegister src1, XMMRegister src2)
void S128Load32Splat(XMMRegister dst, Operand src)
void I8x16ShrS(XMMRegister dst, XMMRegister src1, uint8_t src2, XMMRegister tmp)
void I64x2Neg(XMMRegister dst, XMMRegister src, XMMRegister scratch)
void F32x4Qfma(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister src3, XMMRegister tmp)
void Pinsrw(XMMRegister dst, XMMRegister src1, Op src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void I16x8ExtMulHighU(XMMRegister dst, XMMRegister src1, XMMRegister src2, XMMRegister scratch)
void I32x4ExtAddPairwiseI16x8U(XMMRegister dst, XMMRegister src, XMMRegister tmp)
void S128Store32Lane(Operand dst, XMMRegister src, uint8_t laneidx)
void Negpd(XMMRegister dst, XMMRegister src, Register tmp)
void I32x4TruncF32x4U(XMMRegister dst, XMMRegister src, XMMRegister scratch1, XMMRegister scratch2)
void Absps(XMMRegister dst, XMMRegister src, Register tmp)
void Negph(XMMRegister dst, XMMRegister src, Register tmp)
void Negps(XMMRegister dst, XMMRegister src, Register tmp)
void Pinsrd(XMMRegister dst, XMMRegister src1, Op src2, uint8_t imm8, uint32_t *load_pc_offset=nullptr)
void I16x8ExtAddPairwiseI8x16S(XMMRegister dst, XMMRegister src, XMMRegister scratch, Register tmp)
void Abspd(XMMRegister dst, XMMRegister src, Register tmp)
void Pextrd(Register dst, XMMRegister src, uint8_t imm8)
void Absph(XMMRegister dst, XMMRegister src, Register tmp)
void I16x8ExtAddPairwiseI8x16U(XMMRegister dst, XMMRegister src, Register scratch)
void I32x4ExtAddPairwiseI16x8S(XMMRegister dst, XMMRegister src, Register scratch)
void I8x16Popcnt(XMMRegister dst, XMMRegister src, XMMRegister tmp1, XMMRegister tmp2, Register scratch)
static constexpr Tagged< Smi > FromInt(int value)
static constexpr int32_t TypeToMarker(Type type)
static constexpr int kFrameTypeOffset
static constexpr Register GapRegister()
static constexpr int kInstanceDataOffset
static constexpr YMMRegister from_code(int code)
void emit_i8x16_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
void ClearRegister(Register reg, std::initializer_list< Register * > possible_uses, LiftoffRegList pinned)
bool emit_f16x8_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_store_nonzero_if_nan(Register dst, DoubleRegister src, ValueKind kind)
bool emit_f32x4_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_f64_div(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void set_trap_on_oob_mem64(Register index, uint64_t max_index, Label *trap_label)
void emit_i16x8_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicXor(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i64_shl(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i64_shli(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i8x16_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_relaxed_q15mulr_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i8x16_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_ceil(DoubleRegister dst, DoubleRegister src)
void emit_i16x8_extmul_high_i8x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_trunc(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i32_rems(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
void emit_i64x2_extmul_high_i32x4_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i32_clz(Register dst, Register src)
void emit_i8x16_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_shri(Register dst, Register src, int32_t amount)
void emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64_mul(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_store_nonzero(Register dst)
void emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_eqz(Register dst, Register src)
void emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_uconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_floor(DoubleRegister dst, DoubleRegister src)
void emit_f32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void RecordUsedSpillOffset(int offset)
void emit_i16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i8x16_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void FillI64Half(Register, int offset, RegPairHalf)
bool emit_f16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_sari(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_f64x2_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void CallCWithStackBuffer(const std::initializer_list< VarState > args, const LiftoffRegister *rets, ValueKind return_kind, ValueKind out_argument_kind, int stack_bytes, ExternalReference ext_ref)
void emit_f32_mul(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32_nearest_int(DoubleRegister dst, DoubleRegister src)
void emit_f64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_dot_i16x8_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i16x8_extmul_low_i8x16_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_dot_i8x16_i7x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void TailCallNativeWasmCode(Address addr)
void emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void SpillInstanceData(Register instance)
void RecordOolSpillSpaceSize(int size)
void emit_f64x2_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i32_subi(Register dst, Register lhs, int32_t imm)
void emit_i32_shli(Register dst, Register src, int32_t amount)
void emit_i32x4_extmul_low_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void AtomicAdd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void AtomicSub(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void LoadTransform(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LoadTransformationKind transform, uint32_t *protected_load_pc, bool i64_offset)
void emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_store_nonzero_if_nan(Register dst, LiftoffRegister src, Register tmp_gp, LiftoffRegister tmp_s128, ValueKind lane_kind)
void emit_i32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sar(Register dst, Register src, Register amount)
void emit_i16x8_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_i16x8_sconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
void AtomicAnd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i16x8_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicCompareExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister value, StoreType type, bool i64_offset)
void emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_not(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i16(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_dot_i8x16_i7x16_add_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister acc)
bool emit_f16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64x2_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_trunc(DoubleRegister dst, DoubleRegister src)
void emit_f64_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void Fill(LiftoffRegister, int offset, ValueKind)
void emit_i32_shr(Register dst, Register src, Register amount)
void emit_i64_signextend_i32(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void LoadFullPointer(Register dst, Register src_addr, int32_t offset_imm)
void DeallocateStackSlot(uint32_t size)
void emit_i8x16_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_extmul_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i16x8_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_andi(Register dst, Register lhs, int32_t imm)
bool emit_f16x8_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StackCheck(Label *ool_code)
void emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f32x4_promote_low_f16x8(LiftoffRegister dst, LiftoffRegister src)
void Store(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, bool is_store_mem=false, bool i64_offset=false)
bool emit_f16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint32_t *protected_load_pc=nullptr, bool is_load_mem=false, bool i64_offset=false, bool needs_shift=false)
void emit_i64x2_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f16x8_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_high_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
int max_used_spill_offset_
void emit_i32_signextend_i16(Register dst, Register src)
void emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_divs(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_f32_neg(DoubleRegister dst, DoubleRegister src)
void emit_f64x2_promote_low_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
void LoadLane(LiftoffRegister dst, LiftoffRegister src, Register addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint8_t lane, uint32_t *protected_load_pc, bool i64_offset)
void emit_i16x8_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_and(Register dst, Register lhs, Register rhs)
void emit_i16x8_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f64x2_trunc(LiftoffRegister dst, LiftoffRegister src)
void CallBuiltin(Builtin builtin)
void emit_i8x16_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_relaxed_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_abs(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallFrameSetupStub(int declared_function_index)
void emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_abs(DoubleRegister dst, DoubleRegister src)
void emit_i32_remu(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
bool emit_f64x2_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSpillAddress(Register dst, int offset, ValueKind kind)
void emit_i16x8_extmul_high_i8x16_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f64_neg(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32_ctz(Register dst, Register src)
void StoreCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind, Register frame_pointer)
void emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void AtomicExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i32x4_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Spill(VarState *slot)
void emit_i32_xor(Register dst, Register lhs, Register rhs)
void emit_s128_select(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask)
bool emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void emit_i8x16_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void AssertUnreachable(AbortReason reason)
void CallC(const std::initializer_list< VarState > args, ExternalReference ext_ref)
void emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_or(Register dst, Register lhs, Register rhs)
bool emit_f16x8_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
bool emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero)
bool emit_f16x8_demote_f32x4_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f64x2_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void DropStackSlotsAndRet(uint32_t num_stack_slots)
void emit_f32x4_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_and_not(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_relaxed_laneselect(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask, int lane_width)
void emit_i32x4_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_sconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_sari(Register dst, Register src, int32_t amount)
void LoadConstant(LiftoffRegister, WasmValue)
void emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
int GetTotalFrameSize() const
void emit_i16x8_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallNativeWasmCode(Address addr)
bool emit_f32x4_trunc(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PrepareTailCall(int num_callee_stack_params, int stack_param_delta)
void emit_f32x4_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_nearest_int(DoubleRegister dst, DoubleRegister src)
bool emit_f16x8_ceil(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_divu(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero)
void emit_cond_jump(Condition, Label *, ValueKind value, Register lhs, Register rhs, const FreezeCacheState &frozen)
void LoadFromInstance(Register dst, Register instance, int offset, int size)
void emit_i8x16_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_smi_check(Register obj, Label *target, SmiCheckMode mode, const FreezeCacheState &frozen)
void emit_f64_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
static bool NeedsAlignment(ValueKind kind)
void emit_f32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
static int SlotSizeForType(ValueKind kind)
void LoadProtectedPointer(Register dst, Register src_addr, int32_t offset)
void emit_i32x4_extmul_low_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_ori(Register dst, Register lhs, int32_t imm)
void emit_f64x2_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
bool emit_f32x4_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_popcnt(LiftoffRegister dst, LiftoffRegister src)
void emit_u32_to_uintptr(Register dst, Register src)
bool supports_f16_mem_access()
void emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_mul(Register dst, Register lhs, Register rhs)
void emit_i8x16_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
bool emit_f64_floor(DoubleRegister dst, DoubleRegister src)
void emit_f32_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadInstanceDataFromFrame(Register dst)
void emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32x4_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i64_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64x2_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i32_popcnt(Register dst, Register src)
void emit_i16x8_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void MoveStackValue(uint32_t dst_offset, uint32_t src_offset, ValueKind)
void emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind)
void emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_popcnt(LiftoffRegister dst, LiftoffRegister src)
void AtomicStore(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, bool i64_offset)
void emit_f64_abs(DoubleRegister dst, DoubleRegister src)
static constexpr int kStackSlotSize
void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PatchPrepareStackFrame(int offset, SafepointTableBuilder *, bool feedback_vector_slot, size_t stack_param_slots)
void emit_f32x4_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_ptrsize_cond_jumpi(Condition, Label *, Register lhs, int32_t imm, const FreezeCacheState &frozen)
void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind)
void emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_xori(Register dst, Register lhs, int32_t imm)
CacheState * cache_state()
void emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_set_cond(Condition, Register dst, Register lhs, Register rhs)
void emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sub(Register dst, Register lhs, Register rhs)
void TailCallIndirect(compiler::CallDescriptor *call_descriptor, Register target)
void emit_i16x8_q15mulr_sat_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_div(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
bool emit_f16x8_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_sqrt(LiftoffRegister dst, LiftoffRegister src)
void SpillRegisters(Regs... regs)
void emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_extmul_low_i8x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AllocateStackSlot(Register addr, uint32_t size)
void emit_i32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadTaggedPointer(Register dst, Register src_addr, Register offset_reg, int32_t offset_imm, uint32_t *protected_load_pc=nullptr, bool offset_reg_needs_shift=false)
void PushRegisters(LiftoffRegList)
void emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_cond_jumpi(Condition, Label *, Register lhs, int imm, const FreezeCacheState &frozen)
void emit_i64_eqz(Register dst, LiftoffRegister src)
void StoreTaggedPointer(Register dst_addr, Register offset_reg, int32_t offset_imm, Register src, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, SkipWriteBarrier=kNoSkipWriteBarrier)
void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void IncrementSmi(LiftoffRegister dst, int offset)
void PopRegisters(LiftoffRegList)
LiftoffRegister GetUnusedRegister(RegClass rc, std::initializer_list< LiftoffRegister > try_first, LiftoffRegList pinned)
void emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shuffle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, const uint8_t shuffle[16], bool is_swizzle)
void emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_ceil(DoubleRegister dst, DoubleRegister src)
void emit_i32_addi(Register dst, Register lhs, int32_t imm)
void emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_extmul_high_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicOr(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
bool emit_select(LiftoffRegister dst, Register condition, LiftoffRegister true_value, LiftoffRegister false_value, ValueKind kind)
bool emit_i16x8_uconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
bool emit_type_conversion(WasmOpcode opcode, LiftoffRegister dst, LiftoffRegister src, Label *trap=nullptr)
void emit_i8x16_neg(LiftoffRegister dst, LiftoffRegister src)
void AtomicLoad(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, bool i64_offset)
void emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadReturnStackSlot(LiftoffRegister, int offset, ValueKind)
void emit_i32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_u(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_trunc(DoubleRegister dst, DoubleRegister src)
void LoadTaggedPointerFromInstance(Register dst, Register instance, int offset)
void emit_v128_anytrue(LiftoffRegister dst, LiftoffRegister src)
void FillStackSlotsWithZero(int start, int size)
void emit_i32_shl(Register dst, Register src, Register amount)
void emit_i32x4_extmul_high_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_const(LiftoffRegister dst, const uint8_t imms[16])
void emit_f64_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StoreLane(Register dst, Register offset, uintptr_t offset_imm, LiftoffRegister src, StoreType type, uint8_t lane, uint32_t *protected_store_pc, bool i64_offset)
Register LoadOldFramePointer()
void CheckTierUp(int declared_func_index, int budget_used, Label *ool_label, const FreezeCacheState &frozen)
void emit_trace_instruction(uint32_t markid)
void emit_i16x8_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64x2_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallIndirect(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, Register target)
void RecordSpillsInSafepoint(SafepointTableBuilder::Safepoint &safepoint, LiftoffRegList all_spills, LiftoffRegList ref_spills, int spill_offset)
void emit_f64x2_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void clear_i32_upper_half(Register dst)
void emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_sqrt(LiftoffRegister dst, LiftoffRegister src)
void LoadTrustedPointer(Register dst, Register src_addr, int offset, IndirectPointerTag tag)
void emit_i32_add(Register dst, Register lhs, Register rhs)
void emit_i32x4_relaxed_trunc_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
static constexpr int StaticStackFrameSize()
void emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_signextend_i8(Register dst, Register src)
void emit_i8x16_bitmask(LiftoffRegister dst, LiftoffRegister src)
constexpr Register set(Register reg)
constexpr unsigned GetNumRegsSet() const
constexpr DoubleRegister fp() const
constexpr Register gp() const
base::SmallVector< Slot, 8 > slots_
void Construct(int param_slots)
LiftoffAssembler *const asm_
static constexpr int ToTagged(int offset)
static void Pack16Lanes(uint32_t *dst, const uint8_t *shuffle)
#define COMPRESS_POINTERS_BOOL
base::Vector< const DirectHandle< Object > > args
static constexpr unsigned kSignBit
ZoneVector< RpoNumber > & result
#define RETURN_FALSE_IF_MISSING_CPU_FEATURE(name)
MovableLabel continuation
LiftoffRegList regs_to_save
std::optional< OolTrapLabel > trap
constexpr bool IsPowerOfTwo(T value)
constexpr int WhichPowerOfTwo(T value)
constexpr Register kScratchRegister2
void EmitAllTrue(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister src, VectorFormat format)
void EmitIntDivOrRem(LiftoffAssembler *assm, Register dst, Register lhs, Register rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void AtomicBinop(LiftoffAssembler *lasm, Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, Binop op)
void EmitCommutativeBinOpImm(LiftoffAssembler *assm, Register dst, Register lhs, int32_t imm)
void EmitSatTruncateFloatToInt(LiftoffAssembler *assm, Register dst, DoubleRegister src)
void EmitFloatMinOrMax(LiftoffAssembler *assm, RegisterType dst, RegisterType lhs, RegisterType rhs, MinOrMax min_or_max)
void StoreToMemory(LiftoffAssembler *assm, MemOperand dst, const LiftoffAssembler::VarState &src)
constexpr Operand kOSRTargetSlot
constexpr DoubleRegister kScratchDoubleReg
void EmitAnyTrue(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister src)
void EmitFloatSetCond(LiftoffAssembler *assm, Condition cond, Register dst, DoubleRegister lhs, DoubleRegister rhs)
void EmitSimdNonCommutativeBinOp(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, std::optional< CpuFeature > feature=std::nullopt)
void EmitSimdShiftOp(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister operand, LiftoffRegister count)
void EmitTruncateFloatToInt(LiftoffAssembler *assm, Register dst, DoubleRegister src, Label *trap)
MemOperand GetStackSlot(int offset)
constexpr Operand kInstanceDataOperand
void EmitSimdShiftOpImm(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister operand, int32_t count)
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr DoubleRegister kScratchDoubleReg2
void EmitShiftOperation(LiftoffAssembler *assm, Register dst, Register src, Register amount, void(Assembler::*emit_shift)(Register))
void EmitCommutativeBinOp(LiftoffAssembler *assm, Register dst, Register lhs, Register rhs)
MemOperand GetMemOp(LiftoffAssembler *assm, UseScratchRegisterScope *temps, Register addr, Register offset, int32_t offset_imm, unsigned shift_amount=0)
void EmitSatTruncateFloatToUInt64(LiftoffAssembler *assm, Register dst, DoubleRegister src)
void ConvertFloatToIntAndBack(LiftoffAssembler *assm, Register dst, DoubleRegister src, DoubleRegister converted_back, LiftoffRegList pinned)
void EmitSimdCommutativeBinOp(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, std::optional< CpuFeature > feature=std::nullopt)
void LoadFromStack(LiftoffAssembler *assm, LiftoffRegister dst, Operand src, ValueKind kind)
void I32x4ExtMulHelper(LiftoffAssembler *assm, XMMRegister dst, XMMRegister src1, XMMRegister src2, bool low, bool is_signed)
static constexpr RegClass reg_class_for(ValueKind kind)
constexpr DoubleRegister kFpReturnRegisters[]
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
constexpr DoubleRegList kLiftoffAssemblerFpCacheRegs
constexpr int value_kind_full_size(ValueKind kind)
constexpr RegList kLiftoffAssemblerGpCacheRegs
constexpr Register kGpReturnRegisters[]
bool F16x8CmpOpViaF32(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool F16x8BinOpViaF32(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
int declared_function_index(const WasmModule *module, int func_index)
typedef void(VECTORCALL PWasmOp)(const uint8_t *code
constexpr int value_kind_size(ValueKind kind)
static constexpr LiftoffRegList kGpCacheRegList
static constexpr LiftoffRegList kFpCacheRegList
constexpr bool is_reference(ValueKind kind)
LiftoffAssembler::ValueKindSig ValueKindSig
constexpr Register no_reg
constexpr VFPRoundingMode kRoundToNearest
constexpr int kSimd128Size
DwVfpRegister DoubleRegister
constexpr DoubleRegister kScratchDoubleReg
kWasmInternalFunctionIndirectPointerTag instance_data
constexpr DoubleRegister kScratchDoubleReg2
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
constexpr int kSystemPointerSize
constexpr Register kReturnRegister0
constexpr Register kScratchRegister
V8_EXPORT_PRIVATE FlagValues v8_flags
const intptr_t kSmiTagMask
constexpr VFPRoundingMode kRoundToZero
constexpr Register kCArgRegs[]
std::unique_ptr< AssemblerBuffer > ExternalAssemblerBuffer(void *start, int size)
bool is_signed(Condition cond)
constexpr YMMRegister kScratchSimd256Reg
#define DCHECK_LE(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
constexpr T RoundUp(T x, intptr_t m)
uint64_t make_uint64(uint32_t high, uint32_t low)
Register cached_instance_data
bool is_used(LiftoffRegister reg) const
LiftoffRegList used_registers
#define V8_LIKELY(condition)