#ifndef V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_INL_H_
#define V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_INL_H_
template <typename T>
inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
                           Register offset, T offset_imm,
                           bool i64_offset = false, unsigned shift_amount = 0) {
  // ...
  if (shift_amount != 0) {
    // ...
  }
  if (is_int31(offset_imm)) {
    // ...
  }
  // ...
}
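
// GetMemOp folds the base register, an optional (possibly shifted) index
// register, and an immediate into one MemOperand, falling back to a scratch
// register when the immediate is too large for the addressing mode (the
// is_int31 check above). Illustrative call for a scaled 8-byte element
// access (names hypothetical):
//   MemOperand op = GetMemOp(assm, base, index, int32_t{8},
//                            /*i64_offset=*/true, /*shift_amount=*/3);
//   assm->Ld(dst, op);  // dst = *(base + (index << 3) + 8)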
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
                 ValueKind kind) {
  switch (kind) {
    case kI16:
      assm->Lh(dst.gp(), src);
      break;
    case kI32:
      assm->Lw(dst.gp(), src);
      break;
    case kI64:
      assm->Ld(dst.gp(), src);
      break;
    case kF32:
      assm->Lwc1(dst.fp(), src);
      break;
    case kF64:
      assm->Ldc1(dst.fp(), src);
      break;
    case kS128:
      assm->ld_b(dst.fp().toW(), src);
      break;
    default:
      UNREACHABLE();
  }
}
inline void Store(LiftoffAssembler* assm, MemOperand dst, LiftoffRegister src,
                  ValueKind kind) {
  switch (kind) {
    case kI16:
      assm->Ush(src.gp(), dst, t8);
      break;
    case kI32:
      assm->Usw(src.gp(), dst);
      break;
    case kI64:
      assm->Usd(src.gp(), dst);
      break;
    case kF32:
      assm->Uswc1(src.fp(), dst, t8);
      break;
    case kF64:
      assm->Usdc1(src.fp(), dst, t8);
      break;
    case kS128:
      assm->st_b(src.fp().toW(), dst);
      break;
    default:
      UNREACHABLE();
  }
}
inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueKind kind) {
  // ...
}

inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
  // ...
      assm->push(reg.gp());
  // ...
}
#if defined(V8_TARGET_BIG_ENDIAN)
inline void ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst,
                                 LoadType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = dst;
  switch (type.value()) {
    case LoadType::kI64Load8U:
    case LoadType::kI64Load8S:
    case LoadType::kI32Load8U:
    case LoadType::kI32Load8S:
      // No byte swap needed for single-byte loads.
      return;
    case LoadType::kF32Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load32U:
      assm->MacroAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      assm->MacroAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kF64Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case LoadType::kF32Load:
        assm->emit_type_conversion(kExprF32ReinterpretI32, dst, tmp);
        break;
      case LoadType::kF64Load:
        assm->emit_type_conversion(kExprF64ReinterpretI64, dst, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
inline void ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src,
                                  StoreType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = src;
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      // No byte swap needed for single-byte stores.
      return;
    case StoreType::kF32Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI32Store16:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case StoreType::kF64Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI64Store:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    case StoreType::kI64Store32:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI64Store16:
      assm->MacroAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case StoreType::kF32Store:
        assm->emit_type_conversion(kExprF32ReinterpretI32, src, tmp);
        break;
      case StoreType::kF64Store:
        assm->emit_type_conversion(kExprF64ReinterpretI64, src, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
#endif  // V8_TARGET_BIG_ENDIAN
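
// Both helpers implement the same scheme in opposite directions: float values
// take a detour through a general-purpose register (I32/I64 Reinterpret), the
// integer image is byte-swapped at the access width, and floats are
// reinterpreted back afterwards. A worked example for an f32 load on a
// big-endian target (register names illustrative):
//   Lwc1  f0, [mem]              // raw bytes as stored in little-endian order
//   mfc1  t0, f0                 // kExprI32ReinterpretF32
//   ByteSwapUnsigned t0, t0, 4   // swap the 4-byte image
//   mtc1  t0, f0                 // kExprF32ReinterpretI32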
// ...
                    kLiftoffFrameSetupFunctionReg) ==
// ...
  LoadConstant(LiftoffRegister(kLiftoffFrameSetupFunctionReg),
               WasmValue(declared_function_index));
// ...

void LiftoffAssembler::PrepareTailCall(int num_callee_stack_params,
                                       int stack_param_delta) {
  // ...
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
    // ...
  }
  // ...
  daddiu(sp, fp, -stack_param_delta * 8);
  // ...
}
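
// PrepareTailCall shifts the live frame over the caller's stack parameters:
// the {slot_count} 8-byte slots (the callee's stack params plus the saved
// ra/fp pair) are moved by {stack_param_delta} slots, and sp is then rebased
// off fp so the tail-called function starts from a freshly positioned frame.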
void LiftoffAssembler::PatchPrepareStackFrame(
    int offset, SafepointTableBuilder* safepoint_table_builder,
    bool feedback_vector_slot, size_t stack_param_slots) {
  // ...
  if (feedback_vector_slot) {
    // ...
  }
  // ...
  constexpr int kAvailableSpace = 256;
  MacroAssembler patching_assembler(
      zone(), AssemblerOptions{}, CodeObjectRequired::kNo,
      ExternalAssemblerBuffer(buffer_start_ + offset, kAvailableSpace));
  // ...
  patching_assembler.Daddu(sp, sp, Operand(-frame_size));
  // ...
  patching_assembler.BranchLong(imm32);
  // ...
  if (frame_size < v8_flags.stack_size * 1024) {
    // ...
    Daddu(stack_limit, stack_limit, Operand(frame_size));
    // ...
  }
  // ...
  Call(static_cast<Address>(Builtin::kWasmStackOverflow),
       RelocInfo::WASM_STUB_CALL);
  // ...
  safepoint_table_builder->DefineSafepoint(this);
  // ...
  Daddu(sp, sp, Operand(-frame_size));
  // ...
}
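
// The prologue is patched after the fact: code generation leaves a reserved
// gap at {offset}, and once the final frame size is known a small
// MacroAssembler writes the real "Daddu sp, sp, -frame_size" into it (plus,
// for frames near the stack limit, an explicit check that calls
// Builtin::kWasmStackOverflow and records a safepoint).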
void LiftoffAssembler::CheckTierUp(int declared_func_index, int budget_used,
                                   Label* ool_label,
                                   const FreezeCacheState& frozen) {
  // ...
      WasmTrustedInstanceData::kTieringBudgetArrayOffset);
  // ...
  int budget_arr_offset = kInt32Size * declared_func_index;
  // ...
  MemOperand budget_addr(budget_array, budget_arr_offset);
  Lw(budget, budget_addr);
  Subu(budget, budget, budget_used);
  Sw(budget, budget_addr);
  Branch(ool_label, less, budget, Operand(zero_reg));
}
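
// The tier-up check is a simple countdown: budget_array[declared_func_index]
// is decremented by {budget_used}, written back, and the out-of-line path
// (which requests optimized recompilation) is taken once the value drops
// below zero.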
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value) {
  switch (value.type().kind()) {
    // ...
  }
}
void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                         Register offset_reg,
                                         int32_t offset_imm,
                                         uint32_t* protected_load_pc,
                                         bool needs_shift) {
  unsigned shift_amount = !needs_shift ? 0 : 3;
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm,
                                        false, shift_amount);
  // ...
  if (protected_load_pc) {
    // ...
  }
}

void LiftoffAssembler::LoadProtectedPointer(Register dst, Register src_addr,
                                            int32_t offset_imm) {
  // ...
}

void LiftoffAssembler::LoadFullPointer(Register dst, Register src_addr,
                                       int32_t offset_imm) {
  // ...
}

void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
                                          Register offset_reg,
                                          int32_t offset_imm, Register src,
                                          LiftoffRegList pinned,
                                          uint32_t* protected_store_pc,
                                          SkipWriteBarrier skip_write_barrier) {
  // ...
  if (protected_store_pc) {
    // ...
  }
  if (skip_write_barrier || v8_flags.disable_write_barriers) return;
  // ...
  Daddu(scratch, dst_op.rm(), dst_op.offset());
  // ...
                          StubCallMode::kCallWasmRuntimeStub);
  // ...
}
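
// The write barrier in StoreTaggedPointer is skipped on three levels: the
// compiler may prove it unnecessary (skip_write_barrier), the
// --disable-write-barriers flag may turn it off globally, and the (elided)
// page-flag checks filter dynamically before the record-write stub is called
// with StubCallMode::kCallWasmRuntimeStub.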
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uintptr_t offset_imm,
                            LoadType type, uint32_t* protected_load_pc,
                            bool is_load_mem, bool i64_offset,
                            bool needs_shift) {
  unsigned shift_amount = needs_shift ? type.size_log_2() : 0;
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm,
                                        i64_offset, shift_amount);
  // ...
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      Lbu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
      Lb(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      // ...
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      // ...
    case LoadType::kI64Load32U:
      // ...
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      // ...
    case LoadType::kI64Load:
      // ...
    case LoadType::kF32Load:
      // ...
    case LoadType::kF32LoadF16:
      // ...
    case LoadType::kF64Load:
      // ...
    case LoadType::kS128Load:
      // ...
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_load_mem) {
    pinned.set(src_op.rm());
    liftoff::ChangeEndiannessLoad(this, dst, type, pinned);
  }
#endif

  if (protected_load_pc) {
    // ...
  }
}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uintptr_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem,
                             bool i64_offset) {
  // ...
#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_store_mem) {
    pinned.set(dst_op.rm());
    // ...
    Move(tmp, src, type.value_type());
    // ...
    liftoff::ChangeEndiannessStore(this, src, type, pinned);
    // ...
  }
#endif

  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8:
      Sb(src.gp(), dst_op);
      break;
    case StoreType::kI32Store16:
    case StoreType::kI64Store16:
      // ...
    case StoreType::kI32Store:
    case StoreType::kI64Store32:
      // ...
    case StoreType::kI64Store:
      // ...
    case StoreType::kF32Store:
      // ...
    case StoreType::kF32StoreF16:
      // ...
    case StoreType::kF64Store:
      // ...
    case StoreType::kS128Store:
      // ...
  }

  if (protected_store_pc) {
    // ...
  }
}
void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
                                  Register offset_reg, uintptr_t offset_imm,
                                  LoadType type, LiftoffRegList pinned,
                                  bool i64_offset) {
  // ...
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U: {
      Lbu(dst.gp(), src_op);
      // ...
    }
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U: {
      Lhu(dst.gp(), src_op);
      // ...
    }
    case LoadType::kI32Load: {
      Lw(dst.gp(), src_op);
      // ...
    }
    case LoadType::kI64Load32U: {
      Lwu(dst.gp(), src_op);
      // ...
    }
    case LoadType::kI64Load: {
      Ld(dst.gp(), src_op);
      // ...
    }
    // ...
  }
}
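
// A sequentially consistent wasm atomic load on MIPS64 is just the plain
// load of the right width combined with the (elided) sync barrier; the LL/SC
// machinery below is only needed for the read-modify-write operations.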
void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
                                   uintptr_t offset_imm, LiftoffRegister src,
                                   StoreType type, LiftoffRegList pinned,
                                   bool i64_offset) {
  // ...
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8: {
      // ...
      Sb(src.gp(), dst_op);
      // ...
    }
    case StoreType::kI64Store16:
    case StoreType::kI32Store16: {
      // ...
      Sh(src.gp(), dst_op);
      // ...
    }
    case StoreType::kI64Store32:
    case StoreType::kI32Store: {
      // ...
      Sw(src.gp(), dst_op);
      // ...
    }
    case StoreType::kI64Store: {
      // ...
      Sd(src.gp(), dst_op);
      // ...
    }
    // ...
  }
}
#define ASSEMBLE_ATOMIC_BINOP(load_linked, store_conditional, bin_instr) \
  do {                                                                   \
    Label binop;                                                         \
    sync();                                                              \
    bind(&binop);                                                        \
    load_linked(result.gp(), MemOperand(temp0, 0));                      \
    bin_instr(temp1, result.gp(), Operand(value.gp()));                  \
    store_conditional(temp1, MemOperand(temp0, 0));                      \
    BranchShort(&binop, eq, temp1, Operand(zero_reg));                   \
    sync();                                                              \
  } while (0)

#define ASSEMBLE_ATOMIC_BINOP_EXT(load_linked, store_conditional, size, \
                                  bin_instr, aligned)                   \
  do {                                                                  \
    Label binop;                                                        \
    andi(temp3, temp0, aligned);                                        \
    Dsubu(temp0, temp0, Operand(temp3));                                \
    sll(temp3, temp3, 3);                                               \
    sync();                                                             \
    bind(&binop);                                                       \
    load_linked(temp1, MemOperand(temp0, 0));                           \
    ExtractBits(result.gp(), temp1, temp3, size, false);                \
    bin_instr(temp2, result.gp(), value.gp());                          \
    InsertBits(temp1, temp2, temp3, size);                              \
    store_conditional(temp1, MemOperand(temp0, 0));                     \
    BranchShort(&binop, eq, temp1, Operand(zero_reg));                  \
    sync();                                                             \
  } while (0)

#define ATOMIC_BINOP_CASE(name, inst32, inst64)                                \
  void LiftoffAssembler::Atomic##name(                                         \
      Register dst_addr, Register offset_reg, uintptr_t offset_imm,            \
      LiftoffRegister value, LiftoffRegister result, StoreType type,           \
      bool i64_offset) {                                                       \
    LiftoffRegList pinned{dst_addr, value, result};                            \
    if (offset_reg != no_reg) pinned.set(offset_reg);                          \
    Register temp0 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();       \
    Register temp1 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();       \
    Register temp2 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();       \
    Register temp3 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();       \
    MemOperand dst_op =                                                        \
        liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm, i64_offset); \
    Daddu(temp0, dst_op.rm(), dst_op.offset());                                \
    switch (type.value()) {                                                    \
      case StoreType::kI64Store8:                                              \
        ASSEMBLE_ATOMIC_BINOP_EXT(Lld, Scd, 8, inst64, 7);                     \
        break;                                                                 \
      case StoreType::kI32Store8:                                              \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll, Sc, 8, inst32, 3);                       \
        break;                                                                 \
      case StoreType::kI64Store16:                                             \
        ASSEMBLE_ATOMIC_BINOP_EXT(Lld, Scd, 16, inst64, 7);                    \
        break;                                                                 \
      case StoreType::kI32Store16:                                             \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll, Sc, 16, inst32, 3);                      \
        break;                                                                 \
      case StoreType::kI64Store32:                                             \
        ASSEMBLE_ATOMIC_BINOP_EXT(Lld, Scd, 32, inst64, 7);                    \
        break;                                                                 \
      case StoreType::kI32Store:                                               \
        ASSEMBLE_ATOMIC_BINOP(Ll, Sc, inst32);                                 \
        break;                                                                 \
      case StoreType::kI64Store:                                               \
        ASSEMBLE_ATOMIC_BINOP(Lld, Scd, inst64);                               \
        break;                                                                 \
      default:                                                                 \
        UNREACHABLE();                                                         \
    }                                                                          \
  }

// ...

#undef ASSEMBLE_ATOMIC_BINOP
#undef ASSEMBLE_ATOMIC_BINOP_EXT
#undef ATOMIC_BINOP_CASE
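
// Each Atomic<op> above expands to a classic LL/SC retry loop. For the
// aligned word case (ASSEMBLE_ATOMIC_BINOP with Ll/Sc and an Addu-style
// bin_instr) the emitted sequence is, schematically:
//   sync                      ; barrier
// retry:
//   ll   result, 0(temp0)     ; load-linked old value
//   addu temp1, result, value ; compute new value
//   sc   temp1, 0(temp0)      ; store-conditional; temp1 = 0 on failure
//   beq  temp1, zero, retry   ; another core intervened -> retry
//   sync                      ; barrier
// The _EXT variant handles sub-word widths by aligning temp0 down to the
// containing word and splicing the narrow value in via Extract/InsertBits.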
#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(load_linked, store_conditional) \
  do {                                                                   \
    Label exchange;                                                      \
    sync();                                                              \
    bind(&exchange);                                                     \
    load_linked(result.gp(), MemOperand(temp0, 0));                      \
    mov(temp1, value.gp());                                              \
    store_conditional(temp1, MemOperand(temp0, 0));                      \
    BranchShort(&exchange, eq, temp1, Operand(zero_reg));                \
    sync();                                                              \
  } while (0)

#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(load_linked, store_conditional, \
                                             size, aligned)                  \
  do {                                                                       \
    Label exchange;                                                          \
    andi(temp1, temp0, aligned);                                             \
    Dsubu(temp0, temp0, Operand(temp1));                                     \
    sll(temp1, temp1, 3);                                                    \
    sync();                                                                  \
    bind(&exchange);                                                         \
    load_linked(temp2, MemOperand(temp0, 0));                                \
    ExtractBits(result.gp(), temp2, temp1, size, false);                     \
    InsertBits(temp2, value.gp(), temp1, size);                              \
    store_conditional(temp2, MemOperand(temp0, 0));                          \
    BranchShort(&exchange, eq, temp2, Operand(zero_reg));                    \
    sync();                                                                  \
  } while (0)

void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                      uintptr_t offset_imm,
                                      LiftoffRegister value,
                                      LiftoffRegister result, StoreType type,
                                      bool i64_offset) {
  LiftoffRegList pinned{dst_addr, value, result};
  if (offset_reg != no_reg) pinned.set(offset_reg);
  // ...
  Daddu(temp0, dst_op.rm(), dst_op.offset());
  switch (type.value()) {
    case StoreType::kI64Store8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Lld, Scd, 8, 7);
      break;
    case StoreType::kI32Store8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll, Sc, 8, 3);
      break;
    case StoreType::kI64Store16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Lld, Scd, 16, 7);
      break;
    case StoreType::kI32Store16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll, Sc, 16, 3);
      break;
    case StoreType::kI64Store32:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Lld, Scd, 32, 7);
      break;
    case StoreType::kI32Store:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(Ll, Sc);
      break;
    case StoreType::kI64Store:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(Lld, Scd);
      break;
    default:
      UNREACHABLE();
  }
}

#undef ASSEMBLE_ATOMIC_EXCHANGE_INTEGER
#undef ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT
#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(load_linked,     \
                                                 store_conditional) \
  do {                                                              \
    Label compareExchange;                                          \
    Label exit;                                                     \
    sync();                                                         \
    bind(&compareExchange);                                         \
    load_linked(result.gp(), MemOperand(temp0, 0));                 \
    BranchShort(&exit, ne, expected.gp(), Operand(result.gp()));    \
    mov(temp2, new_value.gp());                                     \
    store_conditional(temp2, MemOperand(temp0, 0));                 \
    BranchShort(&compareExchange, eq, temp2, Operand(zero_reg));    \
    bind(&exit);                                                    \
    sync();                                                         \
  } while (0)

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(            \
    load_linked, store_conditional, size, aligned)               \
  do {                                                           \
    Label compareExchange;                                       \
    Label exit;                                                  \
    andi(temp1, temp0, aligned);                                 \
    Dsubu(temp0, temp0, Operand(temp1));                         \
    sll(temp1, temp1, 3);                                        \
    sync();                                                      \
    bind(&compareExchange);                                      \
    load_linked(temp2, MemOperand(temp0, 0));                    \
    ExtractBits(result.gp(), temp2, temp1, size, false);         \
    ExtractBits(temp2, expected.gp(), zero_reg, size, false);    \
    BranchShort(&exit, ne, temp2, Operand(result.gp()));         \
    InsertBits(temp2, new_value.gp(), temp1, size);              \
    store_conditional(temp2, MemOperand(temp0, 0));              \
    BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
    bind(&exit);                                                 \
    sync();                                                      \
  } while (0)

void LiftoffAssembler::AtomicCompareExchange(
    Register dst_addr, Register offset_reg, uintptr_t offset_imm,
    LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
    StoreType type, bool i64_offset) {
  LiftoffRegList pinned{dst_addr, expected, new_value, result};
  if (offset_reg != no_reg) pinned.set(offset_reg);
  // ...
  Daddu(temp0, dst_op.rm(), dst_op.offset());
  switch (type.value()) {
    case StoreType::kI64Store8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Lld, Scd, 8, 7);
      break;
    case StoreType::kI32Store8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll, Sc, 8, 3);
      break;
    case StoreType::kI64Store16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Lld, Scd, 16, 7);
      break;
    case StoreType::kI32Store16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll, Sc, 16, 3);
      break;
    case StoreType::kI64Store32:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Lld, Scd, 32, 7);
      break;
    case StoreType::kI32Store:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(Ll, Sc);
      break;
    case StoreType::kI64Store:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(Lld, Scd);
      break;
    default:
      UNREACHABLE();
  }
}

#undef ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER
#undef ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT
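
// Note that the sub-word compare-exchange normalizes {expected} with
// ExtractBits(..., zero_reg, size, false) before comparing: only the low
// {size} bits participate, so callers may pass an unclean upper word.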
// ...
                                           uint32_t caller_slot_idx,
// ...
                                            uint32_t caller_slot_idx,
                                            // ...
                                            Register frame_pointer) {
// ...
  switch (value.type().kind()) {
    // ...
  }
// ...
  Daddu(a0, fp, Operand(-start - size));
  Daddu(a1, fp, Operand(-start));
// ...
  Dsubu(dst, fp, Operand(offset));
// ...
                                      LiftoffRegister src) {
// ...
  Register scratch = temps.Acquire();
  // ...
  Daddu(scratch, scratch, Operand(1));
// ...

void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  // ...
  MacroAssembler::Div(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Divu(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Mod(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Modu(dst, lhs, rhs);
}
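
// The div/rem emitters share one protocol: the (elided) prologue branches to
// trap_div_by_zero when rhs == 0, and for signed division also to
// trap_div_unrepresentable for INT_MIN / -1, before the MacroAssembler
// instruction computes the result.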
#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }

// ...

#define I32_BINOP_I(name, instruction)                                  \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register lhs, \
                                            int32_t imm) {              \
    instruction(dst, lhs, Operand(imm));                                \
  }

// ...

#define I32_SHIFTOP(name, instruction)                               \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
                                         Register amount) {          \
    instruction(dst, src, amount);                                   \
  }
#define I32_SHIFTOP_I(name, instruction)                                \
  I32_SHIFTOP(name, instruction##v)                                     \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register src, \
                                            int amount) {               \
    instruction(dst, src, amount & 31);                                 \
  }
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
                                     int64_t imm) {
  MacroAssembler::Daddu(dst.gp(), lhs.gp(), Operand(imm));
}

void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  MacroAssembler::Dmul(dst.gp(), lhs.gp(), rhs.gp());
}

void LiftoffAssembler::emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs,
                                     int32_t imm) {
  // ...
  Register scratch = temps.Acquire();
  // ...
  MacroAssembler::Dmul(dst.gp(), lhs.gp(), scratch);
}

void LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  // ...
  MacroAssembler::Ddiv(dst.gp(), lhs.gp(), rhs.gp());
}

void LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Ddivu(dst.gp(), lhs.gp(), rhs.gp());
}

void LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Dmod(dst.gp(), lhs.gp(), rhs.gp());
}

void LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  // ...
  MacroAssembler::Dmodu(dst.gp(), lhs.gp(), rhs.gp());
}
#define I64_BINOP(name, instruction)                                   \
  void LiftoffAssembler::emit_i64_##name(                              \
      LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
    instruction(dst.gp(), lhs.gp(), rhs.gp());                         \
  }

// ...

#define I64_BINOP_I(name, instruction)                         \
  void LiftoffAssembler::emit_i64_##name##i(                   \
      LiftoffRegister dst, LiftoffRegister lhs, int32_t imm) { \
    instruction(dst.gp(), lhs.gp(), Operand(imm));             \
  }

// ...

#define I64_SHIFTOP(name, instruction)                             \
  void LiftoffAssembler::emit_i64_##name(                          \
      LiftoffRegister dst, LiftoffRegister src, Register amount) { \
    instruction(dst.gp(), src.gp(), amount);                       \
  }
#define I64_SHIFTOP_I(name, instruction)                          \
  I64_SHIFTOP(name, instruction##v)                               \
  void LiftoffAssembler::emit_i64_##name##i(LiftoffRegister dst,  \
                                            LiftoffRegister src, \
                                            int amount) {         \
    amount &= 63;                                                 \
    if (amount < 32)                                              \
      instruction(dst.gp(), src.gp(), amount);                    \
    else                                                          \
      instruction##32(dst.gp(), src.gp(), amount - 32);           \
  }

// ...

void LiftoffAssembler::emit_u32_to_uintptr(Register dst, Register src) {
  Dext(dst, src, 0, 32);
}
void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  if (CpuFeatures::IsSupported(MIPS_SIMD)) {
    // ...
    binsli_w(dst.toW(), scratch.toW(), 0);
  } else {
    UseScratchRegisterScope temps(this);
    Register scratch1 = temps.Acquire();
    Register scratch2 = temps.Acquire();
    mfc1(scratch1, lhs);
    mfc1(scratch2, rhs);
    srl(scratch2, scratch2, 31);
    Ins(scratch1, scratch2, 31, 1);
    mtc1(scratch1, dst);
  }
}

void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  if (CpuFeatures::IsSupported(MIPS_SIMD)) {
    // ...
    binsli_d(dst.toW(), scratch.toW(), 0);
  } else {
    UseScratchRegisterScope temps(this);
    Register scratch1 = temps.Acquire();
    Register scratch2 = temps.Acquire();
    dmfc1(scratch1, lhs);
    dmfc1(scratch2, rhs);
    dsrl32(scratch2, scratch2, 31);
    Dins(scratch1, scratch2, 63, 1);
    dmtc1(scratch1, dst);
  }
}
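
// Both copysign fallbacks work on the raw bit pattern: move the float image
// into GPRs, shift rhs's sign bit down (srl 31 / dsrl32 31), insert it as the
// top bit of lhs's image (Ins .. 31,1 / Dins .. 63,1), and move the result
// back. The MSA path performs the same splice in a single binsli instruction.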
#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
#define FP_UNOP(name, instruction)                                             \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
  }
#define FP_UNOP_RETURN_TRUE(name, instruction)                                 \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
    return true;                                                               \
  }

// ...

#undef FP_UNOP_RETURN_TRUE
bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
                                            LiftoffRegister dst,
                                            LiftoffRegister src, Label* trap) {
  switch (opcode) {
    case kExprI32ConvertI64:
      // ...
    case kExprI32SConvertF32: {
      // ...
    }
    case kExprI32UConvertF32: {
      // ...
    }
    case kExprI32SConvertF64: {
      // ...
    }
    case kExprI32UConvertF64: {
      // ...
    }
    case kExprI32ReinterpretF32:
      // ...
    case kExprI64SConvertI32:
      sll(dst.gp(), src.gp(), 0);
      return true;
    case kExprI64UConvertI32:
      // ...
    case kExprI64SConvertF32: {
      // ...
    }
    case kExprI64UConvertF32: {
      // ...
    }
    case kExprI64SConvertF64: {
      // ...
    }
    case kExprI64UConvertF64: {
      // ...
    }
    case kExprI64ReinterpretF64:
      dmfc1(dst.gp(), src.fp());
      return true;
    case kExprF32SConvertI32: {
      // ...
      mtc1(src.gp(), scratch.fp());
      cvt_s_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF32UConvertI32:
      // ...
    case kExprF32ConvertF64:
      // ...
    case kExprF32ReinterpretI32:
      // ...
    case kExprF64SConvertI32: {
      // ...
      mtc1(src.gp(), scratch.fp());
      cvt_d_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF64UConvertI32:
      // ...
    case kExprF64ConvertF32:
      // ...
    case kExprF64ReinterpretI64:
      dmtc1(src.gp(), dst.fp());
      return true;
    case kExprI32SConvertSatF32: {
      // ...
      mov(dst.gp(), zero_reg);
      // ...
      li(dst.gp(), static_cast<int32_t>(std::numeric_limits<int32_t>::min()));
      // ...
          static_cast<float>(std::numeric_limits<int32_t>::min()));
      // ...
    }
    case kExprI32UConvertSatF32: {
      Label isnan_or_lessthan_or_equal_zero;
      mov(dst.gp(), zero_reg);
      // ...
      bind(&isnan_or_lessthan_or_equal_zero);
      return true;
    }
    case kExprI32SConvertSatF64: {
      // ...
      mov(dst.gp(), zero_reg);
      // ...
      li(dst.gp(), static_cast<int32_t>(std::numeric_limits<int32_t>::min()));
      // ...
          static_cast<double>(std::numeric_limits<int32_t>::min()));
      // ...
    }
    case kExprI32UConvertSatF64: {
      Label isnan_or_lessthan_or_equal_zero;
      mov(dst.gp(), zero_reg);
      // ...
      bind(&isnan_or_lessthan_or_equal_zero);
      return true;
    }
    case kExprI64SConvertSatF32: {
      // ...
      mov(dst.gp(), zero_reg);
      // ...
      li(dst.gp(), static_cast<int64_t>(std::numeric_limits<int64_t>::min()));
      // ...
          static_cast<float>(std::numeric_limits<int64_t>::min()));
      // ...
    }
    case kExprI64UConvertSatF32: {
      Label isnan_or_lessthan_or_equal_zero;
      mov(dst.gp(), zero_reg);
      // ...
      bind(&isnan_or_lessthan_or_equal_zero);
      return true;
    }
    case kExprI64SConvertSatF64: {
      // ...
      mov(dst.gp(), zero_reg);
      // ...
      li(dst.gp(), static_cast<int64_t>(std::numeric_limits<int64_t>::min()));
      // ...
          static_cast<double>(std::numeric_limits<int64_t>::min()));
      // ...
    }
    case kExprI64UConvertSatF64: {
      Label isnan_or_lessthan_or_equal_zero;
      mov(dst.gp(), zero_reg);
      // ...
      bind(&isnan_or_lessthan_or_equal_zero);
      return true;
    }
    // ...
  }
}
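
// All *ConvertSat* cases above share one shape: preset dst to 0 (the NaN
// result), branch out for NaN or out-of-range inputs, clamp explicitly
// against static_cast<float/double>(std::numeric_limits<intN_t>::min())
// where the limit is not exactly representable, and otherwise truncate.
// Unlike the trapping kExprI32SConvertF32 flavor, the {trap} label is never
// taken on the saturating paths.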
void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  seb(dst.gp(), src.gp());
}

void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  seh(dst.gp(), src.gp());
}

void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  sll(dst.gp(), src.gp(), 0);
}

// ...
                                      const FreezeCacheState& frozen) {
// ...
                                           Register lhs, int32_t imm,
                                           const FreezeCacheState& frozen) {
// ...
                                           Register lhs, int32_t imm,
                                           const FreezeCacheState& frozen) {
// ...
                                         Register lhs, Register rhs) {
// ...

void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  sltiu(dst, src.gp(), 1);
}

void LiftoffAssembler::emit_i64_set_cond(Condition cond, Register dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  CompareWord(cond, dst, lhs.gp(), Operand(rhs.gp()));
}
inline void EmitAnyTrue(LiftoffAssembler* assm, LiftoffRegister dst,
                        LiftoffRegister src) {
  Label all_false;
  // ...
  assm->li(dst.gp(), 0l);  // branch delay slot
  assm->li(dst.gp(), 1);
  assm->bind(&all_false);
}

// ...
  Label all_true;
  // ...
  assm->li(dst.gp(), 1);  // branch delay slot
  assm->li(dst.gp(), 0l);
  assm->bind(&all_true);
// ...
// ...
    Store(assm, dst, src.reg(), src.kind());
// ...
  if (src.is_const()) {
    if (src.i32_const() == 0) {
      // ...
    } else {
      // ...
      assm->li(temp, src.i32_const());
    }
  }
  // ...
    assm->Sw(temp, dst);
  // ...
    assm->Sd(temp, dst);
// ...
// ...
  Label not_nan, cont;
// ...
  Label not_nan, cont;
// ...
                                   LiftoffRegister true_value,
                                   LiftoffRegister false_value,
// ...
                                      const FreezeCacheState& frozen) {
  // ...
  Register scratch = temps.Acquire();
void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
                                     Register offset_reg, uintptr_t offset_imm,
                                     LoadType type,
                                     LoadTransformationKind transform,
                                     uint32_t* protected_load_pc,
                                     bool i64_offset) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  // ...
  MSARegister dst_msa = dst.fp().toW();
  // ...
  MachineType memtype = type.mem_type();

  if (transform == LoadTransformationKind::kExtend) {
    Ld(scratch, src_op);
    if (memtype == MachineType::Int8()) {
      fill_d(dst_msa, scratch);
      // ...
    } else if (memtype == MachineType::Uint8()) {
      fill_d(dst_msa, scratch);
      // ...
    } else if (memtype == MachineType::Int16()) {
      fill_d(dst_msa, scratch);
      // ...
    } else if (memtype == MachineType::Uint16()) {
      fill_d(dst_msa, scratch);
      // ...
    } else if (memtype == MachineType::Int32()) {
      fill_d(dst_msa, scratch);
      // ...
    } else if (memtype == MachineType::Uint32()) {
      fill_d(dst_msa, scratch);
      // ...
    }
  } else if (transform == LoadTransformationKind::kZeroExtend) {
    xor_v(dst_msa, dst_msa, dst_msa);
    if (memtype == MachineType::Int32()) {
      Lwu(scratch, src_op);
      // ...
    } else {
      // ...
      Ld(scratch, src_op);
      // ...
    }
  } else {
    // LoadTransformationKind::kSplat
    if (memtype == MachineType::Int8()) {
      Lb(scratch, src_op);
      fill_b(dst_msa, scratch);
    } else if (memtype == MachineType::Int16()) {
      Lh(scratch, src_op);
      fill_h(dst_msa, scratch);
    } else if (memtype == MachineType::Int32()) {
      Lw(scratch, src_op);
      fill_w(dst_msa, scratch);
    } else if (memtype == MachineType::Int64()) {
      Ld(scratch, src_op);
      fill_d(dst_msa, scratch);
    }
  }
}
void LiftoffAssembler::LoadLane(LiftoffRegister dst, LiftoffRegister src,
                                Register addr, Register offset_reg,
                                uintptr_t offset_imm, LoadType type,
                                uint8_t laneidx, uint32_t* protected_load_pc,
                                bool i64_offset) {
  // ...
  LoadStoreLaneParams load_params(type.mem_type().representation(), laneidx);
  // ...
}

void LiftoffAssembler::StoreLane(Register dst, Register offset,
                                 uintptr_t offset_imm, LiftoffRegister src,
                                 StoreType type, uint8_t lane,
                                 uint32_t* protected_store_pc,
                                 bool i64_offset) {
  // ...
  if (protected_store_pc) *protected_store_pc = pc_offset();
  LoadStoreLaneParams store_params(type.mem_rep(), lane);
  // ...
}
void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs,
                                          const uint8_t shuffle[16],
                                          bool is_swizzle) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();

  uint64_t control_hi = 0;
  uint64_t control_low = 0;
  for (int i = 7; i >= 0; i--) {
    control_hi <<= 8;
    control_hi |= shuffle[i + 8];
    control_low <<= 8;
    control_low |= shuffle[i];
  }

  if (dst_msa == lhs_msa) {
    // ...
  } else if (dst_msa == rhs_msa) {
    // ...
  }
  // ...
  vshf_b(dst_msa, rhs_msa, lhs_msa);
  // ...
}
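
// The loop packs the 16 shuffle indices into two little-endian control words
// (one per 64-bit half) that feed vshf_b's byte selection over {rhs:lhs}.
// For example, the identity shuffle 0,1,...,15 yields
// control_low = 0x0706050403020100 and control_hi = 0x0f0e0d0c0b0a0908.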
void LiftoffAssembler::emit_i8x16_swizzle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // ...
    move_v(dst_msa, rhs_msa);
  // ...
}
// ...
                                                  LiftoffRegister src) {
// ...
                                                  LiftoffRegister src) {
// ...
    LiftoffRegister dst, LiftoffRegister src) {
// ...
    LiftoffRegister dst, LiftoffRegister src) {
// ...
                                                 LiftoffRegister src1,
                                                 LiftoffRegister src2,
                                                 LiftoffRegister mask,
// ...

void LiftoffAssembler::emit_i8x16_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  fill_b(dst.fp().toW(), src.gp());
}

void LiftoffAssembler::emit_i16x8_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  fill_h(dst.fp().toW(), src.gp());
}

void LiftoffAssembler::emit_i32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  fill_w(dst.fp().toW(), src.gp());
}

void LiftoffAssembler::emit_i64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  fill_d(dst.fp().toW(), src.gp());
}

// ...
                                        LiftoffRegister src) {
// ...
                                        LiftoffRegister src) {
// ...
#define SIMD_BINOP(name1, name2, type)                                   \
  void LiftoffAssembler::emit_##name1##_extmul_low_##name2(              \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    MacroAssembler::ExtMulLow(type, dst.fp().toW(), src1.fp().toW(),     \
                              src2.fp().toW());                          \
  }                                                                      \
  void LiftoffAssembler::emit_##name1##_extmul_high_##name2(             \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    MacroAssembler::ExtMulHigh(type, dst.fp().toW(), src1.fp().toW(),    \
                               src2.fp().toW());                         \
  }

// ...

#undef SIMD_BINOP

#define SIMD_BINOP(name1, name2, type)                                    \
  void LiftoffAssembler::emit_##name1##_extadd_pairwise_##name2(          \
      LiftoffRegister dst, LiftoffRegister src) {                         \
    MacroAssembler::ExtAddPairwise(type, dst.fp().toW(), src.fp().toW()); \
  }

// ...

#undef SIMD_BINOP

void LiftoffAssembler::emit_i16x8_q15mulr_sat_s(LiftoffRegister dst,
                                                LiftoffRegister src1,
                                                LiftoffRegister src2) {
  mulr_q_h(dst.fp().toW(), src1.fp().toW(), src2.fp().toW());
}
// ...
                                                   LiftoffRegister src1,
                                                   LiftoffRegister src2) {
// ...
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
// ...
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs,
                                                     LiftoffRegister acc) {
// ...
void LiftoffAssembler::emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
  nor_v(dst.fp().toW(), dst.fp().toW(), dst.fp().toW());
}

void LiftoffAssembler::emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_s_b(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_u_b(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_s_b(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_u_b(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
  nor_v(dst.fp().toW(), dst.fp().toW(), dst.fp().toW());
}

void LiftoffAssembler::emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_s_h(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_u_h(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_s_h(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_u_h(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
  nor_v(dst.fp().toW(), dst.fp().toW(), dst.fp().toW());
}

void LiftoffAssembler::emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_s_w(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_u_w(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_s_w(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_u_w(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fceq_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fcune_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fclt_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fcle_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  ceq_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
  nor_v(dst.fp().toW(), dst.fp().toW(), dst.fp().toW());
}

// ...
                                      LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fceq_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fcune_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fclt_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  fcle_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}
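
// MSA only provides "less-than" style compares (clt/cle), so the gt and ge
// emitters above swap operands: a > b is emitted as clt(b, a) and a >= b as
// cle(b, a). ne is eq followed by a full bitwise self-inverting nor_v.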
void LiftoffAssembler::emit_s128_const(LiftoffRegister dst,
                                       const uint8_t imms[16]) {
  MSARegister dst_msa = dst.fp().toW();
  uint64_t vals[2];
  memcpy(vals, imms, sizeof(vals));
  // ...
}

void LiftoffAssembler::emit_s128_not(LiftoffRegister dst, LiftoffRegister src) {
  nor_v(dst.fp().toW(), src.fp().toW(), src.fp().toW());
}

void LiftoffAssembler::emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  and_v(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  or_v(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  xor_v(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}
void LiftoffAssembler::emit_s128_and_not(LiftoffRegister dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_s128_select(LiftoffRegister dst,
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister mask) {
  // ...
  bsel_v(dst.fp().toW(), src2.fp().toW(), src1.fp().toW());
}
// ...
                                        LiftoffRegister src) {
// ...
                                         LiftoffRegister src) {
// ...
                                          LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  // ...
  srli_b(scratch0, src.fp().toW(), 7);
  srli_h(scratch1, scratch0, 7);
  or_v(scratch0, scratch0, scratch1);
  srli_w(scratch1, scratch0, 14);
  or_v(scratch0, scratch0, scratch1);
  srli_d(scratch1, scratch0, 28);
  or_v(scratch0, scratch0, scratch1);
  shf_w(scratch1, scratch0, 0x0E);
  ilvev_b(scratch0, scratch1, scratch0);
  // ...
}
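
// The bitmask sequence repeatedly halves the stride: srli_b drops each byte's
// sign bit to a low position, then every srli/or_v pair folds neighboring
// lanes together so the 16 sign bits migrate into the low bytes, which the
// final shf_w/ilvev_b step gathers before the (elided) copy to the gp
// destination.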
void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  slli_b(dst.fp().toW(), lhs.fp().toW(), rhs & 7);
}

void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srai_b(dst.fp().toW(), lhs.fp().toW(), rhs & 7);
}

void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srli_b(dst.fp().toW(), lhs.fp().toW(), rhs & 7);
}

void LiftoffAssembler::emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  addv_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  adds_s_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  adds_u_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  subv_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  subs_s_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  subs_u_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_s_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_u_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_s_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i8x16_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_u_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}
void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst,
                                         LiftoffRegister src) {
  pcnt_b(dst.fp().toW(), src.fp().toW());
}

// ...
                                      LiftoffRegister src) {
// ...
                                         LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  // ...
  srli_h(scratch0, src.fp().toW(), 15);
  srli_w(scratch1, scratch0, 15);
  or_v(scratch0, scratch0, scratch1);
  srli_d(scratch1, scratch0, 30);
  or_v(scratch0, scratch0, scratch1);
  shf_w(scratch1, scratch0, 0x0E);
  slli_d(scratch1, scratch1, 4);
  or_v(scratch0, scratch0, scratch1);
  // ...
}
void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  slli_h(dst.fp().toW(), lhs.fp().toW(), rhs & 15);
}

void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srai_h(dst.fp().toW(), lhs.fp().toW(), rhs & 15);
}

void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srli_h(dst.fp().toW(), lhs.fp().toW(), rhs & 15);
}

void LiftoffAssembler::emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  addv_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  adds_s_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  adds_u_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  subv_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  subs_s_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  subs_u_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  mulv_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_s_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_u_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_s_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_u_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}
// ...
                                      LiftoffRegister src) {
// ...
                                         LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  // ...
  srli_w(scratch0, src.fp().toW(), 31);
  srli_d(scratch1, scratch0, 31);
  or_v(scratch0, scratch0, scratch1);
  shf_w(scratch1, scratch0, 0x0E);
  slli_d(scratch1, scratch1, 2);
  or_v(scratch0, scratch0, scratch1);
  // ...
}
void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  slli_w(dst.fp().toW(), lhs.fp().toW(), rhs & 31);
}

void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srai_w(dst.fp().toW(), lhs.fp().toW(), rhs & 31);
}

void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srli_w(dst.fp().toW(), lhs.fp().toW(), rhs & 31);
}

void LiftoffAssembler::emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  addv_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  subv_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  mulv_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_s_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  min_u_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_s_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  max_u_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst,
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
  dotp_s_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}
// ...
                                      LiftoffRegister src) {
// ...
                                          LiftoffRegister src) {
// ...
                                          LiftoffRegister src) {
// ...
void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  slli_d(dst.fp().toW(), lhs.fp().toW(), rhs & 63);
}

void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srai_d(dst.fp().toW(), lhs.fp().toW(), rhs & 63);
}

void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  // ...
}

void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  srli_d(dst.fp().toW(), lhs.fp().toW(), rhs & 63);
}

void LiftoffAssembler::emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  addv_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  subv_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  mulv_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  clt_s_d(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}

void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  cle_s_d(dst.fp().toW(), rhs.fp().toW(), lhs.fp().toW());
}
void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bclri_w(dst.fp().toW(), src.fp().toW(), 31);
}

void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bnegi_w(dst.fp().toW(), src.fp().toW(), 31);
}

void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  fsqrt_w(dst.fp().toW(), src.fp().toW());
}

// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fadd_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fsub_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fmul_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fdiv_w(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // ...
  fseq_w(scratch0, lhs_msa, rhs_msa);
  bsel_v(scratch0, rhs_msa, lhs_msa);
  or_v(scratch1, scratch0, rhs_msa);
  // ...
  fseq_w(scratch0, scratch1, scratch1);
  bsel_v(scratch0, scratch1, lhs_msa);
  // ...
  fsle_w(dst_msa, scratch1, scratch0);
  bsel_v(dst_msa, scratch0, scratch1);
  // ...
  fmin_w(dst_msa, dst_msa, dst_msa);
}
void LiftoffAssembler::emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // ...
  fseq_w(scratch0, lhs_msa, rhs_msa);
  bsel_v(scratch0, rhs_msa, lhs_msa);
  and_v(scratch1, scratch0, rhs_msa);
  // ...
  fseq_w(scratch0, scratch1, scratch1);
  bsel_v(scratch0, scratch1, lhs_msa);
  // ...
  fsle_w(dst_msa, scratch0, scratch1);
  bsel_v(dst_msa, scratch0, scratch1);
  // ...
  fmax_w(dst_msa, dst_msa, dst_msa);
}

// ...
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
// ...
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
// ...

void LiftoffAssembler::emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // dst = rhs < lhs ? rhs : lhs
  fclt_w(dst_msa, rhs_msa, lhs_msa);
  bsel_v(dst_msa, lhs_msa, rhs_msa);
}

void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // dst = lhs < rhs ? rhs : lhs
  fclt_w(dst_msa, lhs_msa, rhs_msa);
  bsel_v(dst_msa, lhs_msa, rhs_msa);
}
void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bclri_d(dst.fp().toW(), src.fp().toW(), 63);
}

void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bnegi_d(dst.fp().toW(), src.fp().toW(), 63);
}

void LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  fsqrt_d(dst.fp().toW(), src.fp().toW());
}

// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...
                                       LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fadd_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fsub_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fmul_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  fdiv_d(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // ...
  fseq_d(scratch0, lhs_msa, rhs_msa);
  bsel_v(scratch0, rhs_msa, lhs_msa);
  or_v(scratch1, scratch0, rhs_msa);
  // ...
  fseq_d(scratch0, scratch1, scratch1);
  bsel_v(scratch0, scratch1, lhs_msa);
  // ...
  fsle_d(dst_msa, scratch1, scratch0);
  bsel_v(dst_msa, scratch0, scratch1);
  // ...
  fmin_d(dst_msa, dst_msa, dst_msa);
}

void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // ...
  fseq_d(scratch0, lhs_msa, rhs_msa);
  bsel_v(scratch0, rhs_msa, lhs_msa);
  and_v(scratch1, scratch0, rhs_msa);
  // ...
  fseq_d(scratch0, scratch1, scratch1);
  bsel_v(scratch0, scratch1, lhs_msa);
  // ...
  fsle_d(dst_msa, scratch0, scratch1);
  bsel_v(dst_msa, scratch0, scratch1);
  // ...
  fmax_d(dst_msa, dst_msa, dst_msa);
}

void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // dst = rhs < lhs ? rhs : lhs
  fclt_d(dst_msa, rhs_msa, lhs_msa);
  bsel_v(dst_msa, lhs_msa, rhs_msa);
}

void LiftoffAssembler::emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  MSARegister dst_msa = dst.fp().toW();
  MSARegister lhs_msa = lhs.fp().toW();
  MSARegister rhs_msa = rhs.fp().toW();
  // dst = lhs < rhs ? rhs : lhs
  fclt_d(dst_msa, lhs_msa, rhs_msa);
  bsel_v(dst_msa, lhs_msa, rhs_msa);
}
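
// pmin/pmax implement the wasm "pseudo-minimum" semantics (b < a ? b : a)
// with a single compare + bsel_v, propagating whichever operand wins the
// strict compare. The full min/max above need the longer fseq/bsel/fmin
// dance so that NaNs and mixed-sign zeros come out canonical.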
// ...
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
// ...
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
// ...
                                                    LiftoffRegister src) {
// ...
                                                    LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst,
                                                    LiftoffRegister src) {
  fexupr_d(dst.fp().toW(), src.fp().toW());
}

// ...
                                                    LiftoffRegister src) {
// ...
                                                    LiftoffRegister src) {
// ...
                                                    LiftoffRegister src) {
// ...
                                                    LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_f32x4_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  ffint_s_w(dst.fp().toW(), src.fp().toW());
}

void LiftoffAssembler::emit_f32x4_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  ffint_u_w(dst.fp().toW(), src.fp().toW());
}

// ...
                                                 LiftoffRegister src) {
// ...
void LiftoffAssembler::emit_i8x16_narrow_i16x8_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  // ...
  sat_s_h(dst.fp().toW(), lhs.fp().toW(), 7);
  // ...
}

void LiftoffAssembler::emit_i8x16_narrow_i16x8_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  // ...
  sat_u_h(dst.fp().toW(), dst.fp().toW(), 7);
  // ...
}

void LiftoffAssembler::emit_i16x8_narrow_i32x4_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  // ...
  sat_s_w(dst.fp().toW(), lhs.fp().toW(), 15);
  // ...
}

void LiftoffAssembler::emit_i16x8_narrow_i32x4_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  // ...
  sat_u_w(dst.fp().toW(), dst.fp().toW(), 15);
  // ...
}
void LiftoffAssembler::emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  // ...
  srai_h(dst.fp().toW(), dst.fp().toW(), 8);
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  // ...
  srai_h(dst.fp().toW(), dst.fp().toW(), 8);
}

// ...
                                                      LiftoffRegister src) {
// ...
                                                      LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  // ...
  srai_w(dst.fp().toW(), dst.fp().toW(), 16);
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  // ...
  srai_w(dst.fp().toW(), dst.fp().toW(), 16);
}

// ...
                                                      LiftoffRegister src) {
// ...
                                                      LiftoffRegister src) {
// ...

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  // ...
  srai_d(dst.fp().toW(), dst.fp().toW(), 32);
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  // ...
  srai_d(dst.fp().toW(), dst.fp().toW(), 32);
}

// ...
                                                      LiftoffRegister src) {
// ...
                                                      LiftoffRegister src) {
// ...
void LiftoffAssembler::emit_i8x16_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  aver_u_b(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  aver_u_h(dst.fp().toW(), lhs.fp().toW(), rhs.fp().toW());
}

// ...
                                      LiftoffRegister src) {
// ...
                                      LiftoffRegister src) {
// ...
                                      LiftoffRegister src) {
// ...
void LiftoffAssembler::emit_i8x16_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  copy_s_b(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}

void LiftoffAssembler::emit_i8x16_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  copy_u_b(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}

void LiftoffAssembler::emit_i16x8_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  copy_s_h(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}

void LiftoffAssembler::emit_i16x8_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  copy_u_h(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}

void LiftoffAssembler::emit_i32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  copy_s_w(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}

void LiftoffAssembler::emit_i64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  copy_s_d(dst.gp(), lhs.fp().toW(), imm_lane_idx);
}
// ...
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
// ...
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
// ...
void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  insert_b(dst.fp().toW(), imm_lane_idx, src2.gp());
}

void LiftoffAssembler::emit_i16x8_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  insert_h(dst.fp().toW(), imm_lane_idx, src2.gp());
}

void LiftoffAssembler::emit_i32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  insert_w(dst.fp().toW(), imm_lane_idx, src2.gp());
}

void LiftoffAssembler::emit_i64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  insert_d(dst.fp().toW(), imm_lane_idx, src2.gp());
}

void LiftoffAssembler::emit_f32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  // ...
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  // ...
}

void LiftoffAssembler::emit_f64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  // ...
  if (dst != src1) {
    move_v(dst.fp().toW(), src1.fp().toW());
  }
  // ...
}
// ...
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister src3) {
// ...
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister src3) {
// ...
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister src3) {
// ...
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister src3) {
// ...
// ...
                                     LiftoffRegister src) {
// ...
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
// ...
                                              LiftoffRegister src1,
                                              LiftoffRegister src2,
                                              uint8_t imm_lane_idx) {
// ...
// ... (a further run of f16x8 and relaxed-SIMD emitters; only their
//      parameter tails -- `LiftoffRegister src`, `LiftoffRegister rhs`,
//      and two `src1, src2, src3` triples -- remain in this excerpt)
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  Branch(ool_code, ule, sp, Operand(limit_address));
}
void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  // ...
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetFirstRegSet();
    // ...
  }
  // ...
  unsigned slot_size = IsEnabled(MIPS_SIMD) ? 16 : 8;
  // ...
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    // ...
  }
  // ...
}

void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  // ...
  unsigned fp_offset = 0;
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    // ...
    fp_offset += (IsEnabled(MIPS_SIMD) ? 16 : 8);
  }
  // ...
  unsigned gp_offset = 0;
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetLastRegSet();
    // ...
  }
  // ...
}
void LiftoffAssembler::RecordSpillsInSafepoint(
    SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
    LiftoffRegList ref_spills, int spill_offset) {
  // ...
  while (!gp_spills.is_empty()) {
    LiftoffRegister reg = gp_spills.GetFirstRegSet();
    if (ref_spills.has(reg)) {
      safepoint.DefineTaggedStackSlot(spill_offset);
    }
    gp_spills.clear(reg);
    // ...
  }
}
void LiftoffAssembler::CallCWithStackBuffer(
    const std::initializer_list<VarState> args, const LiftoffRegister* rets,
    ValueKind return_kind, ValueKind out_argument_kind, int stack_bytes,
    ExternalReference ext_ref) {
  Daddu(sp, sp, -stack_bytes);
  // ...
  constexpr Register kFirstArgReg = a0;
  mov(kFirstArgReg, sp);
  // ...
  constexpr int kNumCCallArgs = 1;
  // ...
  const LiftoffRegister* next_result_reg = rets;
  if (return_kind != kVoid) {
    constexpr Register kReturnReg = v0;
    // ...
    if (return_kind == kI32) {
      sll(next_result_reg->gp(), kReturnReg, 0);
    } else if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), return_kind);
    }
    // ...
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), return_kind);
    }
    // ...
  }

  // ...
  if (out_argument_kind != kVoid) {
    // ...
  }

  Daddu(sp, sp, stack_bytes);
}
void LiftoffAssembler::CallC(const std::initializer_list<VarState> args_list,
                             ExternalReference ext_ref) {
  // ...
  const int num_args = static_cast<int>(args_list.size());
  // ...
  ParallelMove parallel_move{this};
  for (int reg_arg = 0; reg_arg < num_args; ++reg_arg) {
    parallel_move.LoadIntoRegister(LiftoffRegister{kCArgRegs[reg_arg]},
                                   // ...
  }
  parallel_move.Execute();
  // ...
}
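
// CallC routes each VarState argument straight into the next MIPS N64
// C-argument register (kCArgRegs) via one ParallelMove, which resolves
// register shuffles and stack reloads without clobbering pending sources
// before the external reference is invoked.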
// ...
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  // ...
  DCHECK(target.is_valid());
  CallWasmCodePointer(target);
}

// ...
    compiler::CallDescriptor* call_descriptor, Register target) {
  DCHECK(target.is_valid());
  // ...
}

void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  Daddu(sp, sp, -size);
  // ...
}

void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  Daddu(sp, sp, size);
}
// ...
  Register scratch = temps.Acquire();
// ...
void LiftoffAssembler::emit_s128_set_if_nan(Register dst, LiftoffRegister src,
                                            Register tmp_gp,
                                            LiftoffRegister tmp_s128,
                                            ValueKind lane_kind) {
  // ...
  if (lane_kind == kF32) {
    fcun_w(tmp_s128.fp().toW(), src.fp().toW(), src.fp().toW());
  } else {
    // ...
    fcun_d(tmp_s128.fp().toW(), src.fp().toW(), src.fp().toW());
  }
  // ...
}
void LiftoffStackSlots::Construct(int param_slots) {
  // ...
  int last_stack_slot = param_slots;
  for (auto& slot : slots_) {
    const int stack_slot = slot.dst_slot_;
    // ...
    last_stack_slot = stack_slot;
    // ...
    switch (src.loc()) {
      // ...
        if (src.kind() != kS128) {
          // ...
        }
      // ...
    }
  }
}

#endif  // V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_INL_H_
bool IsEnabled(CpuFeature f)
V8_INLINE void RecordComment(const char *comment, const SourceLocation &loc=SourceLocation::Current())
void pcnt_b(MSARegister wd, MSARegister ws)
void bnegi_w(MSARegister wd, MSARegister ws, uint32_t m)
void clt_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void copy_u_h(Register rd, MSARegister ws, uint32_t n)
void ld(Register rd, const MemOperand &rs)
void fcun_w(MSARegister wd, MSARegister ws, MSARegister wt)
void sd(Register rd, const MemOperand &rs)
void addv_b(MSARegister wd, MSARegister ws, MSARegister wt)
void aver_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void min_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fill_w(MSARegister wd, Register rs)
void seb(Register rd, Register rt)
void ld_d(Register rd, Register rj, int32_t si12)
void clti_s_w(MSARegister wd, MSARegister ws, uint32_t imm5)
void fmax_d(FPURegister fd, FPURegister fj, FPURegister fk)
void aver_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fadd_w(MSARegister wd, MSARegister ws, MSARegister wt)
void cvt_s_w(FPURegister fd, FPURegister fs)
void fsqrt_d(FPURegister fd, FPURegister fj)
void mulv_d(MSARegister wd, MSARegister ws, MSARegister wt)
void fill_d(MSARegister wd, Register rs)
void fmax_w(MSARegister wd, MSARegister ws, MSARegister wt)
void addu(Register rd, Register rs, Register rt)
void binsli_w(MSARegister wd, MSARegister ws, uint32_t m)
void sll_w(Register rd, Register rj, Register rk)
void asub_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void srli_b(MSARegister wd, MSARegister ws, uint32_t m)
void ftrunc_s_d(MSARegister wd, MSARegister ws)
void fmul_w(MSARegister wd, MSARegister ws, MSARegister wt)
void slli_w(Register rd, Register rj, int32_t ui5)
void adds_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void ffint_u_w(MSARegister wd, MSARegister ws)
void fseq_w(MSARegister wd, MSARegister ws, MSARegister wt)
void movn(const Register &rd, uint64_t imm, int shift=-1)
void srai_b(MSARegister wd, MSARegister ws, uint32_t m)
void copy_u_b(Register rd, MSARegister ws, uint32_t n)
void cle_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void sra_h(MSARegister wd, MSARegister ws, MSARegister wt)
void srl_d(Register rd, Register rj, Register rk)
void and_v(MSARegister wd, MSARegister ws, MSARegister wt)
void asub_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void pckev_w(MSARegister wd, MSARegister ws, MSARegister wt)
void copy_s_h(Register rd, MSARegister ws, uint32_t n)
void subs_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void xor_v(MSARegister wd, MSARegister ws, MSARegister wt)
void binsli_d(MSARegister wd, MSARegister ws, uint32_t m)
void sltiu(Register rd, Register rs, int32_t j)
void fsub_w(MSARegister wd, MSARegister ws, MSARegister wt)
void max_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void subs_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void shf_w(MSARegister wd, MSARegister ws, uint32_t imm8)
void ceq_b(MSARegister wd, MSARegister ws, MSARegister wt)
void clti_s_b(MSARegister wd, MSARegister ws, uint32_t imm5)
void srli_w(Register rd, Register rj, int32_t ui5)
void copy_s_b(Register rd, MSARegister ws, uint32_t n)
void pckev_b(MSARegister wd, MSARegister ws, MSARegister wt)
void srai_w(Register rd, Register rj, int32_t ui5)
void mulv_w(MSARegister wd, MSARegister ws, MSARegister wt)
void sat_u_d(MSARegister wd, MSARegister ws, uint32_t m)
void clt_s_d(MSARegister wd, MSARegister ws, MSARegister wt)
void cle_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void ilvr_h(MSARegister wd, MSARegister ws, MSARegister wt)
void cvt_d_s(FPURegister fd, FPURegister fs)
void cvt_d_w(FPURegister fd, FPURegister fs)
void clt_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fsub_d(FPURegister fd, FPURegister fj, FPURegister fk)
void addv_w(MSARegister wd, MSARegister ws, MSARegister wt)
void insert_b(MSARegister wd, uint32_t n, Register rs)
void fexdo_w(MSARegister wd, MSARegister ws, MSARegister wt)
void slli_b(MSARegister wd, MSARegister ws, uint32_t m)
void trunc_w_s(FPURegister fd, FPURegister fs)
void slli_h(MSARegister wd, MSARegister ws, uint32_t m)
void ilvl_b(MSARegister wd, MSARegister ws, MSARegister wt)
void trunc_l_d(FPURegister fd, FPURegister fs)
void fill_b(MSARegister wd, Register rs)
Simd128Register Simd128Register ra
void bclri_w(MSARegister wd, MSARegister ws, uint32_t m)
void rotr(Register rd, Register rt, uint16_t sa)
void mtc1(Register rt, FPURegister fs)
void ftrunc_s_w(MSARegister wd, MSARegister ws)
void min_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void min_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void dsrl32(Register rt, Register rd, uint16_t sa)
void adds_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void fsle_d(MSARegister wd, MSARegister ws, MSARegister wt)
void clt_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void nor_v(MSARegister wd, MSARegister ws, MSARegister wt)
friend class UseScratchRegisterScope
void ilvr_w(MSARegister wd, MSARegister ws, MSARegister wt)
void fdiv_d(FPURegister fd, FPURegister fj, FPURegister fk)
void insert_d(MSARegister wd, uint32_t n, Register rs)
void ilvl_w(MSARegister wd, MSARegister ws, MSARegister wt)
void srai_d(Register rd, Register rj, int32_t ui6)
void ffint_s_w(FPURegister fd, FPURegister fj)
void fseq_d(MSARegister wd, MSARegister ws, MSARegister wt)
void fexupr_d(MSARegister wd, MSARegister ws)
void sll_h(MSARegister wd, MSARegister ws, MSARegister wt)
void trunc_w_d(FPURegister fd, FPURegister fs)
void clt_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void fsle_w(MSARegister wd, MSARegister ws, MSARegister wt)
void fceq_d(MSARegister wd, MSARegister ws, MSARegister wt)
void clti_s_h(MSARegister wd, MSARegister ws, uint32_t imm5)
void addv_h(MSARegister wd, MSARegister ws, MSARegister wt)
void fcune_w(MSARegister wd, MSARegister ws, MSARegister wt)
void fmin_d(FPURegister fd, FPURegister fj, FPURegister fk)
void fsqrt_w(MSARegister wd, MSARegister ws)
void sra_d(Register rd, Register rj, Register rk)
void trunc_l_s(FPURegister fd, FPURegister fs)
void ceq_w(MSARegister wd, MSARegister ws, MSARegister wt)
void min_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fcle_w(MSARegister wd, MSARegister ws, MSARegister wt)
void daddiu(Register rd, Register rs, int32_t j)
void max_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void st_b(Register rd, Register rj, int32_t si12)
void bsel_v(MSARegister wd, MSARegister ws, MSARegister wt)
void fceq_w(MSARegister wd, MSARegister ws, MSARegister wt)
void subs_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void ld_b(Register rd, Register rj, int32_t si12)
void addv_d(MSARegister wd, MSARegister ws, MSARegister wt)
void mulr_q_h(MSARegister wd, MSARegister ws, MSARegister wt)
void max_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void sat_s_d(MSARegister wd, MSARegister ws, uint32_t m)
void ceq_d(MSARegister wd, MSARegister ws, MSARegister wt)
void sra_w(Register rd, Register rj, Register rk)
void subs_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void srai_h(MSARegister wd, MSARegister ws, uint32_t m)
void max_u_w(MSARegister wd, MSARegister ws, MSARegister wt)
void sll_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fmin_w(MSARegister wd, MSARegister ws, MSARegister wt)
void ilvev_b(MSARegister wd, MSARegister ws, MSARegister wt)
void add_a_d(MSARegister wd, MSARegister ws, MSARegister wt)
void dotp_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void subv_h(MSARegister wd, MSARegister ws, MSARegister wt)
void srl(Register rd, Register rt, uint16_t sa)
void ffint_u_d(MSARegister wd, MSARegister ws)
void vshf_b(MSARegister wd, MSARegister ws, MSARegister wt)
void adds_s_b(MSARegister wd, MSARegister ws, MSARegister wt)
void asub_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void sll(Register rd, Register rt, uint16_t sa, bool coming_from_nop=false)
void fcle_d(MSARegister wd, MSARegister ws, MSARegister wt)
void copy_u_w(Register rd, MSARegister ws, uint32_t n)
void clt_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void cvt_d_l(FPURegister fd, FPURegister fs)
void insert_h(MSARegister wd, uint32_t n, Register rs)
void mfc1(Register rt, FPURegister fs)
void subv_b(MSARegister wd, MSARegister ws, MSARegister wt)
void cvt_s_d(FPURegister fd, FPURegister fs)
void clt_u_w(MSARegister wd, MSARegister ws, MSARegister wt)
void min_u_w(MSARegister wd, MSARegister ws, MSARegister wt)
void ilvl_h(MSARegister wd, MSARegister ws, MSARegister wt)
void fmul_d(FPURegister fd, FPURegister fj, FPURegister fk)
void cvt_s_l(FPURegister fd, FPURegister fs)
void cle_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void ftrunc_u_w(MSARegister wd, MSARegister ws)
void sat_s_h(MSARegister wd, MSARegister ws, uint32_t m)
void ilvr_b(MSARegister wd, MSARegister ws, MSARegister wt)
void ffint_s_d(MSARegister wd, MSARegister ws)
void fcune_d(MSARegister wd, MSARegister ws, MSARegister wt)
void dmtc1(Register rt, FPURegister fs)
void copy_s_d(Register rd, MSARegister ws, uint32_t n)
void fcun_d(MSARegister wd, MSARegister ws, MSARegister wt)
void min_s_h(MSARegister wd, MSARegister ws, MSARegister wt)
void subv_w(MSARegister wd, MSARegister ws, MSARegister wt)
void cle_u_h(MSARegister wd, MSARegister ws, MSARegister wt)
void ftrunc_u_d(MSARegister wd, MSARegister ws)
void max_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void insert_w(MSARegister wd, uint32_t n, Register rs)
void srli_h(MSARegister wd, MSARegister ws, uint32_t m)
void ceq_h(MSARegister wd, MSARegister ws, MSARegister wt)
void cle_u_w(MSARegister wd, MSARegister ws, MSARegister wt)
void cle_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void sat_s_w(MSARegister wd, MSARegister ws, uint32_t m)
void move_v(MSARegister wd, MSARegister ws)
void sat_u_h(MSARegister wd, MSARegister ws, uint32_t m)
void fadd_d(FPURegister fd, FPURegister fj, FPURegister fk)
void adds_u_b(MSARegister wd, MSARegister ws, MSARegister wt)
void bnegi_d(MSARegister wd, MSARegister ws, uint32_t m)
void daddu(Register rd, Register rs, Register rt)
void fdiv_w(MSARegister wd, MSARegister ws, MSARegister wt)
void srl_h(MSARegister wd, MSARegister ws, MSARegister wt)
void copy_s_w(Register rd, MSARegister ws, uint32_t n)
void mulv_h(MSARegister wd, MSARegister ws, MSARegister wt)
void slli_d(Register rd, Register rj, int32_t ui6)
void st_d(Register rd, Register rj, int32_t si12)
void fill_h(MSARegister wd, Register rs)
void max_s_w(MSARegister wd, MSARegister ws, MSARegister wt)
void stop(Condition cond=al, int32_t code=kDefaultStopCode)
void fclt_w(MSARegister wd, MSARegister ws, MSARegister wt)
void drotr32(Register rd, Register rt, uint16_t sa)
void bclri_d(MSARegister wd, MSARegister ws, uint32_t m)
void srl_b(MSARegister wd, MSARegister ws, MSARegister wt)
void pckev_h(MSARegister wd, MSARegister ws, MSARegister wt)
void srli_d(Register rd, Register rj, int32_t ui6)
void sll_d(Register rd, Register rj, Register rk)
void dmfc1(Register rt, FPURegister fs)
void or_v(MSARegister wd, MSARegister ws, MSARegister wt)
void sra_b(MSARegister wd, MSARegister ws, MSARegister wt)
void fclt_d(MSARegister wd, MSARegister ws, MSARegister wt)
void sat_u_w(MSARegister wd, MSARegister ws, uint32_t m)
void subv_d(MSARegister wd, MSARegister ws, MSARegister wt)
void seh(Register rd, Register rt)
void srl_w(Register rd, Register rj, Register rk)
void cle_s_d(MSARegister wd, MSARegister ws, MSARegister wt)
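The MSA entries above are the raw instruction emitters this port builds its SIMD lowering from. A minimal sketch of driving a few of them follows; the helper name, `masm`, and the operand choices are illustrative assumptions, not code from this header. kSimd128ScratchReg is the port's MSA scratch register listed further below.
// Sketch: lane-wise i16x8 "dst = max(lhs * splat(scalar), lhs)" using the
// raw MSA emitters declared above. All names except the emitters and
// kSimd128ScratchReg are illustrative.
void EmitMulMaxI16x8(MacroAssembler* masm, MSARegister dst, MSARegister lhs,
                     Register scalar) {
  masm->fill_h(kSimd128ScratchReg, scalar);    // splat GPR into 8 x i16 lanes
  masm->mulv_h(dst, lhs, kSimd128ScratchReg);  // lane-wise multiply
  masm->max_s_h(dst, dst, lhs);                // lane-wise signed max
}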
static bool IsSupported(CpuFeature f)
static constexpr MachineType Uint8()
static constexpr MachineType Int32()
static constexpr MachineType Uint32()
static constexpr MachineType Uint16()
static constexpr MachineType Int16()
static constexpr MachineType Int64()
static constexpr MachineType Int8()
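CpuFeatures::IsSupported and the MachineType constructors above are the usual guards around such emitters. A hedged sketch; MIPS_SIMD is V8's MSA feature flag, and the surrounding control flow is illustrative only.
// Sketch: take an MSA path only when the CPU supports it, and describe the
// access width with MachineType. Illustrative structure, not code from
// this header.
MachineType type = MachineType::Int32();
if (CpuFeatures::IsSupported(MIPS_SIMD)) {
  // ... emit the MSA lowering for `type` ...
} else {
  // ... emit a scalar fallback for `type` ...
}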
void Mul(const Register &rd, const Register &rn, const Register &rm)
void Abort(AbortReason msg)
void LoadStackLimit(Register destination, StackLimitKind kind)
void Call(Register target, Condition cond=al)
void Lbu(Register rd, const MemOperand &rs)
void Trunc_uw_s(FPURegister fd, FPURegister fj, FPURegister scratch)
void Ctz(Register rd, Register rs)
void Cvt_s_uw(FPURegister fd, FPURegister fs)
void Scd(Register rd, const MemOperand &rs)
void mov(Register rd, Register rj)
void Dins(Register rt, Register rs, uint16_t pos, uint16_t size)
void Sh(Register rd, const MemOperand &rs)
void SmiUntag(Register reg, SBit s=LeaveCC)
void Neg_s(FPURegister fd, FPURegister fj)
void Uswc1(FPURegister fd, const MemOperand &rs, Register scratch)
void Ext(const VRegister &vd, const VRegister &vn, const VRegister &vm, int index)
void Float64Min(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void CompareIsNanF64(FPURegister cmp1, FPURegister cmp2, CFRegister cd=FCC0)
void Lb(Register rd, const MemOperand &rs)
void CompareF32(FPURegister cmp1, FPURegister cmp2, FPUCondition cc, CFRegister cd=FCC0)
void BranchFalseF(Label *target, CFRegister cc=FCC0)
void LoadZeroIfNotFPUCondition(Register dest, CFRegister cc=FCC0)
void Uld(Register rd, const MemOperand &rs)
void Swc1(FPURegister fs, const MemOperand &dst)
void LoadLane(NeonSize sz, NeonListOperand dst_list, uint8_t lane, NeonMemOperand src)
void Move(Register dst, Tagged< Smi > smi)
void Float64Max(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void Movz(Register rd, Register rj, Register rk)
void JumpIfSmi(Register value, Label *smi_label)
void BranchShort(Label *label, Condition cond, Register r1, const Operand &r2, bool need_link=false)
void Lwu(Register rd, const MemOperand &rs)
void BranchFalseShortF(Label *target, CFRegister cc=FCC0)
void Clz(const Register &rd, const Register &rn)
void Ulwu(Register rd, const MemOperand &rs)
void Movn(Register rd, Register rj, Register rk)
void Trunc_ul_s(FPURegister fd, FPURegister fs, FPURegister scratch, Register result=no_reg)
void Sd(Register rd, const MemOperand &rs)
void Lwc1(FPURegister fd, const MemOperand &src)
void Move_d(FPURegister dst, FPURegister src)
void Trunc_s_s(FPURegister fd, FPURegister fs)
void Uldc1(FPURegister fd, const MemOperand &rs, Register scratch)
void DropAndRet(int drop)
void SmiTag(Register reg, SBit s=LeaveCC)
void CompareWord(Condition cond, Register dst, Register lhs, const Operand &rhs)
void Ins(const VRegister &vd, int vd_index, const VRegister &vn, int vn_index)
void li(Register rd, Operand j, LiFlags mode=OPTIMIZE_SIZE)
void Sw(Register rd, const MemOperand &rs)
void And(Register dst, Register src1, const Operand &src2, Condition cond=al)
void Float64MaxOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void Lhu(Register rd, const MemOperand &rs)
void BranchLong(int32_t offset, BranchDelaySlot bdslot=PROTECT)
void FmoveLow(Register dst_low, FPURegister src)
void Jump(Register target, Condition cond=al)
void Usw(Register rd, const MemOperand &rs)
void Trunc_d_d(FPURegister fd, FPURegister fs)
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg=false)
void Float64MinOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void Float32MinOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void Trunc_uw_d(FPURegister fd, FPURegister fs, FPURegister scratch)
void Float32Min(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void MSARoundD(MSARegister dst, MSARegister src, FPURoundingMode mode)
void StoreLane(NeonSize sz, NeonListOperand src_list, uint8_t lane, NeonMemOperand dst)
void Sdc1(FPURegister fs, const MemOperand &dst)
void CheckPageFlag(Register object, int mask, Condition cc, Label *condition_met)
void Lld(Register rd, const MemOperand &rs)
int CallCFunction(ExternalReference function, int num_arguments, SetIsolateDataSlots set_isolate_data_slots=SetIsolateDataSlots::kYes, Label *return_label=nullptr)
void Ulhu(Register rd, const MemOperand &rs)
void Lw(Register rd, const MemOperand &rs)
void Neg_d(FPURegister fd, FPURegister fk)
void Lh(Register rd, const MemOperand &rs)
void Usdc1(FPURegister fd, const MemOperand &rs, Register scratch)
void Cvt_d_uw(FPURegister fd, FPURegister fs)
void Float32MaxOutOfLine(FPURegister dst, FPURegister src1, FPURegister src2)
void Float32Max(FPURegister dst, FPURegister src1, FPURegister src2, Label *out_of_line)
void Usd(Register rd, const MemOperand &rs)
void Sc(Register rd, const MemOperand &rs)
void MSARoundW(MSARegister dst, MSARegister src, FPURoundingMode mode)
void AllocateStackSpace(Register bytes)
void Popcnt(Register dst, Register src)
void Dpopcnt(Register rd, Register rs)
void Ll(Register rd, const MemOperand &rs)
void Ulwc1(FPURegister fd, const MemOperand &rs, Register scratch)
void CallRecordWriteStubSaveRegisters(Register object, Operand offset, SaveFPRegsMode fp_mode, StubCallMode mode=StubCallMode::kCallBuiltinPointer)
void Trunc_ul_d(FPURegister fd, FPURegister fs, FPURegister scratch, Register result=no_reg)
void Sb(Register rd, const MemOperand &rs)
void Branch(Label *label, bool need_link=false)
void Ld(Register rd, const MemOperand &rs)
void Ulw(Register rd, const MemOperand &rs)
void Ulh(Register rd, const MemOperand &rs)
void PrepareCallCFunction(int num_reg_arguments, int num_double_registers=0, Register scratch=no_reg)
void LoadZeroIfFPUCondition(Register dest, CFRegister cc=FCC0)
void CompareIsNanF32(FPURegister cmp1, FPURegister cmp2, CFRegister cd=FCC0)
void BranchTrueShortF(Label *target, CFRegister cc=FCC0)
void Dclz(Register rd, Register rs)
void Dctz(Register rd, Register rs)
void BranchMSA(Label *target, MSABranchDF df, MSABranchCondition cond, MSARegister wt, BranchDelaySlot bd=PROTECT)
void CompareF64(FPURegister cmp1, FPURegister cmp2, FPUCondition cc, CFRegister cd=FCC0)
void Dext(Register rt, Register rs, uint16_t pos, uint16_t size)
void Ldc1(FPURegister fd, const MemOperand &src)
void Ush(Register rd, const MemOperand &rs, Register scratch)
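The U-prefixed helpers above (Ulw, Usw, Uld, Usd, Ush, Uldc1, Usdc1, ...) wrap possibly-unaligned accesses; the sub-word and FP forms take an explicit scratch register. A sketch, using t8 as the scratch by convention; the base registers and offsets are illustrative.
// Sketch: copy a mixed-alignment record field by field using the
// unaligned-access helpers declared above. Register names and offsets
// are illustrative.
masm->Ulw(value, MemOperand(src, 0));         // possibly-unaligned 32-bit load
masm->Ush(value, MemOperand(dst, 0), t8);     // possibly-unaligned 16-bit store
masm->Uldc1(fvalue, MemOperand(src, 8), t8);  // possibly-unaligned f64 load
masm->Usdc1(fvalue, MemOperand(dst, 8), t8);  // possibly-unaligned f64 store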
static constexpr MainThreadFlags kPointersToHereAreInterestingMask
static constexpr MainThreadFlags kPointersFromHereAreInterestingMask
constexpr bool has(RegisterT reg) const
static constexpr int kInstanceDataOffset
static constexpr int kFeedbackVectorOffset
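kPointersToHereAreInterestingMask and kPointersFromHereAreInterestingMask pair with CheckPageFlag (listed earlier) to pre-filter write barriers. A sketch; the MemoryChunk scoping, the implicit flags-to-int conversion, and all register and label names are assumptions.
// Sketch: skip the record-write stub when the source page has no
// interesting pointers. `object` and `done` are illustrative.
Label done;
masm->CheckPageFlag(object, MemoryChunk::kPointersFromHereAreInterestingMask,
                    eq, &done);
// ... call the record-write stub ...
masm->bind(&done);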
void emit_i8x16_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_store_nonzero_if_nan(Register dst, DoubleRegister src, ValueKind kind)
bool emit_f32x4_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_shli(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i8x16_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_relaxed_q15mulr_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i8x16_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_trunc(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i32_rems(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
void emit_i32_clz(Register dst, Register src)
void emit_i8x16_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_store_nonzero(Register dst)
void emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_eqz(Register dst, Register src)
void emit_i32x4_uconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void RecordUsedSpillOffset(int offset)
void emit_i16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i8x16_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void FillI64Half(Register, int offset, RegPairHalf)
bool emit_f16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void CallCWithStackBuffer(const std::initializer_list< VarState > args, const LiftoffRegister *rets, ValueKind return_kind, ValueKind out_argument_kind, int stack_bytes, ExternalReference ext_ref)
void emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_dot_i16x8_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_dot_i8x16_i7x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void TailCallNativeWasmCode(Address addr)
void emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void SpillInstanceData(Register instance)
void RecordOolSpillSpaceSize(int size)
void emit_f64x2_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void LoadTransform(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LoadTransformationKind transform, uint32_t *protected_load_pc, bool i64_offset)
void emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_store_nonzero_if_nan(Register dst, LiftoffRegister src, Register tmp_gp, LiftoffRegister tmp_s128, ValueKind lane_kind)
void emit_i32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_i16x8_sconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicCompareExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister value, StoreType type, bool i64_offset)
void emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_not(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i16(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_dot_i8x16_i7x16_add_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister acc)
bool emit_f16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64x2_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void Fill(LiftoffRegister, int offset, ValueKind)
void emit_i64_signextend_i32(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void LoadFullPointer(Register dst, Register src_addr, int32_t offset_imm)
void DeallocateStackSlot(uint32_t size)
void emit_i8x16_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_abs(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StackCheck(Label *ool_code)
void emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f32x4_promote_low_f16x8(LiftoffRegister dst, LiftoffRegister src)
void Store(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, bool is_store_mem=false, bool i64_offset=false)
bool emit_f16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint32_t *protected_load_pc=nullptr, bool is_load_mem=false, bool i64_offset=false, bool needs_shift=false)
void emit_i64x2_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f16x8_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_signextend_i16(Register dst, Register src)
void emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_divs(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_f32_neg(DoubleRegister dst, DoubleRegister src)
void emit_f64x2_promote_low_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
void LoadLane(LiftoffRegister dst, LiftoffRegister src, Register addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint8_t lane, uint32_t *protected_load_pc, bool i64_offset)
void emit_i16x8_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f64x2_trunc(LiftoffRegister dst, LiftoffRegister src)
void CallBuiltin(Builtin builtin)
void emit_i8x16_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_relaxed_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_abs(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallFrameSetupStub(int declared_function_index)
void emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_remu(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
bool emit_f64x2_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSpillAddress(Register dst, int offset, ValueKind kind)
void emit_f32x4_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f64_neg(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32_ctz(Register dst, Register src)
void StoreCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind, Register frame_pointer)
void emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void AtomicExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i32x4_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Spill(VarState *slot)
void emit_s128_select(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask)
bool emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void emit_i8x16_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void AssertUnreachable(AbortReason reason)
void CallC(const std::initializer_list< VarState > args, ExternalReference ext_ref)
void emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i8(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_bitmask(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero)
bool emit_f16x8_demote_f32x4_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f64x2_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void DropStackSlotsAndRet(uint32_t num_stack_slots)
void emit_f32x4_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_and_not(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_relaxed_laneselect(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask, int lane_width)
void emit_i32x4_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_sconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void LoadConstant(LiftoffRegister, WasmValue)
void emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
int GetTotalFrameSize() const
void emit_i16x8_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallNativeWasmCode(Address addr)
bool emit_f32x4_trunc(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PrepareTailCall(int num_callee_stack_params, int stack_param_delta)
void emit_f32x4_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_ceil(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_divu(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero)
void emit_cond_jump(Condition, Label *, ValueKind value, Register lhs, Register rhs, const FreezeCacheState &frozen)
void LoadFromInstance(Register dst, Register instance, int offset, int size)
void emit_i8x16_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_smi_check(Register obj, Label *target, SmiCheckMode mode, const FreezeCacheState &frozen)
void emit_f64_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
static bool NeedsAlignment(ValueKind kind)
void emit_f32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
static int SlotSizeForType(ValueKind kind)
void LoadProtectedPointer(Register dst, Register src_addr, int32_t offset)
void emit_i16x8_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
bool emit_f32x4_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_popcnt(LiftoffRegister dst, LiftoffRegister src)
void emit_u32_to_uintptr(Register dst, Register src)
bool supports_f16_mem_access()
void emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_mul(Register dst, Register lhs, Register rhs)
void emit_i8x16_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadInstanceDataFromFrame(Register dst)
void emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32x4_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i64_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64x2_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i32_popcnt(Register dst, Register src)
void emit_i16x8_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void MoveStackValue(uint32_t dst_offset, uint32_t src_offset, ValueKind)
void emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind)
void emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_popcnt(LiftoffRegister dst, LiftoffRegister src)
void AtomicStore(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, bool i64_offset)
static constexpr int kStackSlotSize
bool emit_f16x8_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PatchPrepareStackFrame(int offset, SafepointTableBuilder *, bool feedback_vector_slot, size_t stack_param_slots)
void emit_f32x4_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_ptrsize_cond_jumpi(Condition, Label *, Register lhs, int32_t imm, const FreezeCacheState &frozen)
void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind)
void emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_set_cond(Condition, Register dst, Register lhs, Register rhs)
void emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
void TailCallIndirect(compiler::CallDescriptor *call_descriptor, Register target)
void emit_i16x8_q15mulr_sat_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i16x8_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AllocateStackSlot(Register addr, uint32_t size)
void emit_i32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadTaggedPointer(Register dst, Register src_addr, Register offset_reg, int32_t offset_imm, uint32_t *protected_load_pc=nullptr, bool offset_reg_needs_shift=false)
void PushRegisters(LiftoffRegList)
void emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_cond_jumpi(Condition, Label *, Register lhs, int imm, const FreezeCacheState &frozen)
void emit_i64_eqz(Register dst, LiftoffRegister src)
void StoreTaggedPointer(Register dst_addr, Register offset_reg, int32_t offset_imm, Register src, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, SkipWriteBarrier=kNoSkipWriteBarrier)
void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src)
void bailout(LiftoffBailoutReason reason, const char *detail)
void emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void IncrementSmi(LiftoffRegister dst, int offset)
void PopRegisters(LiftoffRegList)
LiftoffRegister GetUnusedRegister(RegClass rc, std::initializer_list< LiftoffRegister > try_first, LiftoffRegList pinned)
void emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shuffle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, const uint8_t shuffle[16], bool is_swizzle)
void emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_select(LiftoffRegister dst, Register condition, LiftoffRegister true_value, LiftoffRegister false_value, ValueKind kind)
bool emit_i16x8_uconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
bool emit_type_conversion(WasmOpcode opcode, LiftoffRegister dst, LiftoffRegister src, Label *trap=nullptr)
void emit_i8x16_neg(LiftoffRegister dst, LiftoffRegister src)
void AtomicLoad(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, bool i64_offset)
void emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadReturnStackSlot(LiftoffRegister, int offset, ValueKind)
void emit_i32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_u(LiftoffRegister dst, LiftoffRegister src)
void LoadTaggedPointerFromInstance(Register dst, Register instance, int offset)
void emit_v128_anytrue(LiftoffRegister dst, LiftoffRegister src)
void FillStackSlotsWithZero(int start, int size)
void emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_const(LiftoffRegister dst, const uint8_t imms[16])
void StoreLane(Register dst, Register offset, uintptr_t offset_imm, LiftoffRegister src, StoreType type, uint8_t lane, uint32_t *protected_store_pc, bool i64_offset)
Register LoadOldFramePointer()
void CheckTierUp(int declared_func_index, int budget_used, Label *ool_label, const FreezeCacheState &frozen)
void emit_i16x8_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64x2_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallIndirect(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, Register target)
void RecordSpillsInSafepoint(SafepointTableBuilder::Safepoint &safepoint, LiftoffRegList all_spills, LiftoffRegList ref_spills, int spill_offset)
void emit_f64x2_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void clear_i32_upper_half(Register dst)
void emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_sqrt(LiftoffRegister dst, LiftoffRegister src)
void LoadTrustedPointer(Register dst, Register src_addr, int offset, IndirectPointerTag tag)
void emit_i32x4_relaxed_trunc_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
static constexpr int StaticStackFrameSize()
void emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_signextend_i8(Register dst, Register src)
void emit_i8x16_bitmask(LiftoffRegister dst, LiftoffRegister src)
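The emit_* hooks above are the per-opcode lowering surface Liftoff calls while walking a function body. A sketch of the calling side; `asm_` matches the member listed below, while `AddOutOfLineTrap` and the register names are hypothetical stand-ins.
// Sketch: lowering i32.divu with its divide-by-zero trap, then testing
// the result, via the hooks declared above. AddOutOfLineTrap() is a
// hypothetical helper that binds a label in out-of-line trap code.
Label* trap_div_by_zero = AddOutOfLineTrap();
asm_->emit_i32_divu(dst, lhs, rhs, trap_div_by_zero);
asm_->emit_i32_eqz(flag, dst);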
constexpr unsigned GetNumRegsSet() const
constexpr Register gp() const
base::SmallVector< Slot, 8 > slots_
static int SlotSizeInBytes(const Slot &slot)
void Construct(int param_slots)
LiftoffAssembler *const asm_
static constexpr int ToTagged(int offset)
#define ATOMIC_BINOP_CASE(op, inst)
#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(load_instr, store_instr, cmp_reg)
#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(load_instr, store_instr)
#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT( load_linked, store_conditional, sign_extend, size, representation)
#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT( load_linked, store_conditional, sign_extend, size, representation)
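The ASSEMBLE_ATOMIC_* macros above generate load-linked/store-conditional retry loops. In spirit they expand to something like the following sketch of the pattern, not the actual macro bodies; register names are illustrative.
// Sketch: 64-bit atomic exchange via LL/SC. On MIPS, Scd writes 1 into
// its register on success and 0 on failure, so the loop retries while
// it reads back zero.
Label retry;
masm->bind(&retry);
masm->Lld(old_value, MemOperand(addr, 0));  // load-linked
masm->mov(scratch, new_value);
masm->Scd(scratch, MemOperand(addr, 0));    // store-conditional
masm->BranchShort(&retry, eq, scratch, Operand(zero_reg));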
#define COMPRESS_POINTERS_BOOL
#define V8_ENABLE_SANDBOX_BOOL
base::Vector< const DirectHandle< Object > > args
ZoneVector< RpoNumber > & result
#define FP_BINOP(name, instruction)
#define FP_UNOP_RETURN_TRUE(name, instruction)
#define SIMD_BINOP(name1, name2)
#define I64_BINOP(name, instruction)
#define I64_BINOP_I(name, instruction)
#define I32_SHIFTOP_I(name, instruction, instruction1)
#define FP_UNOP(name, instruction)
#define I64_SHIFTOP_I(name, instruction, instruction1)
#define I32_BINOP(name, instruction)
#define I32_BINOP_I(name, instruction)
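The generator macros above stamp out one emit_* method per opcode. A plausible expansion shape for the I32_BINOP family; the real macro bodies live in this header.
// Plausible shape of the I32_BINOP-style generators (illustrative):
#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }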
MovableLabel continuation
std::optional< OolTrapLabel > trap
constexpr bool IsPowerOfTwo(T value)
constexpr int WhichPowerOfTwo(T value)
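IsPowerOfTwo and WhichPowerOfTwo support the usual strength reduction of constant multiplies into shifts. A sketch against the emit_* hooks listed earlier; `imm`, `dst`, and `lhs` are illustrative.
// Sketch: multiply-by-constant becomes a shift when the immediate is a
// positive power of two.
if (imm > 0 && IsPowerOfTwo(imm)) {
  asm_->emit_i64_shli(dst, lhs, WhichPowerOfTwo(imm));
} else {
  asm_->emit_i64_muli(dst, lhs, imm);
}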
void EmitAllTrue(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister src, VectorFormat format)
void Store(LiftoffAssembler *assm, LiftoffRegister src, MemOperand dst, ValueKind kind)
void StoreToMemory(LiftoffAssembler *assm, MemOperand dst, const LiftoffAssembler::VarState &src)
constexpr DoubleRegister kScratchDoubleReg
void EmitAnyTrue(LiftoffAssembler *assm, LiftoffRegister dst, LiftoffRegister src)
MemOperand GetStackSlot(int offset)
void Load(LiftoffAssembler *assm, LiftoffRegister dst, MemOperand src, ValueKind kind)
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr DoubleRegister kScratchDoubleReg2
MemOperand GetInstanceDataOperand()
MemOperand GetMemOp(LiftoffAssembler *assm, UseScratchRegisterScope *temps, Register addr, Register offset, int32_t offset_imm, unsigned shift_amount=0)
FPUCondition ConditionToConditionCmpFPU(Condition condition, bool *predicate)
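ConditionToConditionCmpFPU maps an integer Condition onto an FPUCondition plus a predicate that selects the branch sense. A sketch; `cond`, `lhs`, `rhs`, and `target` are illustrative.
// Sketch: FPU compare-and-branch using the condition translation above.
bool predicate;
FPUCondition fcond = ConditionToConditionCmpFPU(cond, &predicate);
masm->CompareF64(lhs, rhs, fcond);
if (predicate) {
  masm->BranchTrueShortF(target);
} else {
  masm->BranchFalseShortF(target);
}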
constexpr Register kGpParamRegisters[]
constexpr DoubleRegList kLiftoffAssemblerFpCacheRegs
int declared_function_index(const WasmModule *module, int func_index)
constexpr int value_kind_size(ValueKind kind)
static constexpr LiftoffRegList kGpCacheRegList
static constexpr LiftoffRegList kFpCacheRegList
constexpr bool is_reference(ValueKind kind)
LiftoffAssembler::ValueKindSig ValueKindSig
constexpr Register no_reg
constexpr VFPRoundingMode kRoundToNearest
constexpr VFPRoundingMode kRoundToMinusInf
constexpr int kTaggedSize
constexpr int kSimd128Size
kUnsignedGreaterThanEqual
DwVfpRegister DoubleRegister
constexpr Simd128Register kSimd128RegZero
kWasmInternalFunctionIndirectPointerTag
constexpr Register kScratchReg2
constexpr Register kScratchReg
static constexpr int kProtectedInstanceDataOffset
constexpr VFPRoundingMode kRoundToPlusInf
constexpr int kSystemPointerSize
constexpr Simd128Register kSimd128ScratchReg
V8_EXPORT_PRIVATE FlagValues v8_flags
const intptr_t kSmiTagMask
constexpr VFPRoundingMode kRoundToZero
constexpr uint8_t kInstrSize
constexpr Register kCArgRegs[]
std::unique_ptr< AssemblerBuffer > ExternalAssemblerBuffer(void *start, int size)
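ExternalAssemblerBuffer lets an assembler emit into caller-owned memory instead of a growable internal buffer. Sketch; the buffer size and alignment are illustrative.
// Sketch: back an assembler with a fixed, caller-owned code buffer.
alignas(16) static uint8_t code_space[4096];
std::unique_ptr<AssemblerBuffer> buffer =
    ExternalAssemblerBuffer(code_space, sizeof(code_space));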
#define DCHECK_LE(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
Register cached_instance_data
#define V8_LIKELY(condition)