#ifndef V8_WASM_BASELINE_RISCV_LIFTOFF_ASSEMBLER_RISCV_INL_H_
#define V8_WASM_BASELINE_RISCV_LIFTOFF_ASSEMBLER_RISCV_INL_H_

  int stack_param_delta) {
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
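
// The fragment below belongs to CheckTierUp(declared_func_index, budget_used,
// ool_label, frozen): it loads the function's tiering budget from the
// instance's budget array, subtracts budget_used, stores the result back, and
// branches to ool_label once the budget drops below zero.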
  const FreezeCacheState& frozen) {
  Register budget_array = temps.Acquire();
  WasmTrustedInstanceData::kTieringBudgetArrayOffset);
  int budget_arr_offset = kInt32Size * declared_func_index;
  MemOperand budget_addr(budget_array, budget_arr_offset);
  Lw(budget, budget_addr);
  Sub32(budget, budget, Operand{budget_used});
  Sw(budget, budget_addr);
  Branch(ool_label, lt, budget, Operand{0});
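
// LoadOldFramePointer: when --experimental-wasm-growable-stacks is disabled
// the current fp can be returned directly; otherwise the code below appears to
// compare the frame marker and take the call_runtime path on a stack-segment
// frame to recover the old frame pointer.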
  if (!v8_flags.experimental_wasm_growable_stacks) {
  Label done, call_runtime;
  &call_runtime, eq, old_fp.gp(),
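
// PatchPrepareStackFrame(offset, safepoint_table_builder, feedback_vector_slot,
// stack_param_slots): once the final frame size is known, a small
// MacroAssembler patches the reserved frame-construction code. Frames smaller
// than v8_flags.stack_size * 1024 just grow sp; larger frames go through an
// out-of-line stack check that may end up calling Builtin::kWasmStackOverflow.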
    int offset, SafepointTableBuilder* safepoint_table_builder,
    bool feedback_vector_slot, size_t stack_param_slots) {
  if (feedback_vector_slot) {
  constexpr int kAvailableSpace = 256;
  MacroAssembler patching_assembler(
  patching_assembler.AddWord(sp, sp, Operand(-frame_size));
  patching_assembler.GenPCRelativeJump(kScratchReg, imm32);
  if (frame_size < v8_flags.stack_size * 1024) {
  AddWord(stack_limit, stack_limit, Operand(frame_size));
  if (v8_flags.experimental_wasm_growable_stacks) {
  regs_to_save.set(WasmHandleStackOverflowDescriptor::FrameBaseRegister());
  AddWord(WasmHandleStackOverflowDescriptor::FrameBaseRegister(), fp,
  safepoint_table_builder->DefineSafepoint(this);
  Call(static_cast<Address>(Builtin::kWasmStackOverflow),
  safepoint_table_builder->DefineSafepoint(this);
  AddWord(sp, sp, Operand(-frame_size));
#define FP_BINOP(name, instruction)                                           \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs,  \
                                     DoubleRegister rhs) {                    \
    instruction(dst, lhs, rhs);                                               \
  }
#define FP_UNOP(name, instruction)                                            \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                    \
  }
#define FP_UNOP_RETURN_TRUE(name, instruction)                                \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src, kScratchDoubleReg);                                 \
    return true;                                                              \
  }

#undef FP_UNOP_RETURN_TRUE
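
// Illustrative sketch only (the concrete invocations are not part of this
// fragment): the macros above are instantiated once per operation, so an
// invocation along the lines of
//   FP_BINOP(f32_add, fadd_s)
// would expand to
//   void LiftoffAssembler::emit_f32_add(DoubleRegister dst, DoubleRegister lhs,
//                                       DoubleRegister rhs) {
//     fadd_s(dst, lhs, rhs);
//   }
// The (name, instruction) pairs shown here are assumptions based on the macro
// shape, not text taken from this excerpt.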
  LiftoffRegister true_value,
  LiftoffRegister false_value,
  const FreezeCacheState& frozen) {
  LiftoffRegister src) {
  VRegister src_v = src.fp().toV();
  VRegister dst_v = dst.fp().toV();
  const uint8_t shuffle[16],
  VRegister dst_v = dst.fp().toV();
  VRegister lhs_v = lhs.fp().toV();
  VRegister rhs_v = rhs.fp().toV();
  if (dst_v == lhs_v) {
  } else if (dst_v == rhs_v) {
  LiftoffRegister rhs) {
  if (dst == lhs || dst == rhs) {
  vrgather_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister mask,
  LiftoffRegister src) {
  vmv_vx(dst.fp().toV(), src.gp());
  LiftoffRegister src) {
  vmv_vx(dst.fp().toV(), src.gp());
  LiftoffRegister src) {
  vmv_vx(dst.fp().toV(), src.gp());
  LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
  LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
  LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
  LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E64, m1);
  LiftoffRegister src) {
  vfmv_vf(dst.fp().toV(), src.fp());
  LiftoffRegister src) {
  vfmv_vf(dst.fp().toV(), src.fp());
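
// The widening-multiply (extmul) emitters below share one pattern: vwmul_vv /
// vwmulu_vv produce a double-width result, so when dst aliases src1 or src2
// the product is first written to a scratch vector group and only then copied
// into dst with vmv_vv.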
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmul_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmulu_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmul_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmulu_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmul_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  VRegister dst_v = dst.fp().toV();
  if (dst == src1 || dst == src2) {
  vwmulu_vv(dst_v, src2.fp().toV(), src1.fp().toV());
  if (dst == src1 || dst == src2) {
  vmv_vv(dst.fp().toV(), dst_v);
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  LiftoffRegister src1,
  LiftoffRegister src2) {
  vsmul_vv(dst.fp().toV(), src1.fp().toV(), src2.fp().toV());
  LiftoffRegister src1,
  LiftoffRegister src2) {
  vsmul_vv(dst.fp().toV(), src1.fp().toV(), src2.fp().toV());
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E8, m1);
  LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E16, m1);
  LiftoffRegister rhs) {
  WasmRvvEq(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
  LiftoffRegister rhs) {
  WasmRvvNe(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
  LiftoffRegister rhs) {
  WasmRvvGtS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
  LiftoffRegister rhs) {
  WasmRvvGtU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
  LiftoffRegister rhs) {
  WasmRvvGeS(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
  LiftoffRegister rhs) {
  WasmRvvGeU(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV(), E32, m1);
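
// The floating-point lane compares below use the usual RVV mask idiom: a vmf*
// compare writes the per-lane predicate into v0, dst is cleared with
// vmv_vx(dst, zero_reg), and vmerge_vi(dst, -1, dst) then sets all-ones in the
// lanes where the predicate holds.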
  LiftoffRegister rhs) {
  vmfeq_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmfne_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmfle_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister src) {
  if (dst.fp().toV() != src.fp().toV()) {
  vfwcvt_f_x_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  if (dst.fp().toV() != src.fp().toV()) {
  vfwcvt_f_xu_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  if (dst.fp().toV() != src.fp().toV()) {
  vfwcvt_f_f_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vfncvt_f_f_w(dst.fp().toV(), src.fp().toV());
  vmerge_vx(dst.fp().toV(), zero_reg, dst.fp().toV());
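
// The saturating float-to-int conversions below first compare src with itself
// (vmfeq_vv(v0, src, src)) to build a "not NaN" mask, switch the vector unit
// to round-to-zero with VU.set(FPURoundingMode::RTZ), and zero the NaN lanes,
// matching the Wasm trunc_sat rule that NaN converts to 0.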
  LiftoffRegister src) {
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  VU.set(FPURoundingMode::RTZ);
  LiftoffRegister src) {
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  VU.set(FPURoundingMode::RTZ);
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  LiftoffRegister dst, LiftoffRegister src) {
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  VU.set(FPURoundingMode::RTZ);
  vmv_vx(dst.fp().toV(), zero_reg);
  LiftoffRegister dst, LiftoffRegister src) {
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  VU.set(FPURoundingMode::RTZ);
  vmv_vx(dst.fp().toV(), zero_reg);
  LiftoffRegister rhs) {
  vmfeq_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmfne_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  LiftoffRegister rhs) {
  vmfle_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  vmerge_vi(dst.fp().toV(), -1, dst.fp().toV());
  const uint8_t imms[16]) {
  vnot_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister rhs) {
  vand_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vor_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vxor_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister mask) {
  LiftoffRegister src) {
  vneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  beq(dst.gp(), zero_reg, &t);
  LiftoffRegister src) {
  beqz(dst.gp(), &notalltrue);
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 8 - 1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 8);
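
// Note on the shift emitters above and below: variable shift amounts are
// masked to the lane width (andi(rhs, rhs, 8 - 1) for i8x16, 16 - 1 for i16x8,
// and so on), and immediate shifts are reduced modulo the lane width, matching
// the Wasm rule that shift counts wrap at the lane size.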
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 8 - 1);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  vsra_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 8);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 8 - 1);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  vsrl_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 8);
  LiftoffRegister rhs) {
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vsadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vsaddu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vssub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vssubu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmin_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vminu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmax_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmaxu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister src) {
  vneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  beqz(dst.gp(), &notalltrue);
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 16 - 1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 16);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 16 - 1);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  vsra_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 16);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 16 - 1);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  vsrl_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 16);
  LiftoffRegister rhs) {
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vsadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vsaddu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vssub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vssubu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vmul_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmin_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vminu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmax_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmaxu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister src) {
  vneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  beqz(dst.gp(), &notalltrue);
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 32 - 1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  if (is_uint5(rhs % 32)) {
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 32);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 32 - 1);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  if (is_uint5(rhs % 32)) {
  vsra_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 32);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 32 - 1);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  if (is_uint5(rhs % 32)) {
  vsrl_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 32);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister rhs) {
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vmul_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmin_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vminu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmax_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmaxu_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  constexpr int32_t FIRST_INDEX = 0b0101010101010101;
  constexpr int32_t SECOND_INDEX = 0b1010101010101010;
  LiftoffRegister lhs,
  LiftoffRegister rhs,
  LiftoffRegister acc) {
  VRegister kSimd128ScratchReg4 =
  vwmul_vv(intermediate, lhs.fp().toV(), rhs.fp().toV());
  constexpr int32_t FIRST_INDEX = 0b0001000100010001;
  constexpr int32_t SECOND_INDEX = 0b0010001000100010;
  constexpr int32_t THIRD_INDEX = 0b0100010001000100;
  constexpr int32_t FOURTH_INDEX = 0b1000100010001000;
  vadd_vv(dst.fp().toV(), dst.fp().toV(), acc.fp().toV());
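
// Dot-product sketch (only partially captured above): the i8x16/i16x8 operands
// are widened with vwmul_vv into a scratch group, and the
// FIRST_INDEX..FOURTH_INDEX bit patterns appear to be used as element-selection
// masks for summing the partial products before the final vadd_vv with the
// accumulator; the reduction steps themselves are not part of this excerpt.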
  LiftoffRegister src) {
  vneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  beqz(dst.gp(), &notalltrue);
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 64 - 1);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  if (is_uint5(rhs % 64)) {
  vsll_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 64);
  vsll_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 64 - 1);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  if (is_uint5(rhs % 64)) {
  vsra_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 64);
  vsra_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  andi(rhs.gp(), rhs.gp(), 64 - 1);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), rhs.gp());
  LiftoffRegister lhs, int32_t rhs) {
  if (is_uint5(rhs % 64)) {
  vsrl_vi(dst.fp().toV(), lhs.fp().toV(), rhs % 64);
  vsrl_vx(dst.fp().toV(), lhs.fp().toV(), kScratchReg);
  LiftoffRegister rhs) {
  vadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vmul_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister src) {
  vfabs_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vfneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vfsqrt_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  vfadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vfsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  VU.set(FPURoundingMode::RTZ);
  vfmul_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vfdiv_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vmfeq_vv(v0, lhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister rhs) {
  vmfeq_vv(v0, lhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vfmin_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vfmax_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmerge_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmerge_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
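
// f32x4 (and, further down, f64x2) pmin/pmax implement the pseudo-min/max
// definition (pmin = b < a ? b : a, pmax = a < b ? b : a): vmflt_vv writes the
// lane mask into v0 and vmerge_vv then selects rhs where the mask is set and
// lhs elsewhere, so NaN handling follows the compare rather than IEEE min/max.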
  LiftoffRegister src) {
  vfabs_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vfneg_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vfsqrt_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  vfadd_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vfsub_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vfmul_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister rhs) {
  vfdiv_vv(dst.fp().toV(), lhs.fp().toV(), rhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vfmin_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vfmax_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, rhs.fp().toV(), lhs.fp().toV());
  vmerge_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister rhs) {
  vmflt_vv(v0, lhs.fp().toV(), rhs.fp().toV());
  vmerge_vv(dst.fp().toV(), rhs.fp().toV(), lhs.fp().toV());
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vmfeq_vv(v0, src.fp().toV(), src.fp().toV());
  vmv_vx(dst.fp().toV(), zero_reg);
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vfcvt_f_x_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  VU.set(FPURoundingMode::RTZ);
  vfcvt_f_xu_v(dst.fp().toV(), src.fp().toV());
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmv_vv(v25, rhs.fp().toV());
  VU.set(FPURoundingMode::RNE);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmv_vv(v25, rhs.fp().toV());
  VU.set(FPURoundingMode::RNE);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmv_vv(v25, rhs.fp().toV());
  VU.set(FPURoundingMode::RNE);
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  vmv_vv(v25, rhs.fp().toV());
  VU.set(FPURoundingMode::RNE);
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  LiftoffRegister lhs,
  LiftoffRegister rhs) {
  LiftoffRegister src) {
  vmv_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vmv_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vmv_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister src) {
  vmv_vv(dst.fp().toV(), src.fp().toV());
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  slli(dst.gp(), dst.gp(), sizeof(void*) * 8 - 8);
  srli(dst.gp(), dst.gp(), sizeof(void*) * 8 - 8);
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  slli(dst.gp(), dst.gp(), sizeof(void*) * 8 - 16);
  srli(dst.gp(), dst.gp(), sizeof(void*) * 8 - 16);
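
// Lane extraction for the narrow integer shapes moves the lane to a GP
// register and then zero-extends it: the slli/srli pair by
// sizeof(void*) * 8 - 8 (or - 16) keeps only the low lane bits. These are
// presumably the _u variants; the signed variants can rely on the
// sign-extension performed by vmv_xs and are not captured here.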
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  vmerge_vx(dst.fp().toV(), src2.gp(), src1.fp().toV());
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  vfmerge_vf(dst.fp().toV(), src2.fp(), src1.fp().toV());
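
// Lane replacement uses a lane-select mask in v0 (its setup is not captured in
// this excerpt) followed by vmerge_vx / vfmerge_vf, which writes the new
// scalar only into the selected lane and copies src1 into all other lanes.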
  LiftoffRegister src,
  LiftoffRegister tmp_s128,
  if (lane_kind == kF32) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {
  Register limit_address = temps.Acquire();
  int32_t num_gp_regs = gp_regs.GetNumRegsSet();
  while (!gp_regs.is_empty()) {
  LiftoffRegister reg = gp_regs.GetFirstRegSet();
  int32_t num_fp_regs = fp_regs.GetNumRegsSet();
  while (!fp_regs.is_empty()) {
  LiftoffRegister reg = fp_regs.GetFirstRegSet();
  offset += sizeof(double);
  while (!fp_regs.is_empty()) {
  LiftoffRegister reg = fp_regs.GetFirstRegSet();
  fp_offset += sizeof(double);
  if (fp_offset) AddWord(sp, sp, Operand(fp_offset));
  while (!gp_regs.is_empty()) {
  LiftoffRegister reg = gp_regs.GetLastRegSet();
  AddWord(sp, sp, Operand(gp_offset));
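
// PushRegisters/PopRegisters sketch: the loops above walk the LiftoffRegList,
// spilling each GP and FP register at successive stack offsets (offset +=
// sizeof(double) for the FP saves) and adjusting sp once at the end with a
// single AddWord; the pop path reloads in the opposite order (GetLastRegSet
// for the GP registers) before releasing the stack space.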
  SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
  LiftoffRegList ref_spills, int spill_offset) {
  while (!gp_spills.is_empty()) {
  LiftoffRegister reg = gp_spills.GetFirstRegSet();
  if (ref_spills.has(reg)) {
  safepoint.DefineTaggedStackSlot(spill_offset);
  gp_spills.clear(reg);
  compiler::CallDescriptor* call_descriptor,
  DCHECK(target.is_valid());
  CallWasmCodePointer(target, call_descriptor->signature_hash());
  compiler::CallDescriptor* call_descriptor, Register target) {
  DCHECK(target.is_valid());
  CallWasmCodePointer(target, call_descriptor->signature_hash(),
  AddWord(sp, sp, Operand(-size));
  AddWord(sp, sp, Operand(size));
  Register scratch = temps.Acquire();
  feq_s(scratch, src, src);
  feq_d(scratch, src, src);
  seqz(scratch, scratch);
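
// The NaN check above relies on RISC-V feq semantics: feq_s/feq_d produce 0
// when either input is NaN, so feq(scratch, src, src) followed by
// seqz(scratch, scratch) leaves 1 in scratch exactly when src is NaN.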
  kLiftoffFrameSetupFunctionReg) ==
  LoadConstant(LiftoffRegister(kLiftoffFrameSetupFunctionReg),
  LiftoffRegister src) {
  LiftoffRegister lhs,
  uint8_t imm_lane_idx) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  uint8_t imm_lane_idx) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister rhs) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {
  LiftoffRegister src1,
  LiftoffRegister src2,
  LiftoffRegister src3) {