#ifndef V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_H_
#define V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_H_

class LiftoffAssembler;

    return new_cache_reg;
    if (*cache == no_reg) return;
    int code = reg.liftoff_code();
      RegClass rc, std::initializer_list<LiftoffRegister> try_first,
      uint32_t stack_depth);
      std::initializer_list<Register*> possible_uses,
  template <typename... Regs>
      std::initializer_list<VarState> params);
  template <typename Dst, typename Src>
      std::initializer_list<ParallelRegisterMoveTuple> moves) {
  void UnfreezeCacheState() {
#ifdef ENABLE_SLOW_DCHECKS
  bool ValidateCacheState() const;
      LiftoffRegList pinned);
  inline void LoadSmiAsInt32(LiftoffRegister dst, Register src_addr,
      int stack_param_delta);
      bool feedback_vector_slot, size_t stack_param_slots);
  inline void CheckTierUp(int declared_func_index, int budget_used,
                          Label* ool_label, const FreezeCacheState& frozen);
      Register offset_reg, int32_t offset_imm,
      uint32_t* protected_load_pc = nullptr,
      bool offset_reg_needs_shift = false);
#ifdef V8_ENABLE_SANDBOX
  inline void LoadCodeEntrypointViaCodePointer(Register dst, Register src_addr,
      uint32_t* protected_store_pc = nullptr,
      uintptr_t offset_imm, LoadType type,
      uint32_t* protected_load_pc = nullptr, bool is_load_mem = false,
      bool i64_offset = false, bool needs_shift = false);
      uint32_t* protected_store_pc = nullptr, bool is_store_mem = false,
      bool i64_offset = false);
      Register offset_reg, uintptr_t offset_imm,
      uintptr_t offset_imm,
  inline void MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
      Label* trap_div_by_zero, Label* trap_div_unrepresentable);
      Label* trap_div_by_zero);
      Label* trap_rem_by_zero);
      Label* trap_rem_by_zero);
      Label* trap_div_unrepresentable);
      Register offset_reg, uintptr_t offset_imm,
      uint32_t* protected_load_pc, bool i64_offset);
      uint8_t lane, uint32_t* protected_load_pc,
      uint32_t* protected_store_pc, bool i64_offset);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      uint8_t imm_lane_idx);
      ValueKind out_argument_kind, int stack_bytes,
  inline void CallC(const std::initializer_list<VarState> args,
    return locals[index];
                "Reconsider this inlining if ValueKind gets bigger");
  assm.SetCacheStateFrozen();
    : assm_(other.assm_) {
  assm_.SetCacheStateFrozen();
inline FreezeCacheState::~FreezeCacheState() { assm_.UnfreezeCacheState(); }
  slots_.emplace_back(src, src_offset, half, dst_slot);
  slots_.emplace_back(src, dst_slot);
  return a.dst_slot_ > b.dst_slot_;
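A minimal illustrative sketch (not from the V8 sources) of how FreezeCacheState and emit_cond_jump are meant to pair: the cache state stays frozen for the lifetime of the RAII object, so no register allocation can happen between the freeze and the branch whose target expects that state. The helper name and parameters are hypothetical; it assumes a V8 checkout (namespace v8::internal::wasm).

// Hypothetical helper, for illustration only.
void EmitBoundsCheckSketch(LiftoffAssembler& assm, Register index,
                           Register limit, Label* trap_label) {
  // Freeze the cache state; the branch target expects exactly this state.
  FreezeCacheState frozen(assm);
  assm.emit_cond_jump(kUnsignedGreaterThanEqual, trap_label, kI32, index,
                      limit, frozen);
}  // ~FreezeCacheState() calls UnfreezeCacheState() again.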
FreezeCacheState(LiftoffAssembler &assm)
void emit_i8x16_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
void ClearRegister(Register reg, std::initializer_list< Register * > possible_uses, LiftoffRegList pinned)
bool emit_f16x8_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_store_nonzero_if_nan(Register dst, DoubleRegister src, ValueKind kind)
bool emit_f32x4_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_f64_div(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void set_trap_on_oob_mem64(Register index, uint64_t max_index, Label *trap_label)
void emit_i16x8_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
static constexpr ValueKind kSmiKind
void AtomicXor(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i64_shl(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i64_shli(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i8x16_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_relaxed_q15mulr_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i8x16_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_ceil(DoubleRegister dst, DoubleRegister src)
void emit_i16x8_extmul_high_i8x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
static constexpr uint32_t kInlineLocalKinds
void emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_trunc(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i32_rems(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
void emit_i64x2_extmul_high_i32x4_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i32_clz(Register dst, Register src)
void emit_i8x16_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_shri(Register dst, Register src, int32_t amount)
ValueKind * more_local_kinds_
void emit_ptrsize_and(Register dst, Register lhs, Register rhs)
void emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64_mul(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
int ool_spill_space_size_
V8_NOINLINE V8_PRESERVE_MOST LiftoffRegister LoadToRegister_Slow(VarState slot, LiftoffRegList pinned)
void emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_store_nonzero(Register dst)
void emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_eqz(Register dst, Register src)
ValueKind local_kinds_[kInlineLocalKinds]
void emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_uconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
V8_INLINE LiftoffRegister PopToRegister(LiftoffRegList pinned={})
void DropValues(int count)
void emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_floor(DoubleRegister dst, DoubleRegister src)
void emit_f32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void RecordUsedSpillOffset(int offset)
void emit_i16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i8x16_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void FillI64Half(Register, int offset, RegPairHalf)
bool emit_f16x8_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_sari(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_f64x2_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void CallCWithStackBuffer(const std::initializer_list< VarState > args, const LiftoffRegister *rets, ValueKind return_kind, ValueKind out_argument_kind, int stack_bytes, ExternalReference ext_ref)
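Illustrative sketch of a CallCWithStackBuffer-based fallback (not from the V8 sources; the helper name is hypothetical and the buffer protocol is only described approximately): the f64 argument is spilled into a small stack buffer, the C helper is called on that buffer, and the result is read back into the return register.

void F64UnopFallbackSketch(LiftoffAssembler& assm, DoubleRegister dst,
                           DoubleRegister src, ExternalReference ext_ref) {
  LiftoffRegister ret{dst};
  // One kF64 argument, no direct return value (kVoid); the out-argument is
  // read back from the buffer as kF64 into `ret`. Buffer size: one double.
  assm.CallCWithStackBuffer({{kF64, LiftoffRegister{src}, 0}}, &ret,
                            /*return_kind=*/kVoid, /*out_argument_kind=*/kF64,
                            /*stack_bytes=*/8, ext_ref);
}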
void emit_f32_mul(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_ptrsize_addi(Register dst, Register lhs, intptr_t imm)
void emit_i8x16_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32_nearest_int(DoubleRegister dst, DoubleRegister src)
void emit_f64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_dot_i16x8_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i16x8_extmul_low_i8x16_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void ParallelRegisterMove(std::initializer_list< ParallelRegisterMoveTuple > moves)
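A sketch of a parallel register move (illustrative only, not V8 code; the helper name and target registers are assumptions): all tuples are resolved as one parallel assignment, so overlapping sources and destinations do not clobber each other.

void MoveToReturnRegsSketch(LiftoffAssembler& assm, LiftoffRegister lo,
                            LiftoffRegister hi) {
  assm.ParallelRegisterMove({{LiftoffRegister{kReturnRegister0}, lo, kI32},
                             {LiftoffRegister{kReturnRegister1}, hi, kI32}});
}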
LiftoffAssembler(Zone *, std::unique_ptr< AssemblerBuffer >)
void emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
LiftoffRegister LoadToModifiableRegister(VarState slot, LiftoffRegList pinned)
void emit_i16x8_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
V8_INLINE VarState PopVarState()
void LoadFixedArrayLengthAsInt32(LiftoffRegister dst, Register array, LiftoffRegList pinned)
ValueKind local_kind(uint32_t index)
void emit_i64x2_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_dot_i8x16_i7x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void TailCallNativeWasmCode(Address addr)
const char * bailout_detail() const
void emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void SpillInstanceData(Register instance)
void RecordOolSpillSpaceSize(int size)
void emit_f64x2_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i32_subi(Register dst, Register lhs, int32_t imm)
void emit_i32_shli(Register dst, Register src, int32_t amount)
void emit_i32x4_extmul_low_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void AtomicAdd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
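Sketch of lowering an i32 atomic add through AtomicAdd (hypothetical helper, illustrative only): the StoreType selects the access width, and `result` must be a register the caller already reserved in the cache state.

void EmitAtomicAddI32Sketch(LiftoffAssembler& assm, Register addr,
                            Register offset_reg, LiftoffRegister value,
                            LiftoffRegister result) {
  assm.AtomicAdd(addr, offset_reg, /*offset_imm=*/0, value, result,
                 StoreType::kI32Store, /*i64_offset=*/false);
}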
V8_NODISCARD CacheState MergeIntoNewState(uint32_t num_locals, uint32_t arity, uint32_t stack_depth)
void AtomicSub(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void LoadTransform(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LoadTransformationKind transform, uint32_t *protected_load_pc, bool i64_offset)
void emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_store_nonzero_if_nan(Register dst, LiftoffRegister src, Register tmp_gp, LiftoffRegister tmp_s128, ValueKind lane_kind)
LiftoffBailoutReason bailout_reason_
void emit_i32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sar(Register dst, Register src, Register amount)
void emit_i16x8_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_i16x8_sconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
void AtomicAnd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i16x8_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicCompareExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister value, StoreType type, bool i64_offset)
void emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void MergeStackWith(CacheState &target, uint32_t arity, JumpDirection)
void emit_s128_not(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i16(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_dot_i8x16_i7x16_add_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister acc)
bool emit_f16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void set_local_kind(uint32_t index, ValueKind kind)
void emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64x2_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_trunc(DoubleRegister dst, DoubleRegister src)
void emit_f64_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void Fill(LiftoffRegister, int offset, ValueKind)
void emit_i32_shr(Register dst, Register src, Register amount)
void emit_i64_signextend_i32(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_ptrsize_muli(Register dst, Register lhs, int32_t imm)
void LoadFullPointer(Register dst, Register src_addr, int32_t offset_imm)
void DeallocateStackSlot(uint32_t size)
void emit_i8x16_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_extmul_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i16x8_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_abs(LiftoffRegister dst, LiftoffRegister src)
LiftoffRegister PeekToRegister(int index, LiftoffRegList pinned)
void emit_i32_andi(Register dst, Register lhs, int32_t imm)
bool emit_f16x8_abs(LiftoffRegister dst, LiftoffRegister src)
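The bool-returning emit_f16x8_* entry points report whether the target can lower the operation inline. One plausible caller-side pattern, sketched here with a hypothetical helper and an illustrative bailout reason/detail, is to bail out when they return false:

void EmitF16x8AbsOrBailSketch(LiftoffAssembler& assm, LiftoffRegister dst,
                              LiftoffRegister src) {
  if (!assm.emit_f16x8_abs(dst, src)) {
    assm.bailout(kSimd, "f16x8.abs");  // reason/detail chosen for illustration
  }
}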
void emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StackCheck(Label *ool_code)
void emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f32x4_promote_low_f16x8(LiftoffRegister dst, LiftoffRegister src)
void Store(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, bool is_store_mem=false, bool i64_offset=false)
bool emit_f16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint32_t *protected_load_pc=nullptr, bool is_load_mem=false, bool i64_offset=false, bool needs_shift=false)
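Sketch of a guarded memory load using the default arguments of Load (helper name hypothetical, not V8 code): protected_load_pc receives the position of the potentially faulting instruction so the caller can register it with the trap handler.

void EmitGuardedI32LoadSketch(LiftoffAssembler& assm, LiftoffRegister dst,
                              Register mem_start, Register index) {
  uint32_t protected_load_pc = 0;
  assm.Load(dst, mem_start, index, /*offset_imm=*/0, LoadType::kI32Load,
            &protected_load_pc, /*is_load_mem=*/true);
  // A real caller would now record protected_load_pc for the trap handler.
}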
void emit_i64x2_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f16x8_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_high_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
int max_used_spill_offset_
void emit_i32_signextend_i16(Register dst, Register src)
void emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_divs(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_f32_neg(DoubleRegister dst, DoubleRegister src)
int TopSpillOffset() const
void emit_f64x2_promote_low_f32x4(LiftoffRegister dst, LiftoffRegister src)
static V8_INLINE int NextSpillOffset(ValueKind kind, int top_spill_offset)
void emit_ptrsize_add(Register dst, Register lhs, Register rhs)
void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
LiftoffRegister PopToModifiableRegister(LiftoffRegList pinned={})
void LoadLane(LiftoffRegister dst, LiftoffRegister src, Register addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint8_t lane, uint32_t *protected_load_pc, bool i64_offset)
void emit_i16x8_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_and(Register dst, Register lhs, Register rhs)
void emit_i16x8_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
V8_INLINE LiftoffRegister LoadToRegister(VarState slot, LiftoffRegList pinned)
void emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f64x2_trunc(LiftoffRegister dst, LiftoffRegister src)
void CallBuiltin(Builtin builtin)
void emit_i8x16_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PrepareCall(const ValueKindSig *, compiler::CallDescriptor *, Register *target=nullptr, Register target_instance=no_reg)
void emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_relaxed_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_abs(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallFrameSetupStub(int declared_function_index)
void emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_abs(DoubleRegister dst, DoubleRegister src)
void emit_i32_remu(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
bool emit_f64x2_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSpillAddress(Register dst, int offset, ValueKind kind)
void emit_i16x8_extmul_high_i8x16_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f64_neg(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32_ctz(Register dst, Register src)
LiftoffBailoutReason bailout_reason() const
void StoreCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind, Register frame_pointer)
void emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void AtomicExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i32x4_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Spill(VarState *slot)
~LiftoffAssembler() override
void emit_i32_xor(Register dst, Register lhs, Register rhs)
LiftoffRegister LoadI64HalfIntoRegister(VarState slot, RegPairHalf half, LiftoffRegList pinned)
void emit_s128_select(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask)
bool emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void emit_ptrsize_shri(Register dst, Register src, int amount)
void emit_i8x16_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void PrepareForBranch(uint32_t arity, LiftoffRegList pinned)
bool emit_f16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void AssertUnreachable(AbortReason reason)
void CallC(const std::initializer_list< VarState > args, ExternalReference ext_ref)
void emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_or(Register dst, Register lhs, Register rhs)
bool emit_f16x8_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst, LiftoffRegister src)
uint32_t GetNumUses(LiftoffRegister reg) const
const CacheState * cache_state() const
void emit_i64x2_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
bool emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero)
bool emit_f16x8_demote_f32x4_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f64x2_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void DropStackSlotsAndRet(uint32_t num_stack_slots)
void emit_f32x4_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_and_not(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_relaxed_laneselect(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask, int lane_width)
void emit_i32x4_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_sconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_sari(Register dst, Register src, int32_t amount)
void LoadConstant(LiftoffRegister, WasmValue)
void emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
int GetTotalFrameSize() const
void emit_i16x8_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_sub(Register dst, Register lhs, Register rhs)
void emit_i16x8_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallNativeWasmCode(Address addr)
bool emit_f32x4_trunc(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PrepareTailCall(int num_callee_stack_params, int stack_param_delta)
void emit_f32x4_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_nearest_int(DoubleRegister dst, DoubleRegister src)
bool emit_f16x8_ceil(LiftoffRegister dst, LiftoffRegister src)
uint32_t num_locals() const
void emit_i32_divu(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero)
void emit_cond_jump(Condition, Label *, ValueKind value, Register lhs, Register rhs, const FreezeCacheState &frozen)
void LoadFromInstance(Register dst, Register instance, int offset, int size)
void emit_i8x16_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void SpillLoopArgs(int num)
void emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_smi_check(Register obj, Label *target, SmiCheckMode mode, const FreezeCacheState &frozen)
void emit_f64_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
static bool NeedsAlignment(ValueKind kind)
void emit_f32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
static int SlotSizeForType(ValueKind kind)
void LoadProtectedPointer(Register dst, Register src_addr, int32_t offset)
void emit_i32x4_extmul_low_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_ori(Register dst, Register lhs, int32_t imm)
void emit_f64x2_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
bool emit_f32x4_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_popcnt(LiftoffRegister dst, LiftoffRegister src)
void emit_u32_to_uintptr(Register dst, Register src)
bool supports_f16_mem_access()
void emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_mul(Register dst, Register lhs, Register rhs)
void emit_i8x16_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
bool emit_f64_floor(DoubleRegister dst, DoubleRegister src)
void emit_f32_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadInstanceDataFromFrame(Register dst)
void emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32x4_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i64_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64x2_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void DropExceptionValueAtOffset(int offset)
void emit_i16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
LiftoffRegister SpillAdjacentFpRegisters(LiftoffRegList pinned)
void emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PushStack(ValueKind kind)
bool emit_f16x8_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i32_popcnt(Register dst, Register src)
void ParallelRegisterMove(base::Vector< const ParallelRegisterMoveTuple >)
void emit_i16x8_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadCodePointer(Register dst, Register src_addr, int32_t offset)
void emit_i32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
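Sketch of the lane-access emitters (illustrative helper, not V8 code): extract_lane moves one lane into a scalar (gp-class) register, and replace_lane writes a scalar back into a chosen lane.

void ExtractThenReplaceLaneSketch(LiftoffAssembler& assm, LiftoffRegister dst,
                                  LiftoffRegister vec,
                                  LiftoffRegister gp_scratch) {
  assm.emit_i32x4_extract_lane(gp_scratch, vec, 0);       // gp_scratch = vec[0]
  assm.emit_i32x4_replace_lane(dst, vec, gp_scratch, 3);  // dst = vec, lane 3 replaced
}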
void MoveStackValue(uint32_t dst_offset, uint32_t src_offset, ValueKind)
void emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
LiftoffRegister GetUnusedRegister(LiftoffRegList candidates)
void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind)
void emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_popcnt(LiftoffRegister dst, LiftoffRegister src)
void AtomicStore(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, bool i64_offset)
void emit_f64_abs(DoubleRegister dst, DoubleRegister src)
static constexpr int kStackSlotSize
void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PatchPrepareStackFrame(int offset, SafepointTableBuilder *, bool feedback_vector_slot, size_t stack_param_slots)
void emit_f32x4_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_ptrsize_cond_jumpi(Condition, Label *, Register lhs, int32_t imm, const FreezeCacheState &frozen)
void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind)
LiftoffRegister GetUnusedRegister(RegClass rc, LiftoffRegList pinned)
void emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_xori(Register dst, Register lhs, int32_t imm)
const char * bailout_detail_
CacheState * cache_state()
void set_num_locals(uint32_t num_locals)
void emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_set_cond(Condition, Register dst, Register lhs, Register rhs)
void emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sub(Register dst, Register lhs, Register rhs)
void TailCallIndirect(compiler::CallDescriptor *call_descriptor, Register target)
void PopToFixedRegister(LiftoffRegister reg)
void emit_i16x8_q15mulr_sat_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
int OolSpillCount() const
void emit_i64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void PushRegister(ValueKind kind, LiftoffRegister reg)
void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_bitmask(LiftoffRegister dst, LiftoffRegister src)
void FinishCall(const ValueKindSig *, compiler::CallDescriptor *)
void emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void MoveToReturnLocations(const FunctionSig *, compiler::CallDescriptor *)
void emit_f32_div(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
bool emit_f16x8_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_sqrt(LiftoffRegister dst, LiftoffRegister src)
void SpillRegisters(Regs... regs)
void emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSmiAsInt32(LiftoffRegister dst, Register src_addr, int32_t offset)
void emit_i8x16_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_extmul_low_i8x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AllocateStackSlot(Register addr, uint32_t size)
void emit_i32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadTaggedPointer(Register dst, Register src_addr, Register offset_reg, int32_t offset_imm, uint32_t *protected_load_pc=nullptr, bool offset_reg_needs_shift=false)
void PushRegisters(LiftoffRegList)
void emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_alltrue(LiftoffRegister dst, LiftoffRegister src)
void LoadToFixedRegister(VarState slot, LiftoffRegister reg)
void emit_i32x4_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_cond_jumpi(Condition, Label *, Register lhs, int imm, const FreezeCacheState &frozen)
void emit_i64_eqz(Register dst, LiftoffRegister src)
void StoreTaggedPointer(Register dst_addr, Register offset_reg, int32_t offset_imm, Register src, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, SkipWriteBarrier=kNoSkipWriteBarrier)
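Sketch of StoreTaggedPointer with the write barrier skipped (hypothetical helper; the field offset and the Smi-only assumption are illustrative): skipping is only sound when the stored value can never be a heap pointer, e.g. a Smi.

void StoreSmiFieldSketch(LiftoffAssembler& assm, Register obj, Register smi,
                         int field_offset, LiftoffRegList pinned) {
  assm.StoreTaggedPointer(obj, no_reg, field_offset, smi, pinned,
                          /*protected_store_pc=*/nullptr, kSkipWriteBarrier);
}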
void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src)
void bailout(LiftoffBailoutReason reason, const char *detail)
void emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void IncrementSmi(LiftoffRegister dst, int offset)
void PopRegisters(LiftoffRegList)
LiftoffRegister GetUnusedRegister(RegClass rc, std::initializer_list< LiftoffRegister > try_first, LiftoffRegList pinned)
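Sketch of the usual pop/compute/push shape for a binary i32 operation, using the try_first overload above so the result can reuse one of the operand registers (helper name hypothetical, not V8 code):

void EmitI32AddSketch(LiftoffAssembler& assm) {
  LiftoffRegister rhs = assm.PopToRegister();
  // Pin rhs so the second pop cannot load lhs into the same register.
  LiftoffRegister lhs = assm.PopToRegister(LiftoffRegList{rhs});
  LiftoffRegister dst = assm.GetUnusedRegister(kGpReg, {lhs, rhs}, {});
  assm.emit_i32_add(dst.gp(), lhs.gp(), rhs.gp());
  assm.PushRegister(kI32, dst);
}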
void emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void MergeFullStackWith(CacheState &target)
void emit_f64x2_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shuffle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, const uint8_t shuffle[16], bool is_swizzle)
void emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst, LiftoffRegister src)
V8_NOINLINE V8_PRESERVE_MOST LiftoffRegister SpillOneRegister(LiftoffRegList candidates)
static constexpr ValueKind kIntPtrKind
bool emit_f32_ceil(DoubleRegister dst, DoubleRegister src)
void emit_i32_addi(Register dst, Register lhs, int32_t imm)
void emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_extmul_high_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicOr(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
bool emit_select(LiftoffRegister dst, Register condition, LiftoffRegister true_value, LiftoffRegister false_value, ValueKind kind)
void PushConstant(ValueKind kind, int32_t i32_const)
bool emit_i16x8_uconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
bool emit_type_conversion(WasmOpcode opcode, LiftoffRegister dst, LiftoffRegister src, Label *trap=nullptr)
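Sketch of emit_type_conversion for a trapping conversion (hypothetical helper): the trap label is taken for unrepresentable inputs, and a false return means there is no inline lowering, so the caller would fall back to a C call (see the CallCWithStackBuffer sketch above).

void EmitTruncF64ToI32Sketch(LiftoffAssembler& assm, LiftoffRegister dst,
                             LiftoffRegister src, Label* trap) {
  if (!assm.emit_type_conversion(kExprI32SConvertF64, dst, src, trap)) {
    // No inline sequence on this target; emit a runtime/C fallback here.
  }
}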
void emit_i8x16_neg(LiftoffRegister dst, LiftoffRegister src)
void PrepareBuiltinCall(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, std::initializer_list< VarState > params)
void AtomicLoad(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, bool i64_offset)
void emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadReturnStackSlot(LiftoffRegister, int offset, ValueKind)
void emit_i32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_u(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_trunc(DoubleRegister dst, DoubleRegister src)
void LoadTaggedPointerFromInstance(Register dst, Register instance, int offset)
void emit_v128_anytrue(LiftoffRegister dst, LiftoffRegister src)
void FillStackSlotsWithZero(int start, int size)
void emit_i32_shl(Register dst, Register src, Register amount)
void emit_i32x4_extmul_high_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_const(LiftoffRegister dst, const uint8_t imms[16])
void emit_f64_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StoreLane(Register dst, Register offset, uintptr_t offset_imm, LiftoffRegister src, StoreType type, uint8_t lane, uint32_t *protected_store_pc, bool i64_offset)
Register LoadOldFramePointer()
void CheckTierUp(int declared_func_index, int budget_used, Label *ool_label, const FreezeCacheState &frozen)
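Sketch of the tier-up budget check (hypothetical helper): the cache state must be frozen across the conditional branch to the out-of-line label, which is why CheckTierUp takes a FreezeCacheState.

void EmitTierUpCheckSketch(LiftoffAssembler& assm, int declared_func_index,
                           int budget_used, Label* ool_label) {
  FreezeCacheState frozen(assm);
  assm.CheckTierUp(declared_func_index, budget_used, ool_label, frozen);
}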
void emit_trace_instruction(uint32_t markid)
void emit_i16x8_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64x2_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallIndirect(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, Register target)
void RecordSpillsInSafepoint(SafepointTableBuilder::Safepoint &safepoint, LiftoffRegList all_spills, LiftoffRegList ref_spills, int spill_offset)
void emit_f64x2_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void clear_i32_upper_half(Register dst)
int GetTotalFrameSlotCountForGC() const
V8_NOINLINE V8_PRESERVE_MOST void MoveToReturnLocationsMultiReturn(const FunctionSig *, compiler::CallDescriptor *)
V8_NOINLINE V8_PRESERVE_MOST void SpillRegister(LiftoffRegister)
void emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_sqrt(LiftoffRegister dst, LiftoffRegister src)
void LoadTrustedPointer(Register dst, Register src_addr, int offset, IndirectPointerTag tag)
void emit_i32_add(Register dst, Register lhs, Register rhs)
void emit_i32x4_relaxed_trunc_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
static constexpr int StaticStackFrameSize()
void emit_i32_muli(Register dst, Register lhs, int32_t imm)
void emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_signextend_i8(Register dst, Register src)
void emit_i8x16_bitmask(LiftoffRegister dst, LiftoffRegister src)
constexpr Register set(Register reg)
constexpr LiftoffRegister clear(LiftoffRegister reg)
LiftoffRegList MaskOut(const LiftoffRegList mask) const
constexpr bool HasAdjacentFpRegsSet() const
bool has(LiftoffRegister reg) const
constexpr bool is_empty() const
constexpr LiftoffRegList GetAdjacentFpRegsSet() const
constexpr unsigned GetNumRegsSet() const
LiftoffRegister GetFirstRegSet() const
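Sketch of the pinned-set idiom built on LiftoffRegList (illustrative only, not V8 code): set() both records a register and returns it, so claiming and pinning compose in one expression and later allocations cannot hand out the same register.

LiftoffRegister TwoScratchRegsSketch(LiftoffAssembler& assm) {
  LiftoffRegList pinned;
  LiftoffRegister a = pinned.set(assm.GetUnusedRegister(kGpReg, pinned));
  LiftoffRegister b = pinned.set(assm.GetUnusedRegister(kGpReg, pinned));
  assm.emit_i32_add(a.gp(), a.gp(), b.gp());
  return a;
}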
constexpr DoubleRegister fp() const
static LiftoffRegister ForFpPair(DoubleRegister low)
static LiftoffRegister ForPair(Register low, Register high)
constexpr int liftoff_code() const
constexpr Register gp() const
void Add(const LiftoffAssembler::VarState &src, uint32_t src_offset, RegPairHalf half, int dst_slot)
base::SmallVector< Slot, 8 > slots_
static int SlotSizeInBytes(const Slot &slot)
void Construct(int param_slots)
LiftoffAssembler *const asm_
LiftoffStackSlots & operator=(const LiftoffStackSlots &)=delete
void Add(const LiftoffAssembler::VarState &src, int dst_slot)
LiftoffStackSlots(LiftoffAssembler *wasm_asm)
LiftoffStackSlots(const LiftoffStackSlots &)=delete
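Sketch of LiftoffStackSlots usage when setting up stack parameters for a call (hypothetical helper; the slot indices are illustrative): slots are collected first and then materialized together by Construct.

void PushTwoStackArgsSketch(LiftoffAssembler& assm,
                            const LiftoffAssembler::VarState& arg0,
                            const LiftoffAssembler::VarState& arg1) {
  LiftoffStackSlots slots(&assm);
  slots.Add(arg0, /*dst_slot=*/0);
  slots.Add(arg1, /*dst_slot=*/1);
  slots.Construct(/*param_slots=*/2);
}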
LiftoffRegister reg() const
void MakeRegister(LiftoffRegister r)
ZoneVector< OpIndex > candidates
base::Vector< const DirectHandle< Object > > args
ZoneVector< RpoNumber > & result
std::optional< OolTrapLabel > trap
constexpr Vector< T > VectorOf(T *start, size_t size)
constexpr Condition Flip(Condition cond)
constexpr Condition Negate(Condition cond)
static constexpr int kAfterMaxLiftoffRegCode
static constexpr bool kNeedS128RegPair
static constexpr LiftoffRegList GetCacheRegList(RegClass rc)
int declared_function_index(const WasmModule *module, int func_index)
static constexpr LiftoffRegList kGpCacheRegList
static constexpr LiftoffRegList kFpCacheRegList
static constexpr bool kNeedI64RegPair
constexpr Register no_reg
constexpr int kTaggedSize
constexpr int kSimd128Size
kUnsignedGreaterThanEqual
constexpr int kSystemPointerSize
constexpr Register kWasmImplicitArgRegister
constexpr int kDoubleSize
#define DCHECK_LE(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
uint32_t register_use_count[kAfterMaxLiftoffRegCode]
void SetMemStartCacheRegister(Register reg, int memory_index)
void ClearCachedInstanceRegister()
Register cached_instance_data
Register TrySetCachedInstanceRegister(LiftoffRegList pinned)
bool has_unused_register(LiftoffRegList candidates) const
static constexpr int kNoCachedMemIndex
uint32_t stack_height() const
LiftoffRegister GetNextSpillReg(LiftoffRegList candidates)
LiftoffRegister unused_register(RegClass rc, LiftoffRegList pinned={}) const
LiftoffRegList last_spilled_regs
Register cached_mem_start
void dec_used(LiftoffRegister reg)
LiftoffRegister take_volatile_register(LiftoffRegList candidates)
bool is_used(LiftoffRegister reg) const
CacheState & operator=(const CacheState &) V8_NOEXCEPT=default
void DefineSafepoint(SafepointTableBuilder::Safepoint &safepoint)
SmallZoneVector< VarState, 16 > stack_state
void Split(const CacheState &source)
void GetTaggedSlotsForOOLCode(ZoneVector< int > *slots, LiftoffRegList *spills, SpillLocation spill_location)
void reset_used_registers()
void ClearAllCacheRegisters()
void DefineSafepointWithCalleeSavedRegisters(SafepointTableBuilder::Safepoint &safepoint)
void clear_used(LiftoffRegister reg)
void SetInstanceCacheRegister(Register reg)
bool has_unused_register(RegClass rc, LiftoffRegList pinned={}) const
uint32_t get_use_count(LiftoffRegister reg) const
LiftoffRegList used_registers
void Steal(CacheState &source)
CacheState(CacheState &&) V8_NOEXCEPT=default
void ClearCachedMemStartRegister()
void SetCacheRegister(Register *cache, Register reg)
V8_INLINE void ClearCacheRegister(Register *cache)
LiftoffRegister unused_register(LiftoffRegList candidates, LiftoffRegList pinned={}) const
bool is_free(LiftoffRegister reg) const
void inc_used(LiftoffRegister reg)
bool has_volatile_register(LiftoffRegList candidates)
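A sketch of how the CacheState queries above can be layered when picking a GP register, similar in spirit to GetUnusedRegister (an assumption about intended use, not a copy of the implementation): prefer a free register, then evict a volatile cache register (cached instance data / memory start), and only then nominate a register for spilling.

LiftoffRegister PickGpRegSketch(LiftoffAssembler::CacheState& state,
                                LiftoffRegList pinned) {
  LiftoffRegList candidates = kGpCacheRegList.MaskOut(pinned);
  if (state.has_unused_register(candidates)) {
    return state.unused_register(candidates);
  }
  if (state.has_volatile_register(candidates)) {
    return state.take_volatile_register(candidates);
  }
  return state.GetNextSpillReg(candidates);  // caller must spill this register
}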
ParallelRegisterMoveTuple(Dst dst, Src src, ValueKind kind)
Slot(const LiftoffAssembler::VarState &src, int dst_slot)
LiftoffAssembler::VarState src_
Slot(const LiftoffAssembler::VarState &src, uint32_t src_offset, RegPairHalf half, int dst_slot)
#define V8_LIKELY(condition)