5#ifndef V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_INL_H_
6#define V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_INL_H_
12#if V8_TARGET_ARCH_IA32
14#elif V8_TARGET_ARCH_X64
16#elif V8_TARGET_ARCH_ARM64
18#elif V8_TARGET_ARCH_ARM
20#elif V8_TARGET_ARCH_PPC64
22#elif V8_TARGET_ARCH_MIPS64
24#elif V8_TARGET_ARCH_LOONG64
26#elif V8_TARGET_ARCH_S390X
28#elif V8_TARGET_ARCH_RISCV64
30#elif V8_TARGET_ARCH_RISCV32
33#error Unsupported architecture.
97 if (slot.
reg() ==
reg)
return;
116#if V8_TARGET_LITTLE_ENDIAN
129 int32_t offset_imm) {
202 const char* detail) {
214#ifdef V8_TARGET_ARCH_32_BIT
// NOTE(review): lossy excerpt -- the leading numbers (223, 229, ...) are
// original file line numbers fused into the text, and several structural
// lines (the early `return;`s and closing braces between the branches) are
// missing from this view. Do not treat the visible text as compilable.
//
// Emits a 64-bit binary operation on a 32-bit target by applying the 32-bit
// operation {op} independently to the low and high register halves of
// {lhs}/{rhs}, choosing an evaluation order that never overwrites a half
// still needed as an input; falls back to a scratch register otherwise.
// (The enclosing `#ifdef V8_TARGET_ARCH_32_BIT` guard is visible earlier in
// this excerpt, consistent with the half-register splitting.)
223template <
void (LiftoffAssembler::*op)(Register, Register, Register)>
224void EmitI64IndependentHalfOperation(LiftoffAssembler* assm,
225                                     LiftoffRegister dst, LiftoffRegister lhs,
226                                     LiftoffRegister rhs) {
  // Low half first is safe when dst.low does not alias an input high half.
229  if (dst.low() != lhs.high() && dst.low() != rhs.high()) {
230    (assm->*op)(dst.low_gp(), lhs.low_gp(), rhs.low_gp());
231    (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());
  // Otherwise compute the high half first, which is safe when dst.high does
  // not alias an input low half.
236  if (dst.high() != lhs.low() && dst.high() != rhs.low()) {
237    (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());
238    (assm->*op)(dst.low_gp(), lhs.low_gp(), rhs.low_gp());
  // Both orders would clobber an input: compute the low half into an unused
  // scratch gp register ({lhs}/{rhs} pinned so it cannot alias them), emit
  // the high half, then move the scratch result into dst.low.
242  Register tmp = assm->GetUnusedRegister(
kGpReg, LiftoffRegList{lhs, rhs}).gp();
243  (assm->*op)(tmp, lhs.low_gp(), rhs.low_gp());
244  (assm->*op)(dst.high_gp(), lhs.high_gp(), rhs.high_gp());
245  assm->Move(dst.low_gp(), tmp,
kI32);
// NOTE(review): lossy excerpt -- original file lines 250 (the `dst`
// parameter) and 252-255 (presumably the derivation of {low_word} and
// {high_word} from {imm}) are missing here, as are the early `return;`s and
// closing braces between the branches. Confirm against the full file.
//
// Immediate variant of EmitI64IndependentHalfOperation: applies the 32-bit
// immediate operation {op} to each register half of {lhs}. {low_word} and
// {high_word} appear to be the two 32-bit halves of {imm} -- their
// definitions are not visible in this excerpt (TODO confirm).
248template <
void (LiftoffAssembler::*op)(Register, Register,
int32_t)>
249void EmitI64IndependentHalfOperationImm(LiftoffAssembler* assm,
251                                        LiftoffRegister lhs, int64_t imm) {
  // Low half first is safe when dst.low does not alias lhs.high.
256  if (dst.low() != lhs.high()) {
257    (assm->*op)(dst.low_gp(), lhs.low_gp(), low_word);
258    (assm->*op)(dst.high_gp(), lhs.high_gp(), high_word);
  // Otherwise high half first (safe when dst.high does not alias lhs.low).
263  if (dst.high() != lhs.low()) {
264    (assm->*op)(dst.high_gp(), lhs.high_gp(), high_word);
265    (assm->*op)(dst.low_gp(), lhs.low_gp(), low_word);
  // Both orders would clobber an input half: route the low-half result
  // through a scratch register ({lhs} pinned so the scratch cannot alias it).
269  Register tmp = assm->GetUnusedRegister(
kGpReg, LiftoffRegList{lhs}).gp();
270  (assm->*op)(tmp, lhs.low_gp(), low_word);
271  (assm->*op)(dst.high_gp(), lhs.high_gp(), high_word);
272  assm->Move(dst.low_gp(), tmp,
kI32);
// NOTE(review): this region is a run of fragments from six LiftoffAssembler
// forwarder methods; every signature head (and the closing braces between
// them) is missing from this excerpt. Identification below is grounded in
// the 32-bit op named in each template argument.
//
// Fragment of emit_i64_and(dst, lhs, rhs): forwards to the register-register
// half-operation helper with emit_i32_and.
277                              LiftoffRegister rhs) {
278  liftoff::EmitI64IndependentHalfOperation<&LiftoffAssembler::emit_i32_and>(
279      this, dst, lhs, rhs);
// Fragment of emit_i64_andi(dst, lhs, imm): immediate variant, per-half
// emit_i32_andi.
284  liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_andi>(
285      this, dst, lhs, imm);
// Fragment of emit_i64_or(dst, lhs, rhs).
289                             LiftoffRegister rhs) {
290  liftoff::EmitI64IndependentHalfOperation<&LiftoffAssembler::emit_i32_or>(
291      this, dst, lhs, rhs);
// Fragment of emit_i64_ori(dst, lhs, imm).
296  liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_ori>(
297      this, dst, lhs, imm);
// Fragment of emit_i64_xor(dst, lhs, rhs).
301                              LiftoffRegister rhs) {
302  liftoff::EmitI64IndependentHalfOperation<&LiftoffAssembler::emit_i32_xor>(
303      this, dst, lhs, rhs);
// Fragment of emit_i64_xori(dst, lhs, imm).
308  liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_xori>(
309      this, dst, lhs, imm);
// Fragment of an unidentified method: copies src to dst as an i32 when they
// differ. Presumably the 32-bit body of emit_u32_to_uintptr (declared in the
// symbol listing later in this excerpt) -- TODO confirm against full file.
313  if (dst != src)
Move(dst, src,
kI32);
void emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i32_shri(Register dst, Register src, int32_t amount)
void emit_ptrsize_and(Register dst, Register lhs, Register rhs)
void emit_ptrsize_addi(Register dst, Register lhs, intptr_t imm)
void emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void LoadFixedArrayLengthAsInt32(LiftoffRegister dst, Register array, LiftoffRegList pinned)
LiftoffBailoutReason bailout_reason_
void Fill(LiftoffRegister, int offset, ValueKind)
void emit_ptrsize_muli(Register dst, Register lhs, int32_t imm)
void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint32_t *protected_load_pc=nullptr, bool is_load_mem=false, bool i64_offset=false, bool needs_shift=false)
int TopSpillOffset() const
static V8_INLINE int NextSpillOffset(ValueKind kind, int top_spill_offset)
void emit_ptrsize_add(Register dst, Register lhs, Register rhs)
void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
void emit_i32_and(Register dst, Register lhs, Register rhs)
void emit_ptrsize_shri(Register dst, Register src, int amount)
void emit_i32_sari(Register dst, Register src, int32_t amount)
void LoadConstant(LiftoffRegister, WasmValue)
void emit_ptrsize_sub(Register dst, Register lhs, Register rhs)
void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
static bool NeedsAlignment(ValueKind kind)
static int SlotSizeForType(ValueKind kind)
void emit_u32_to_uintptr(Register dst, Register src)
void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i64_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PushStack(ValueKind kind)
void LoadCodePointer(Register dst, Register src_addr, int32_t offset)
void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind)
void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_cond_jumpi(Condition, Label *, Register lhs, int32_t imm, const FreezeCacheState &frozen)
void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
const char * bailout_detail_
void emit_i32_set_cond(Condition, Register dst, Register lhs, Register rhs)
void emit_i32_sub(Register dst, Register lhs, Register rhs)
void PopToFixedRegister(LiftoffRegister reg)
void PushRegister(ValueKind kind, LiftoffRegister reg)
void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
void LoadSmiAsInt32(LiftoffRegister dst, Register src_addr, int32_t offset)
void LoadToFixedRegister(VarState slot, LiftoffRegister reg)
void emit_i32_cond_jumpi(Condition, Label *, Register lhs, int imm, const FreezeCacheState &frozen)
void bailout(LiftoffBailoutReason reason, const char *detail)
void emit_i32_addi(Register dst, Register lhs, int32_t imm)
void PushConstant(ValueKind kind, int32_t i32_const)
void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void clear_i32_upper_half(Register dst)
V8_NOINLINE V8_PRESERVE_MOST void SpillRegister(LiftoffRegister)
void emit_i32_add(Register dst, Register lhs, Register rhs)
static constexpr int StaticStackFrameSize()
void emit_i32_muli(Register dst, Register lhs, int32_t imm)
constexpr Register gp() const
WasmValue constant() const
LiftoffRegister reg() const
static constexpr RegClass reg_class_for(ValueKind kind)
constexpr Register no_reg
constexpr int kBitsPerByte
constexpr int kSystemPointerSize
constexpr Register kReturnRegister0
constexpr bool SmiValuesAre31Bits()
constexpr bool SmiValuesAre32Bits()
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
constexpr T RoundUp(T x, intptr_t m)
void dec_used(LiftoffRegister reg)
bool is_used(LiftoffRegister reg) const
SmallZoneVector< VarState, 16 > stack_state
bool is_free(LiftoffRegister reg) const
void inc_used(LiftoffRegister reg)
#define V8_LIKELY(condition)