#ifndef V8_CODEGEN_RISCV_MACRO_ASSEMBLER_RISCV_H_
#define V8_CODEGEN_RISCV_MACRO_ASSEMBLER_RISCV_H_

#ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
#error This header must be included via macro-assembler.h
#endif

#define xlen (uint8_t(sizeof(void*) * 8))
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SmiWordOffset(offset) (offset + kSystemPointerSize / 2)
#else
#define SmiWordOffset(offset) offset
#endif
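// Note: on a little-endian 64-bit target with 32-bit Smis the payload sits in
// the upper half of the tagged word, so SmiWordOffset() biases the offset by
// half a system pointer to address that half directly.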
  using MacroAssemblerBase::MacroAssemblerBase;

#ifdef V8_COMPRESS_POINTERS
                     IsolateData::cage_base_offset());

  void Debug(uint32_t parameters) { break_(parameters, false); }
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand &r2
#define COND_ARGS cond, r1, r2

#define DECLARE_NORELOC_PROTOTYPE(Name, target_type) \
  void Name(target_type target);                     \
  void Name(target_type target, COND_TYPED_ARGS);

#define DECLARE_BRANCH_PROTOTYPES(Name)   \
  DECLARE_NORELOC_PROTOTYPE(Name, Label*) \
  DECLARE_NORELOC_PROTOTYPE(Name, int32_t)

  void Branch(Label* target);
  void Branch(int32_t target);
  void BranchLong(Label* L);
  void Branch(Label* target, Condition cond, Register r1, const Operand& r2,
              Label::Distance distance = Label::kFar);

#undef DECLARE_BRANCH_PROTOTYPES
#undef COND_TYPED_ARGS
                      unsigned lower_limit, unsigned higher_limit,

    if (bytes == 0) return;
    SubWord(sp, sp, Operand(bytes));

                              const Operand& r2, bool need_link = false);

    LoadWord(output, operand);

    return ExternalReferenceAsOperand(ExternalReference::Create(id), no_reg);
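  // The 20/12-bit immediate split below is used by the PC-relative jump
  // helpers: adding 0x800 before the >> 12 rounds Hi20 so that the
  // sign-extended 12-bit Lo12 recombines exactly, i.e.
  // imm32 == (Hi20 << 12) + Lo12.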
    DCHECK(is_int32(imm32 + 0x800));
    int32_t Hi20 = ((imm32 + 0x800) >> 12);
    int32_t Lo12 = imm32 << 20 >> 20;

    DCHECK(is_int32(imm32 + 0x800));
    int32_t Hi20 = ((imm32 + 0x800) >> 12);
    int32_t Lo12 = imm32 << 20 >> 20;

    if (!RelocInfo::IsNoInfo(rmode)) RecordRelocInfo(rmode, offset);
    GenPCRelativeJump(temp, offset);

    if (!RelocInfo::IsNoInfo(rmode)) RecordRelocInfo(rmode, offset);
    GenPCRelativeJumpAndLink(temp, offset);

  Condition cond = al, Register rs = zero_reg, \
      const Operand &rt = Operand(zero_reg)

    return rmode != RelocInfo::EXTERNAL_REFERENCE;
#ifdef V8_ENABLE_LEAPTIERING
                      uint16_t argument_count);

#ifdef V8_ENABLE_WEBASSEMBLY
  void ResolveWasmCodePointer(Register target, uint64_t signature_hash);
  void CallWasmCodePointer(Register target, uint64_t signature_hash,
  void CallWasmCodePointerNoSignatureCheck(Register target);

#ifdef V8_TARGET_ARCH_RISCV32
  void EnforceStackAlignment();

                             Label* jump_deoptimization_entry_label);
  template <typename... Rs>

  template <typename... Rs>
    push_helper(r, rs...);

    Branch(3, cond, tst1, Operand(tst2));

  void PushArray(Register array, Register size,
                 PushArrayOrder order = kNormal);

              Label** labels, int num_labels);

    SubWord(sp, sp, Operand(stack_offset));
        StoreWord(Register::from_code(i), MemOperand(sp, stack_offset));

    int16_t stack_offset = 0;
        LoadWord(Register::from_code(i), MemOperand(sp, stack_offset));
    addi(sp, sp, stack_offset);

    SubWord(sp, sp, Operand(stack_offset));
        StoreDouble(FPURegister::from_code(i), MemOperand(sp, stack_offset));

    int16_t stack_offset = 0;
        LoadDouble(FPURegister::from_code(i), MemOperand(sp, stack_offset));
    addi(sp, sp, stack_offset);

                                Register exclusion3 = no_reg) const;

  template <typename... Rs>

  template <typename... Rs>
    pop_helper(r, rs...);
#define DEFINE_INSTRUCTION(instr)                          \
  void instr(Register rd, Register rs, const Operand& rt); \
  void instr(Register rd, Register rs, Register rt) {      \
    instr(rd, rs, Operand(rt));                            \
  }                                                        \
  void instr(Register rs, Register rt, int32_t j) { instr(rs, rt, Operand(j)); }
#define DEFINE_INSTRUCTION2(instr)                                 \
  void instr(Register rs, const Operand& rt);                      \
  void instr(Register rs, Register rt) { instr(rs, Operand(rt)); } \
  void instr(Register rs, int32_t j) { instr(rs, Operand(j)); }

#define DEFINE_INSTRUCTION3(instr) void instr(Register rd, intptr_t imm);

#if V8_TARGET_ARCH_RISCV64
#elif V8_TARGET_ARCH_RISCV32

#undef DEFINE_INSTRUCTION
#undef DEFINE_INSTRUCTION2
#undef DEFINE_INSTRUCTION3
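  // The RISC-V A extension has no amosub.w, so the atomic-subtract helper
  // below emulates it: negate rs2 into a scratch register and issue amoadd.w
  // with the negated value.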
    sub(temp, zero_reg, rs2);
    amoadd_w(aq, rl, rd, rs1, temp);

#if V8_TARGET_ARCH_RISCV64
#elif V8_TARGET_ARCH_RISCV32

                          Register scratch, unsigned lower_limit,

  int CalculateStackPassedDWords(int num_gp_arguments, int num_fp_arguments);

      Label* return_location = nullptr);
      Register function, int num_arguments,
      Label* return_location = nullptr);
      int num_double_arguments,
      Label* return_location = nullptr);
      Register function, int num_reg_arguments, int num_double_arguments,
      Label* return_location = nullptr);

                     Label* condition_met);
    CheckPageFlag(object, mask, cc, condition_met);
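  // Sign extension strategy in SignExtendByte/SignExtendShort: when the Zbb
  // extension is available the dedicated sign-extend instruction can be used;
  // the fallback shifts the value to the top of the register and arithmetic-
  // shifts it back down (slli/srai by xlen - 8 or xlen - 16).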
    if (CpuFeatures::IsSupported(ZBB)) {

      slli(rd, rs, xlen - 8);
      srai(rd, rd, xlen - 8);

    if (CpuFeatures::IsSupported(ZBB)) {

      slli(rd, rs, xlen - 16);
      srai(rd, rd, xlen - 16);

#if V8_TARGET_ARCH_RISCV64
  void ZeroExtendWord(Register rd, Register rs) {
    if (CpuFeatures::IsSupported(ZBA)) {

  void Popcnt64(Register rd, Register rs, Register scratch);
  void Ctz64(Register rd, Register rs);
  void Clz64(Register rd, Register rs);
#elif V8_TARGET_ARCH_RISCV32
  void AddPair(Register dst_low, Register dst_high, Register left_low,
               Register left_high, Register right_low, Register right_high,
               Register scratch1, Register scratch2);

  void SubPair(Register dst_low, Register dst_high, Register left_low,
               Register left_high, Register right_low, Register right_high,
               Register scratch1, Register scratch2);

  void AndPair(Register dst_low, Register dst_high, Register left_low,
               Register left_high, Register right_low, Register right_high);

  void OrPair(Register dst_low, Register dst_high, Register left_low,
              Register left_high, Register right_low, Register right_high);

  void XorPair(Register dst_low, Register dst_high, Register left_low,
               Register left_high, Register right_low, Register right_high);

  void MulPair(Register dst_low, Register dst_high, Register left_low,
               Register left_high, Register right_low, Register right_high,
               Register scratch1, Register scratch2);

  void ShlPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, Register shift, Register scratch1,
               Register scratch2);
  void ShlPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, int32_t shift, Register scratch1,
               Register scratch2);

  void ShrPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, Register shift, Register scratch1,
               Register scratch2);
  void ShrPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, int32_t shift, Register scratch1,
               Register scratch2);

  void SarPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, Register shift, Register scratch1,
               Register scratch2);
  void SarPair(Register dst_low, Register dst_high, Register src_low,
               Register src_high, int32_t shift, Register scratch1,
               Register scratch2);
  void ExtractBits(Register rt, Register rs, uint16_t pos, uint16_t size,
                   bool sign_extend = false);
  void ExtractBits(Register dest, Register source, Register pos, int size,
                   bool sign_extend = false) {
    sra(dest, source, pos);
    ExtractBits(dest, dest, 0, size, sign_extend);
  }
  template <int NBYTES>

  template <int NBYTES, bool IS_SIGNED>

  template <int NBYTES>

  template <int NBYTES>

  template <int NBYTES>

#if V8_TARGET_ARCH_RISCV32

  template <typename Reg_T, typename Func>
  void AlignedLoadHelper(Reg_T target, const MemOperand& rs, Func generator);
  template <typename Reg_T, typename Func>
  void AlignedStoreHelper(Reg_T value, const MemOperand& rs, Func generator);

  template <int NBYTES, bool LOAD_SIGNED>

  template <int NBYTES, bool LOAD_SIGNED>
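  // The Trapper parameter on the memory helpers below defaults to a no-op
  // lambda; when provided, it is invoked with an instruction offset
  // (presumably the pc offset of the emitted access, so that callers such as
  // the WebAssembly pipeline can record protected instructions for trap
  // handling).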
  void Sb(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Lh(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Lhu(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Sh(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Lw(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Sw(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
#if V8_TARGET_ARCH_RISCV64
  void Ulwu(Register rd, const MemOperand& rs);
  void Lwu(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Ld(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Sd(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Lld(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Scd(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});

  inline void Load32U(Register rd, const MemOperand& rs,
                      Trapper&& trapper = [](int){}) {
    Lwu(rd, rs, std::forward<Trapper>(trapper));
  }
  inline void LoadWord(Register rd, const MemOperand& rs,
                       Trapper&& trapper = [](int){}) {
    Ld(rd, rs, std::forward<Trapper>(trapper));
  }
  inline void StoreWord(Register rd, const MemOperand& rs,
                        Trapper&& trapper = [](int){}) {
    Sd(rd, rs, std::forward<Trapper>(trapper));
  }
#elif V8_TARGET_ARCH_RISCV32
  inline void Load32U(
      Register rd, const MemOperand& rs, Trapper&& trapper = [](int){}) {
    Lw(rd, rs, std::forward<Trapper>(trapper));
  }
  inline void LoadWord(
      Register rd, const MemOperand& rs, Trapper&& trapper = [](int){}) {
    Lw(rd, rs, std::forward<Trapper>(trapper));
  }
  inline void StoreWord(
      Register rd, const MemOperand& rs, Trapper&& trapper = [](int){}) {
    Sw(rd, rs, std::forward<Trapper>(trapper));
  }
      FPURegister fd, const MemOperand& src, Trapper&& trapper = [](int){});
      FPURegister fs, const MemOperand& dst, Trapper&& trapper = [](int){});
      FPURegister fd, const MemOperand& src, Trapper&& trapper = [](int){});
      FPURegister fs, const MemOperand& dst, Trapper&& trapper = [](int){});

  void Ll(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});
  void Sc(Register rd, const MemOperand& rs, Trapper&& trapper = [](int){});

  void Float32Max(FPURegister dst, FPURegister src1, FPURegister src2);
  void Float32Min(FPURegister dst, FPURegister src1, FPURegister src2);
  void Float64Max(FPURegister dst, FPURegister src1, FPURegister src2);
  void Float64Min(FPURegister dst, FPURegister src1, FPURegister src2);
  template <typename F>
  void FloatMinMaxHelper(FPURegister dst, FPURegister src1, FPURegister src2,
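  // Branchless select via the Zicond extension (used below): czero.nez clears
  // the source when the condition register is non-zero, czero.eqz clears the
  // old destination when it is zero, and the final OR merges the two, giving
  // "rd = (rk == 0) ? rj : rd" without a branch.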
    CHECK(CpuFeatures::IsSupported(ZICOND));
    czero_nez(scratch, rj, rk);
    czero_eqz(rd, rd, rk);
    or_(rd, rd, scratch);

    if (dst != src) fmv_d(dst, src);

    if (dst != src) fmv_s(dst, src);
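  // Moving between FPRs and GPRs below relies on the fmv.* instructions: on
  // RV64 a double moves whole via fmv.x.d/fmv.d.x (with a 32-bit shift to
  // isolate the high word), while fmv.x.w/fmv.w.x transfer the low 32 bits,
  // which is all RV32 needs per word.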
#if V8_TARGET_ARCH_RISCV64
    fmv_x_d(dst_high, src);
    fmv_x_w(dst_low, src);
    srli(dst_high, dst_high, 32);

  inline void Move(Register dst, FPURegister src) { fmv_x_d(dst, src); }
  inline void Move(FPURegister dst, Register src) { fmv_d_x(dst, src); }
#elif V8_TARGET_ARCH_RISCV32
  inline void Move(Register dst, FPURegister src) { fmv_x_w(dst, src); }
  inline void Move(FPURegister dst, Register src) { fmv_w_x(dst, src); }

#if V8_TARGET_ARCH_RISCV64
    fmv_x_d(dst_high, src);
    srai(dst_high, dst_high, 32);
#elif V8_TARGET_ARCH_RISCV32

    fmv_x_w(dst_low, src);

    LoadFPRImmediate(dst, base::bit_cast<uint32_t>(imm));

    LoadFPRImmediate(dst, base::bit_cast<uint64_t>(imm));

  void LoadFPRImmediate(FPURegister dst, uint32_t src);
  void LoadFPRImmediate(FPURegister dst, uint64_t src);
#if V8_TARGET_ARCH_RISCV64
#elif V8_TARGET_ARCH_RISCV32

                      Register overflow, bool sign_extend_inputs = true);
  static const int kSwitchTablePrologueSize = 6;

  template <typename Func>
                           Func GetLabelFunction);

#if V8_TARGET_ARCH_RISCV64

#if V8_TARGET_ARCH_RISCV64
    Add32(dst, src, src);
#elif V8_TARGET_ARCH_RISCV32

#ifdef V8_COMPRESS_POINTERS

#ifdef V8_COMPRESS_POINTERS
    AssertZeroExtended(x);

    LoadTrustedPointerField(destination, field_operand,
                            kCodeIndirectPointerTag);

    StoreTrustedPointerField(value, dst_field_operand);
#if V8_TARGET_ARCH_RISCV64
                       Trapper&& trapper = [](int){});
  void LoadTaggedFieldWithoutDecompressing(const Register& destination,
                                           const MemOperand& field_operand);
  void LoadTaggedSignedField(const Register& destination,
                             const MemOperand& field_operand);
  void SmiUntagField(Register dst, const MemOperand& src);
  void StoreTaggedField(const Register& value,
                        const MemOperand& dst_field_operand,
                        Trapper&& trapper = [](int){});
  void AtomicStoreTaggedField(Register dst, const MemOperand& src,
                              Trapper&& trapper = [](int){});
  void DecompressTaggedSigned(const Register& destination,
                              const MemOperand& field_operand,
                              Trapper&& trapper = [](int){});
  void DecompressTagged(const Register& destination,
                        const MemOperand& field_operand,
                        Trapper&& trapper = [](int){});
  void DecompressTagged(const Register& destination, const Register& source);
  void DecompressTagged(Register dst, Tagged_t immediate);
  void DecompressProtected(const Register& destination,
                           const MemOperand& field_operand,
                           Trapper&& trapper = [](int){});
  void DecodeSandboxedPointer(Register value);
  void LoadSandboxedPointerField(Register destination,
                                 const MemOperand& field_operand,
                                 Trapper&& trapper = [](int){});
  void StoreSandboxedPointerField(Register value,
                                  const MemOperand& dst_field_operand,
                                  Trapper&& trapper = [](int){});

  void LoadIndirectPointerField(Register destination, MemOperand field_operand,
                                IndirectPointerTag tag);
  void StoreIndirectPointerField(Register value, MemOperand dst_field_operand,
                                 Trapper&& trapper = [](int){});

#ifdef V8_ENABLE_SANDBOX
  void ResolveIndirectPointerHandle(Register destination, Register handle,
                                    IndirectPointerTag tag);
  void ResolveTrustedPointerHandle(Register destination, Register handle,
                                   IndirectPointerTag tag);
  void ResolveCodePointerHandle(Register destination, Register handle);
  void LoadCodeEntrypointViaCodePointer(Register destination,
                                        MemOperand field_operand,
                                        CodeEntrypointTag tag);
  void LoadCodePointerTableBase(Register destination);

  void AtomicDecompressTaggedSigned(Register dst, const MemOperand& src,
                                    Trapper&& trapper = [](int){});
  void AtomicDecompressTagged(Register dst, const MemOperand& src,
                              Trapper&& trapper = [](int){});
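  // CmpTagged below produces a value usable for tagged comparisons: with
  // pointer compression only the low 32 bits of a tagged value are
  // significant, so a 32-bit subtract (Sub32) is enough; otherwise the full
  // word is compared with SubWord.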
  void CmpTagged(const Register& rd, const Register& rs1,
                 const Register& rs2) {

    Sub32(rd, rs1, rs2);

    SubWord(rd, rs1, rs2);
#elif V8_TARGET_ARCH_RISCV32
  inline void LoadTaggedField(const Register& destination,
                              const MemOperand& field_operand,
                              Trapper&& trapper = [](int){}) {
    Lw(destination, field_operand, std::forward<Trapper>(trapper));
  }

  inline void LoadTaggedSignedField(const Register& destination,
                                    const MemOperand& field_operand) {

  inline void SmiUntagField(Register dst, const MemOperand& src) {

  void StoreTaggedField(const Register& value,
                        const MemOperand& dst_field_operand,
                        Trapper&& trapper = [](int){}) {
    Sw(value, dst_field_operand, std::forward<Trapper>(trapper));
  }
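  // On RV32 an atomic tagged store is just a 32-bit amoswap.w with both aq and
  // rl set and the old value discarded into zero_reg; the scratch register
  // holds the absolute address computed from the MemOperand.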
  void AtomicStoreTaggedField(Register src, const MemOperand& dst,
                              Trapper&& trapper = [](int) {}) {
    UseScratchRegisterScope temps(this);
    Register scratch = temps.Acquire();
    AddWord(scratch, dst.rm(), dst.offset());

    amoswap_w(true, true, zero_reg, src, scratch);
                      Trapper&& trapper = [](int){});
                      Trapper&& trapper = [](int){});

    LoadRoot(scratch, index);

    Branch(if_equal, eq, with, index, distance);

    Branch(if_not_equal, ne, with, index, distance);

                          unsigned higher_limit, Label* on_in_range);

    return JumpIfJSAnyIsNotPrimitive(heap_object, scratch, target, distance,
                                     Condition::kUnsignedLessThan);

  void RecordWriteField(

    LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);

      Register expected_parameter_count_or_dispatch_handle,

#if defined(V8_ENABLE_LEAPTIERING) && defined(V8_TARGET_ARCH_RISCV64)

#ifndef V8_ENABLE_LEAPTIERING
      Label* flags_need_processing);
    CallRuntime(Runtime::FunctionForId(fid), num_arguments);

                                 bool builtin_exit_frame = false);

    if (!v8_flags.native_code_counters) return;
    EmitIncrementCounter(counter, value, scratch1, scratch2);

    if (!v8_flags.native_code_counters) return;
    EmitDecrementCounter(counter, value, scratch1, scratch2);

                                  Label* done = nullptr);

#if V8_TARGET_ARCH_RISCV64
#elif V8_TARGET_ARCH_RISCV32

                                          Label* if_marked_for_deoptimization);
  template <typename Field>
  void DecodeField(Register dst, Register src) {
    ExtractBits(dst, src, Field::kShift, Field::kSize);
  }

  template <typename Field>
  void DecodeField(Register reg) {
    DecodeField<Field>(reg, reg);
  }
#ifdef V8_ENABLE_LEAPTIERING
#ifdef V8_TARGET_ARCH_RISCV64
  void LoadEntrypointAndParameterCountFromJSDispatchTable(

  bool has_double_zero_reg_set_ = false;
  bool has_single_zero_reg_set_ = false;

      Register function, int num_reg_arguments, int num_double_arguments,
      Label* return_location = nullptr);

  void BranchAndLinkShortHelper(int32_t offset, Label* L);
  void BranchAndLinkShort(int32_t offset);
  void BranchAndLinkShort(Label* L);
  void BranchAndLinkLong(Label* L);
#if V8_TARGET_ARCH_RISCV64
  template <typename F_TYPE>
#elif V8_TARGET_ARCH_RISCV32

  template <typename F>
                     bool keep_nan_same = true);

  template <typename TruncFunc>

  static int SafepointRegisterStackIndex(int reg_code);
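// GenerateSwitchTable (defined below) emits an inline jump table: the switch
// index is scaled by the size of a table entry (slli), added to a base address
// held in a scratch register, and the table body is one dd(GetLabelFunction(i))
// entry per case. kSwitchTablePrologueSize accounts for the instructions
// emitted ahead of the table itself.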
template <typename Func>
                                          Func GetLabelFunction) {

  Register scratch = temps.Acquire();
  Register scratch2 = temps.Acquire();

  slli(scratch2, index,
  add(scratch2, scratch2,

  for (size_t index = 0; index < case_count; ++index) {
    dd(GetLabelFunction(index));
struct MoveCycleState {
  std::optional<UseScratchRegisterScope> temps;

                              Register function_address,
                              ExternalReference thunk_ref, Register thunk_arg,
                              int slots_to_drop_on_return,
#define ACCESS_MASM(masm) masm->

#endif  // V8_CODEGEN_RISCV_MACRO_ASSEMBLER_RISCV_H_