#ifndef V8_MAGLEV_RISCV_MAGLEV_ASSEMBLER_RISCV_INL_H_
#define V8_MAGLEV_RISCV_MAGLEV_ASSEMBLER_RISCV_INL_H_
class MaglevAssembler::TemporaryRegisterScope
    : public TemporaryRegisterScopeBase<TemporaryRegisterScope> {
  using Base = TemporaryRegisterScopeBase<TemporaryRegisterScope>;

  struct SavedData : public Base::SavedData {
  MaglevAssembler::TemporaryRegisterScope temps(masm_);
  Register temp = temps.AcquireScratch();
  masm_->Move(temp, map);

  masm_->MacroAssembler::Branch(if_true, cond, temp, Operand(zero_reg),
template <typename Arg>
inline bool AlreadyInARegister(Arg arg) { return false; }

inline bool AlreadyInARegister(const Input& input) {
  if (input.operand().IsConstant()) return false;
  const compiler::AllocatedOperand& operand =
      compiler::AllocatedOperand::cast(input.operand());
  if (operand.IsRegister()) return true;
  DCHECK(operand.IsStackSlot());
  return false;
}
template <typename Arg>
inline Register ToRegister(MaglevAssembler* masm,
                           MaglevAssembler::TemporaryRegisterScope* scratch,
                           Arg arg) {
  Register reg = scratch->AcquireScratch();
  masm->Move(reg, arg);
  return reg;
}

inline Register ToRegister(MaglevAssembler* masm,
                           MaglevAssembler::TemporaryRegisterScope* scratch,
                           Register reg) {
  return reg;
}

inline Register ToRegister(MaglevAssembler* masm,
                           MaglevAssembler::TemporaryRegisterScope* scratch,
                           const Input& input) {
  if (input.operand().IsConstant()) {
    Register reg = scratch->AcquireScratch();
    input.node()->LoadToRegister(masm, reg);
    return reg;
  }
  const compiler::AllocatedOperand& operand =
      compiler::AllocatedOperand::cast(input.operand());
  if (operand.IsRegister()) {
    return ToRegister(input);
  }
  DCHECK(operand.IsStackSlot());
  Register reg = scratch->AcquireScratch();
  masm->Move(reg, masm->ToMemOperand(input));
  return reg;
}
template <typename... Args>
struct PushAllHelper;

template <>
struct PushAllHelper<> {
inline void PushInput(MaglevAssembler* masm, const Input& input) {
  if (input.operand().IsConstant()) {
    Register scratch = temps.AcquireScratch();
    input.node()->LoadToRegister(masm, scratch);

  const compiler::AllocatedOperand& operand =
      compiler::AllocatedOperand::cast(input.operand());

  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
template <typename T, typename... Args>
inline void PushIterator(MaglevAssembler* masm, base::iterator_range<T> range,
                         Args... args) {
  for (auto iter = range.begin(), end = range.end(); iter != end; ++iter) {
    masm->Push(*iter);
  }
  PushAllHelper<Args...>::Push(masm, args...);
}
template <typename T, typename... Args>
inline void PushIteratorReverse(MaglevAssembler* masm,
                                base::iterator_range<T> range, Args... args) {
  PushAllHelper<Args...>::PushReverse(masm, args...);
  for (auto iter = range.rbegin(), end = range.rend(); iter != end; ++iter) {
    masm->Push(*iter);
  }
}
template <typename... Args>
struct PushAllHelper<Input, Args...> {

template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {

      masm->MacroAssembler::Push(arg);
template <typename... T>
void MaglevAssembler::Push(T... vals) {
  detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void MaglevAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}
  if (block->is_start_block_of_switch_case()) {

  bind(block->label());

  Register cmp_flag = MaglevAssembler::GetFlagsRegister();
#ifdef V8_ENABLE_DEBUG_CODE

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  Label ConditionMet, Done;

  Abort(AbortReason::kOperandIsNotAMap);
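// Smi tagging with overflow detection: tagging doubles the value, so the
// 64-bit sum (Add64) and the truncated 32-bit sum (Add32) of src with itself
// differ exactly when the result does not fit in 32 bits; Sne records that
// difference in the flags register. Where no overflow is possible, the flag
// is simply cleared.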
  Register overflow_flag = MaglevAssembler::GetFlagsRegister();

  Add64(overflow_flag, src, src);
  Add32(dst, src, src);
  Sne(overflow_flag, overflow_flag, Operand(dst));

  Move(overflow_flag, zero_reg);
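// CheckInt32IsSmi uses the same idea: an int32 fits in a Smi iff doubling it
// does not overflow 32 bits, so the 32-bit and 64-bit sums of the value with
// itself are computed and compared.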
  MaglevAssembler::TemporaryRegisterScope temps(this);
  scratch = temps.AcquireScratch();

  Register sum64 = temps.AcquireScratch();
  Add32(sum32, maybeSmi, Operand(maybeSmi));
  Add64(sum64, maybeSmi, Operand(maybeSmi));
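// SmiAddConstant / SmiSubConstant compare the 64-bit result against the
// truncated 32-bit result of the same operation; a nonzero difference means
// the 32-bit arithmetic overflowed and the fail label is taken. With 32-bit
// Smis, AddOverflow64 / SubOverflow64 are used instead.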
inline void MaglevAssembler::SmiAddConstant(Register dst, Register src,
                                            int value, Label* fail,
                                            Label::Distance distance) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register overflow = temps.AcquireScratch();

  Add64(overflow, src, addend);
  Add32(dst, src, addend);
  Sub64(overflow, dst, overflow);

  AddOverflow64(dst, src, addend, overflow);
inline void MaglevAssembler::SmiSubConstant(Register dst, Register src,
                                            int value, Label* fail,
                                            Label::Distance distance) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register overflow = temps.AcquireScratch();

  Sub64(overflow, src, subtrahend);
  Sub32(dst, src, subtrahend);
  Sub64(overflow, dst, overflow);

  SubOverflow64(dst, src, subtrahend, overflow);
  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();

  constexpr Register cmp_result = MaglevAssembler::GetFlagsRegister();

    const Register& rs2) {
  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();

  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();
  SubWord(aflag, rn, Operand(imm));

  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();

  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();
  if (input.operand().IsRegister()) {

  DCHECK(input.operand().IsStackSlot());
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

inline MemOperand MaglevAssembler::GetStackSlot(
    const compiler::AllocatedOperand& operand) {

inline MemOperand MaglevAssembler::ToMemOperand(
    const compiler::InstructionOperand& operand) {
  MaglevAssembler::TemporaryRegisterScope scope(this);

  Load32U(base, FieldMemOperand(object, JSTypedArray::kBasePointerOffset));

  LoadWord(base, FieldMemOperand(object, JSTypedArray::kBasePointerOffset));

  Add64(data_pointer, data_pointer, base);
inline MemOperand MaglevAssembler::TypedArrayElementOperand(
    Register data_pointer, Register index, int element_size) {
  AddWord(data_pointer, data_pointer, index);

  Add64(data_pointer, data_pointer,

inline void MaglevAssembler::LoadTaggedFieldByIndex(Register result,
                                                    Register object,
                                                    Register index, int scale,
                                                    int offset) {
  AddWord(result, object, index);
#ifdef V8_ENABLE_SANDBOX

#ifdef V8_ENABLE_SANDBOX

         AbortReason::kUnexpectedNegativeValue);

    Register result, Register array, Register index) {
         AbortReason::kUnexpectedNegativeValue);

         AbortReason::kUnexpectedValue);
         AbortReason::kUnexpectedNegativeValue);
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  } else if (size == 2) {

  } else if (size == 2) {
    Register slot_reg, Register object, Register index) {

    Register array, Register index, Register value) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Move(scratch, value);

  MaglevAssembler::TemporaryRegisterScope scope(this);
  Register scratch = scope.AcquireScratch();
  Move(scratch, value);

  DCHECK(size == 1 || size == 2 || size == 4);

  } else if (size == 2) {
#ifdef V8_ENABLE_SANDBOX
inline void MaglevAssembler::StoreTrustedPointerFieldNoWriteBarrier(
    Register object, int offset, Register value) {

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

    srai(value, value, 16);
  } else if (size == 4) {
  Add32(reg, reg, Operand(1));

  Sub32(reg, reg, Operand(1));

  Add32(reg, reg, Operand(amount));

  if constexpr (sizeof(intptr_t) > sizeof(mask)) {
    static constexpr intptr_t lsb_mask = 0xFFFFFFFF;

  Sll32(reg, reg, Operand(amount));

  Add64(reg, reg, Operand(delta));

  DCHECK(location.is_reg());
  Add64(dst, location.rm(), location.offset());

  li(dst, Operand(i.ptr()));

#ifdef V8_COMPRESS_POINTERS
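// The unaligned Float64 accessors below first form the effective address
// (base + index) in a scratch register; the reverse-byte-order variants
// additionally pass the 8-byte value through ByteSwap in a scratch register.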
  MaglevAssembler::TemporaryRegisterScope temps(this);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register address = temps.AcquireScratch();
  Add64(address, base, index);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register address = temps.AcquireScratch();
  Add64(address, base, index);
  ByteSwap(scratch, scratch, 8, address);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register address = temps.AcquireScratch();
  Add64(address, base, index);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Register address = temps.AcquireScratch();
  ByteSwap(scratch, scratch, 8, address);
  Add64(address, base, index);

  SignExtendWord(dst, src);

  SignExtendWord(val, val);
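// ToUint8Clamped: NaN and negative inputs clamp to 0 (min) and values of 255
// or more clamp to 255 (max). The constant 0x406FE shifted left by 44 is the
// bit pattern of the double 255.0, moved into an FPU register with fmv_d_x;
// fcvt_l_d / fcvt_d_l with RNE round the remaining values to the nearest
// integer, ties to even.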
inline void MaglevAssembler::ToUint8Clamped(Register result,
                                            DoubleRegister value, Label* min,
                                            Label* max, Label* done) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Register scratch2 = temps.AcquireScratch();

  constexpr int32_t nan_neg_mask =

  And(scratch2, scratch, Operand(nan_neg_mask));

  And(scratch2, scratch, Operand(pos_inf_mask));

  Add32(scratch, zero_reg, Operand(0x406FE));
  Sll64(scratch, scratch, Operand(44));
  fmv_d_x(ftmp1, scratch);

  fcvt_l_d(scratch, value, RNE);
  fcvt_d_l(ftmp1, scratch, RNE);
template <typename NodeT>

  ZeroExtendWord(scratch, scratch);
  And(scratch, scratch, Operand(JSArrayBuffer::WasDetachedBit::kMask));

  GetDeoptLabel(node, DeoptimizeReason::kArrayBufferWasDetached);
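// The callable/undetectable predicates mask the map's bit field with
// IsUndetectableBit | IsCallableBit: "callable and not undetectable" holds
// when the masked value equals IsCallableBit alone (the Sub32 result is
// zero), while "neither callable nor undetectable" holds when the masked
// value is zero.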
    Register map, Register scratch) {

  And(scratch, scratch,
      Operand(Map::Bits1::IsUndetectableBit::kMask |
              Map::Bits1::IsCallableBit::kMask));

  constexpr Register bit_set_flag = MaglevAssembler::GetFlagsRegister();
  Sub32(bit_set_flag, scratch, Operand(Map::Bits1::IsCallableBit::kMask));

    Register map, Register scratch) {

  constexpr Register bits_unset_flag = MaglevAssembler::GetFlagsRegister();
  And(bits_unset_flag, scratch,
      Operand(Map::Bits1::IsUndetectableBit::kMask |
              Map::Bits1::IsCallableBit::kMask));

    Register heap_object) {
  LoadMap(instance_type, heap_object);
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  bool can_sub = false;
  switch (cond) {
    case Condition::kEqual:
    case Condition::kNotEqual:
    case Condition::kLessThan:
    case Condition::kLessThanEqual:
    case Condition::kGreaterThan:
    case Condition::kGreaterThanEqual:
      can_sub = true;
      break;

  SubWord(scratch, scratch, Operand(type));
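// The instance-type range checks use the standard unsigned trick: after
// subtracting lower_limit, the type lies in [lower_limit, higher_limit]
// exactly when the difference is unsigned <= (higher_limit - lower_limit),
// which Sleu (or CompareI with an unsigned condition) records in the flags
// register.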
    Register map, Register instance_type_out, InstanceType lower_limit,

  Sub32(instance_type_out, instance_type_out, Operand(lower_limit));
  Register aflag = MaglevAssembler::GetFlagsRegister();
  Sleu(aflag, instance_type_out, Operand(higher_limit - lower_limit));

  constexpr Register flag = MaglevAssembler::GetFlagsRegister();

  constexpr Register flag = MaglevAssembler::GetFlagsRegister();
  Assert(Condition::kEqual, reason);
inline void MaglevAssembler::BranchOnObjectType(
    Register heap_object, InstanceType type, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,
    Label::Distance false_distance, bool fallthrough_when_false) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  Branch(kEqual, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  LoadMap(scratch, heap_object);

  Sub32(scratch, scratch, Operand(lower_limit));
         Operand(higher_limit - lower_limit));

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  LoadMap(scratch, heap_object);

  Sub32(scratch, scratch, Operand(lower_limit));
         Operand(higher_limit - lower_limit));

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  LoadMap(scratch, heap_object);

  Sub32(scratch, scratch, Operand(lower_limit));
         Operand(higher_limit - lower_limit));
    Label* if_true, Label::Distance true_distance, bool fallthrough_when_true,

    bool fallthrough_when_false) {
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  LoadMap(scratch, heap_object);

  Sub32(scratch, scratch, Operand(lower_limit));
  constexpr Register flags_reg = MaglevAssembler::GetFlagsRegister();

  CompareI(flags_reg, scratch, Operand(higher_limit - lower_limit),
           Condition::kUnsignedGreaterThan);

  Branch(Condition::kEqual, if_true, true_distance, fallthrough_when_true,
         if_false, false_distance, fallthrough_when_false);
#if V8_STATIC_ROOTS_BOOL

inline void MaglevAssembler::JumpIfObjectInRange(Register heap_object,
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
              higher_limit, distance);

inline void MaglevAssembler::JumpIfObjectNotInRange(Register heap_object,
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
              higher_limit, distance);

inline void MaglevAssembler::AssertObjectInRange(Register heap_object,
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

template <typename NodeT>

template <typename NodeT>

template <typename NodeT>

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register cmp_result = temps.AcquireScratch();

template <typename NodeT>

  constexpr Register Jump_flag = MaglevAssembler::GetFlagsRegister();

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register index_reg = temps.AcquireScratch();

template <typename NodeT>

    Register map, Register instance_type_out, InstanceType lower_limit,

  Sub32(instance_type_out, instance_type_out, Operand(lower_limit));
         Operand(higher_limit - lower_limit));
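// In the Float64 comparisons below, feq_d(x, x) is 1 exactly when x is not
// NaN, so ANDing the two self-comparisons detects whether either operand is
// NaN; that case is routed to the dedicated nan_failed target before the
// ordered comparison runs.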
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Register scratch2 = temps.AcquireScratch();

  feq_d(scratch, src1, src1);
  feq_d(scratch2, src2, src2);
  And(scratch2, scratch, scratch2);

  Or(scratch2, scratch, scratch2);
    BasicBlock* if_true, BasicBlock* if_false, BasicBlock* next_block,
    BasicBlock* nan_failed) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch1 = temps.AcquireScratch();
  Register scratch2 = temps.AcquireScratch();

  feq_d(scratch1, src1, src1);
  feq_d(scratch2, src2, src2);

  And(any_nan, scratch1, scratch2);

  bool fallthrough_when_true = (if_true == next_block);
  bool fallthrough_when_false = (if_false == next_block);
  Label* if_true_label = if_true->label();
  Label* if_false_label = if_false->label();
  if (fallthrough_when_false) {
    if (fallthrough_when_true) {
      DCHECK_EQ(if_true_label, if_false_label);

  if (!fallthrough_when_true) {
    int num_double_registers) {
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  constexpr Register aflag = MaglevAssembler::GetFlagsRegister();

    Label* if_not_equal,
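// The hole is encoded as a NaN with a distinguished upper 32-bit pattern
// (kHoleNanUpper32); the hole checks therefore extract the high word of the
// double and compare it against that pattern, while an ordinary NaN
// (detected with feq_d) is not treated as the hole.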
  ZoneLabelRef is_not_hole(this);
  MaglevAssembler::TemporaryRegisterScope temps(this);

          ZoneLabelRef is_hole, ZoneLabelRef is_not_hole) {
        masm->ExtractHighWordFromF64(scratch, value);

        masm->MacroAssembler::Branch(*is_not_hole);

      value, scratch, is_hole, is_not_hole);

  Register scratch2 = temps.AcquireScratch();
  feq_d(scratch2, value, value);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register upper_bits = temps.AcquireScratch();
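// JumpIfNan / JumpIfNotNan rely on feq_d(value, value): the self-comparison
// yields 0 only for NaN, and the resulting scratch register is branched on.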
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  feq_d(scratch, value, value);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  feq_d(scratch, value, value);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register r1w = temps.AcquireScratch();
  Register r2w = temps.AcquireScratch();

  ZeroExtendWord(r1w, r1);
  ZeroExtendWord(r2w, r2);
    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register lhs = temps.AcquireScratch();
  if (fallthrough_when_false) {
    if (fallthrough_when_true) {

  if (!fallthrough_when_true) {

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register r1w = temps.AcquireScratch();

  ZeroExtendWord(r1w, r1);
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register r1w = temps.AcquireScratch();
  Register r2w = temps.AcquireScratch();

  ZeroExtendWord(r1w, r1);
  ZeroExtendWord(r2w, r2);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register r1w = temps.AcquireScratch();

  ZeroExtendWord(r1w, r1);

    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register lhs = temps.AcquireScratch();
  if (fallthrough_when_false) {
    if (fallthrough_when_true) {

  ZeroExtendWord(lhs, r1);

  ZeroExtendWord(lhs, r1);

  if (!fallthrough_when_true) {
    Register r1, Register value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register lhs = temps.AcquireScratch();
  Register rhs = temps.AcquireScratch();
  if (fallthrough_when_false) {
    if (fallthrough_when_true) {

  ZeroExtendWord(lhs, r1);
  ZeroExtendWord(rhs, value);

  ZeroExtendWord(lhs, r1);
  ZeroExtendWord(rhs, value);

  if (!fallthrough_when_true) {

    Handle<HeapObject> obj,
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  And(scratch, scratch,

  MaglevAssembler::TemporaryRegisterScope temps(this);

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  And(scratch, r1, Operand(mask));
  ZeroExtendWord(scratch, scratch);

  And(scratch, r1, Operand(mask));

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Lwu(scratch, operand);
  And(scratch, scratch, Operand(mask));
  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  And(scratch, r1, Operand(mask));
  ZeroExtendWord(scratch, scratch);

  And(scratch, r1, Operand(mask));

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Lwu(scratch, operand);
  And(scratch, scratch, Operand(mask));

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Lbu(scratch, operand);
  And(scratch, scratch, Operand(mask));

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Lbu(scratch, operand);
  And(scratch, scratch, Operand(mask));
    Register heap_number) {

    Register heap_number) {

    Register heap_number) {

  MaglevAssembler::TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

           scratch, Operand(fp));
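// FunctionEntryStackCheck subtracts the required headroom (stack_check_offset)
// from sp and compares the result, as an unsigned value, against the interrupt
// stack limit; the flags register holds the outcome of that kUnsignedLessThan
// comparison.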
    int stack_check_offset) {
  MaglevAssembler::TemporaryRegisterScope temps(this);

    stack_cmp_reg = temps.AcquireScratch();
    Sub64(stack_cmp_reg, sp, stack_check_offset);

  Register interrupt_stack_limit = temps.AcquireScratch();

  constexpr Register flags_reg = MaglevAssembler::GetFlagsRegister();

  CompareI(flags_reg, stack_cmp_reg, Operand(interrupt_stack_limit),
           Condition::kUnsignedLessThan);

  return Lw(dst, src);

  return LoadWord(dst, src);

  return Sw(src, dst);

  return StoreWord(src, dst);

  Register scratch = temps.AcquireScratch();