#ifndef V8_MAGLEV_S390_MAGLEV_ASSEMBLER_S390_INL_H_
#define V8_MAGLEV_S390_MAGLEV_ASSEMBLER_S390_INL_H_
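// Scoped helper that hands out temporary scratch registers (GPRs and doubles)
// during Maglev code generation on s390.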
class MaglevAssembler::TemporaryRegisterScope
    : public TemporaryRegisterScopeBase<TemporaryRegisterScope> {
  using Base = TemporaryRegisterScopeBase<TemporaryRegisterScope>;

  struct SavedData : public Base::SavedData {
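// MapCompare::Generate(): materialize the expected map in a scratch register,
// then branch on the outcome of the map comparison.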
  MaglevAssembler::TemporaryRegisterScope temps(masm_);
  Register temp = temps.AcquireScratch();
  masm_->Move(temp, map);

  masm_->JumpIf(cond, if_true, distance);
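// Variadic push machinery: PushAllHelper and the PushIterator helpers expand
// the argument packs of MaglevAssembler::Push()/PushReverse() into individual
// pushes.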
template <typename... Args>
struct PushAllHelper;

template <>
struct PushAllHelper<> {
  if (input.operand().IsConstant()) {
    MaglevAssembler::TemporaryRegisterScope temps(masm);
    Register scratch = temps.AcquireScratch();
    input.node()->LoadToRegister(masm, scratch);

    const compiler::AllocatedOperand& operand =
        compiler::AllocatedOperand::cast(input.operand());
template <typename T, typename... Args>
inline void PushIterator(MaglevAssembler* masm, base::iterator_range<T> range,
                         Args... args) {
  for (auto iter = range.begin(), end = range.end(); iter != end; ++iter) {
    masm->Push(*iter);
  }
  PushAllHelper<Args...>::Push(masm, args...);
}
template <typename T, typename... Args>
inline void PushIteratorReverse(MaglevAssembler* masm,
                                base::iterator_range<T> range, Args... args) {
  PushAllHelper<Args...>::PushReverse(masm, args...);
  for (auto iter = range.rbegin(), end = range.rend(); iter != end; ++iter) {
    masm->Push(*iter);
  }
}
template <typename... Args>
struct PushAllHelper<Input, Args...> {

template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {

    masm->MacroAssembler::Push(arg);
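// MaglevAssembler::Push()/PushReverse() simply forward their argument packs to
// the PushAllHelper machinery above.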
template <typename... T>
void MaglevAssembler::Push(T... vals) {
  detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void MaglevAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}
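// BindBlock() binds a basic block's label at the current position. The
// "int value, Label* fail" fragments below appear to belong to the
// SmiAddConstant/SmiSubConstant helpers, which branch to `fail` on Smi
// overflow.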
  bind(block->label());

    int value, Label* fail,

    int value, Label* fail,
  if (input.operand().IsRegister()) {

    DCHECK(input.operand().IsStackSlot());
    TemporaryRegisterScope temps(this);
    Register scratch = temps.AcquireScratch();

inline MemOperand MaglevAssembler::GetStackSlot(
    const compiler::AllocatedOperand& operand) {

inline MemOperand MaglevAssembler::ToMemOperand(
    const compiler::InstructionOperand& operand) {
  AddU64(data_pointer, data_pointer, base);

inline MemOperand MaglevAssembler::TypedArrayElementOperand(
    Register data_pointer, Register index, int element_size) {

  AddU64(data_pointer, data_pointer, temp);

    Register index, int scale,

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  AddU64(scratch, scratch, object);
         AbortReason::kUnexpectedNegativeValue);

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

    Register result, Register array, Register index) {

         AbortReason::kUnexpectedNegativeValue);

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  Register scratch2 = temps.AcquireScratch();

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

         AbortReason::kUnexpectedValue);

         AbortReason::kUnexpectedNegativeValue);

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  } else if (size == 2) {

  } else if (size == 2) {

  mov(slot_reg, object);

inline void MaglevAssembler::SetSlotAddressForFixedArrayElement(
    Register slot_reg, Register object, Register index) {

  mov(slot_reg, object);

  AddU64(slot_reg, slot_reg, scratch);
    Register array, Register index, Register value) {
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  AddU64(scratch, scratch, array);

  TemporaryRegisterScope scope(this);

  Move(scratch, value);

  TemporaryRegisterScope scope(this);

  Move(scratch, value);

  DCHECK(size == 1 || size == 2 || size == 4);

  } else if (size == 2) {

  } else if (size == 4) {

  CHECK(is_int20(delta));
  mov(dst, Operand(i.ptr()));

  mov(dst, Operand(i));

  TemporaryRegisterScope scope(this);
  Register scratch = scope.AcquireScratch();

  TemporaryRegisterScope scope(this);
  Register scratch = scope.AcquireScratch();

  mov(dst, Operand(i));

#ifdef V8_COMPRESS_POINTERS
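// Conversion and byte-order helpers: ledbr rounds a 64-bit double down to
// single precision, and lrvgr byte-reverses a 64-bit GPR, which the unaligned
// float64 load/store helpers use to reverse byte order.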
  MaglevAssembler::TemporaryRegisterScope temps(this);

  ledbr(double_scratch, src);

  TemporaryRegisterScope scope(this);

  lrvgr(scratch, scratch);

  TemporaryRegisterScope scope(this);

  lrvgr(scratch, scratch);

    Label* max, Label* done) {
  TemporaryRegisterScope temps(this);

template <typename NodeT>

  tmll(scratch, Operand(JSArrayBuffer::WasDetachedBit::kMask));
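// Map-bit predicates: a value is callable-and-not-undetectable when its map
// has IsCallableBit set and IsUndetectableBit clear, so the helpers below mask
// out both bits of Map::Bits1 and compare the result.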
    Register map, Register scratch) {

  And(scratch, Operand(Map::Bits1::IsUndetectableBit::kMask |
                       Map::Bits1::IsCallableBit::kMask));
  CmpU32(scratch, Operand(Map::Bits1::IsCallableBit::kMask));

    Register map, Register scratch) {

      Operand(Map::Bits1::IsUndetectableBit::kMask |
              Map::Bits1::IsCallableBit::kMask));

    Register heap_object) {
  LoadMap(instance_type, heap_object);

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
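// Object-type checks: the fragments below compare an object's instance type
// (or instance-type range) and branch via Branch(), which handles fallthrough
// for either the true or the false successor.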
    Register heap_object, InstanceType type, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  Branch(kEqual, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

    Label* if_true, Label::Distance true_distance, bool fallthrough_when_true,

    bool fallthrough_when_false) {
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

         if_false, false_distance, fallthrough_when_false);

  static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

                           FIRST_JS_RECEIVER_TYPE);
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

    Register map, Register instance_type_out, InstanceType lower_limit,

  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();

                           lower_limit, higher_limit);

  JumpIf(cond, target, distance);

    BasicBlock* if_true, BasicBlock* if_false, BasicBlock* next_block,
    BasicBlock* nan_failed) {

  Branch(cond, if_true, if_false, next_block);

    int num_double_registers) {
  TemporaryRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
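// JumpIfByte() materializes the byte constant in a scratch register before
// comparing; the hole-NaN helpers move the double's bit pattern into a GPR
// with lgdr, shift the upper 32 bits down, and compare against the hole NaN
// marker.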
  MaglevAssembler::TemporaryRegisterScope temps(this);

  mov(scratch, Operand(byte));

  ZoneLabelRef is_not_hole(this);

          ZoneLabelRef is_hole, ZoneLabelRef is_not_hole) {
        masm->lgdr(scratch, value);
        masm->ShiftRightU64(scratch, scratch, Operand(32));

        masm->Jump(*is_not_hole);

      value, scratch, is_hole, is_not_hole));

  MaglevAssembler::TemporaryRegisterScope temps(this);

  lgdr(scratch, value);

  MaglevAssembler::TemporaryRegisterScope temps(this);

  CmpU32(scratch, operand);
  CmpS64(r1, Operand(value));

  CmpU64(r1, Operand(value));

  CmpS32(r1, Operand(value));

  CmpU32(r1, Operand(value));

  CmpS32(r1, Operand(value));

  CmpU32(r1, Operand(value));

    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  CmpS32(r1, Operand(value));

  CmpU32(r1, Operand(value));

         if_false, false_distance, fallthrough_when_false);

    Register r1, Register r2, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

         if_false, false_distance, fallthrough_when_false);

    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,

  CmpS64(r1, Operand(value));

  CmpU64(r1, Operand(value));

         if_false, false_distance, fallthrough_when_false);
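// The paired CmpS*/CmpU* calls above select signed or unsigned comparison
// based on the condition; the bit-test helpers below use the s390
// test-under-mask instructions (tmll/tmy) and branch with bne (any bit set)
// or beq (all bits clear).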
  MaglevAssembler::TemporaryRegisterScope temps(this);

  mov(scratch2, Operand(right));
  LoadS8(scratch2, scratch2);
  CmpS32(scratch, scratch2);

  JumpIf(cond, target, distance);

    Handle<HeapObject> obj,

  MaglevAssembler::TemporaryRegisterScope temps(this);

  JumpIf(cond, target, distance);

  And(r0, value, Operand(mask));

  tmy(operand, Operand(mask));
  bne(target, distance);

  And(r0, value, Operand(mask));

  tmy(operand, Operand(mask));
  beq(target, distance);

    Register heap_number) {

    Register heap_number) {

    Register heap_number) {

  Assert(eq, AbortReason::kStackAccessBelowStackPointer);
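// FunctionEntryStackCheck(): compute sp - stack_check_offset and compare it
// against the interrupt stack limit; the caller uses the resulting condition
// to decide whether a stack guard call is needed.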
    int stack_check_offset) {
  TemporaryRegisterScope temps(this);
  Register interrupt_stack_limit = temps.AcquireScratch();

  mov(stack_cmp_reg, sp);
  lay(stack_cmp_reg, MemOperand(stack_cmp_reg, -stack_check_offset));

  CmpU64(stack_cmp_reg, interrupt_stack_limit);

template <typename NodeT>