#ifndef V8_MAGLEV_MAGLEV_ASSEMBLER_H_
#define V8_MAGLEV_MAGLEV_ASSEMBLER_H_

// ... (includes elided in this excerpt)

namespace v8 {
namespace internal {
namespace maglev {
// Helpers that pick the space-specific external reference or builtin for a
// given AllocationType.
inline ExternalReference SpaceAllocationTopAddress(Isolate* isolate,
                                                   AllocationType alloc_type) {
  return alloc_type == AllocationType::kYoung
             ? ExternalReference::new_space_allocation_top_address(isolate)
             : ExternalReference::old_space_allocation_top_address(isolate);
}

inline ExternalReference SpaceAllocationLimitAddress(
    Isolate* isolate, AllocationType alloc_type) {
  return alloc_type == AllocationType::kYoung
             ? ExternalReference::new_space_allocation_limit_address(isolate)
             : ExternalReference::old_space_allocation_limit_address(isolate);
}

inline Builtin AllocateBuiltin(AllocationType alloc_type) {
  return alloc_type == AllocationType::kYoung
             ? Builtin::kAllocateInYoungGeneration
             : Builtin::kAllocateInOldGeneration;
}
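// Illustrative sketch (not part of the header): a typical inline-allocation
// fast path combines the three helpers above. The sequence below is an
// assumption for illustration, not the actual Maglev allocation code.
//
//   ExternalReference top = SpaceAllocationTopAddress(isolate, alloc_type);
//   ExternalReference limit = SpaceAllocationLimitAddress(isolate, alloc_type);
//   // Bump `top` by the object size; if the result passes `limit`, call the
//   // runtime through the matching builtin instead:
//   Builtin fallback = AllocateBuiltin(alloc_type);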
 
 
class MaglevAssembler : public MacroAssembler {
 public:
  MaglevAssembler(Isolate* isolate, Zone* zone,
                  MaglevCodeGenState* code_gen_state)
      : MacroAssembler(isolate, zone /* ... */),
        code_gen_state_(code_gen_state) {}
 
 
  static constexpr RegList GetAllocatableRegisters() {
#if defined(V8_TARGET_ARCH_ARM)
    return kAllocatableGeneralRegisters - kMaglevExtraScratchRegister;
#elif defined(V8_TARGET_ARCH_RISCV64)
    return kAllocatableGeneralRegisters - kMaglevExtraScratchRegister -
           kMaglevFlagsRegister;
#else
    return kAllocatableGeneralRegisters;
#endif
  }

  static constexpr DoubleRegList GetAllocatableDoubleRegisters() {
    // (a RISC-V-specific register subset is elided in this excerpt)
    return kAllocatableDoubleRegisters;
  }
 
 
  inline int GetFramePointerOffsetForStackSlot(
      const compiler::AllocatedOperand& operand) {
    int index = operand.index();
    if (operand.representation() != MachineRepresentation::kTagged) {
      // Untagged slots live below the tagged slots in the frame.
      index += code_gen_state()->tagged_slots();
    }
    return GetFramePointerOffsetForStackSlot(index);
  }
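  // Worked example (illustrative, using the int overload defined near the
  // bottom of the class): with 4 tagged slots, tagged slot 2 resolves to
  //   StandardFrameConstants::kExpressionsOffset - 2 * kSystemPointerSize,
  // while untagged slot 0 is first rebased past the tagged area:
  //   index = 0 + tagged_slots() = 4.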
 
 
  template <typename Dest, typename Source>
  void MoveRepr(MachineRepresentation repr, Dest dst, Source src);
  // ...
  void Branch(Condition condition, Label* if_true,
              Label::Distance true_distance, bool fallthrough_when_true,
              Label* if_false, Label::Distance false_distance,
              bool fallthrough_when_false);
 
  inline void LoadFixedArrayElementWithoutDecompressing(Register result,
                                                        Register array,
                                                        Register index);
 
  template <typename BitField>
  void LoadBitField(Register result, MemOperand operand) {
    // Load just enough bytes to cover the bitfield's base type, then decode.
    static constexpr int load_size = sizeof(typename BitField::BaseType);
    LoadUnsignedField(result, operand, load_size);
    DecodeField<BitField>(result);
  }
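  // Illustrative sketch (the bitfield, offset, and registers are assumed, not
  // from this header): decode a base::BitField directly from an in-object
  // field.
  //
  //   using KindBits = base::BitField<uint8_t, 3, 5>;
  //   __ LoadBitField<KindBits>(scratch,
  //                             FieldMemOperand(object, kSomeFieldOffset));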
 
 
  inline void SetSlotAddressForTaggedField(Register slot_reg, Register object,
                                           int offset);
  inline void SetSlotAddressForFixedArrayElement(Register slot_reg,
                                                 Register object,
                                                 Register index);
 
  // Field stores address the slot by constant offset; element stores by a
  // register-held index.
  template <StoreMode store_mode>
  using OffsetTypeFor = std::conditional_t<store_mode == kField, int, Register>;
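  // Illustrative sketch (kElement and index_reg are assumed names): the alias
  // picks the offset's type at compile time, so one template serves both
  // store shapes.
  //
  //   OffsetTypeFor<kField> off = JSObject::kHeaderSize;  // plain int
  //   OffsetTypeFor<kElement> off = index_reg;            // Register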
 
  template <StoreMode store_mode>
  void CheckAndEmitDeferredWriteBarrier(Register object,
                                        OffsetTypeFor<store_mode> offset,
                                        Register value,
                                        RegisterSnapshot register_snapshot,
                                        /* ... */);
 
  inline void StoreInt32Field(Register object, int offset, int32_t value);
 
#ifdef V8_ENABLE_SANDBOX
  // ...
  inline void StoreTrustedPointerFieldNoWriteBarrier(Register object,
                                                     int offset,
                                                     Register value);
  // ...
#endif  // V8_ENABLE_SANDBOX
 
  inline void ReverseByteOrder(Register value, int element_size);
 
  inline void BuildTypedArrayDataPointer(Register data_pointer,
                                         Register object);
 
  void StringCharCodeOrCodePointAt(
      /* ... (mode, result/string/index registers, scratches) ... */
      Label* result_fits_one_byte);
  // ... (a related string helper elided; it takes a trailing
  //      CharCodeMaskMode mask_mode argument)
 
  // ... (a branch helper elided here; it carries the same
  //      bool fallthrough_when_true, Label* if_false, ... tail)
 
  // Smi-tagging helpers: tag the value and jump to |success| if it fits a Smi.
  inline void SmiTagInt32AndJumpIfSuccess(
      Register dst, Register src, Label* success,
      Label::Distance distance = Label::kFar);
  inline void SmiTagInt32AndJumpIfSuccess(
      Register reg, Label* success, Label::Distance distance = Label::kFar);
  inline void SmiTagUint32AndJumpIfSuccess(
      Register dst, Register src, Label* success,
      Label::Distance distance = Label::kFar);
  inline void SmiTagUint32AndJumpIfSuccess(
      Register reg, Label* success, Label::Distance distance = Label::kFar);
  inline void SmiTagIntPtrAndJumpIfSuccess(
      Register dst, Register src, Label* success,
      Label::Distance distance = Label::kFar);
  // ... (a related helper elided; it takes a trailing optional
  //      Register scratch = Register::no_reg())
 
  inline void MoveHeapNumber(Register dst, double value);
 
#ifdef V8_TARGET_ARCH_RISCV64
  // ...
  void Cmp(const Register& rn, int imm);
  // ...
#endif  // V8_TARGET_ARCH_RISCV64
 
  inline void MaybeEmitPlaceHolderForDeopt();
 
  inline void DefineExceptionHandlerPoint(NodeBase* node);
  inline void DefineExceptionHandlerAndLazyDeoptPoint(NodeBase* node);
 
  template <typename Function, typename... Args>
  inline Label* MakeDeferredCode(Function&& deferred_code_gen, Args&&... args);
  template <typename Function, typename... Args>
  inline void JumpToDeferredIf(Condition cond, Function&& deferred_code_gen,
                               Args&&... args);
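  // Illustrative sketch of the deferred-code idiom (the condition, lambda
  // arguments, and registers are made up for illustration):
  //
  //   __ JumpToDeferredIf(
  //       kOverflow,
  //       [](MaglevAssembler* masm, ZoneLabelRef done, Register result) {
  //         // Cold path, emitted out of line after the hot code.
  //         __ Jump(*done);
  //       },
  //       done, result);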
 
  template <typename NodeT>
  inline Label* GetDeoptLabel(NodeT* node, DeoptimizeReason reason);
  inline void EmitEagerDeoptStress(Label* label);
 
  template <typename NodeT>
  inline void EmitEagerDeopt(NodeT* node, DeoptimizeReason reason);
  template <typename NodeT>
  inline void EmitEagerDeoptIf(Condition cond, DeoptimizeReason reason,
                               NodeT* node);
  // ... (another node-templated deopt helper elided in this excerpt)
  template <typename NodeT>
  inline void EmitEagerDeoptIfSmi(NodeT* node, Register object,
                                  DeoptimizeReason reason);
  template <typename NodeT>
  inline void EmitEagerDeoptIfNotSmi(NodeT* node, Register object,
                                     DeoptimizeReason reason);
 
  inline void IncrementAddress(Register reg, int32_t delta);
 
  inline void EmitEnterExitFrame(int extra_slots, StackFrame::Type frame_type,
                                 Register c_function, Register scratch);
 
  inline void Move(Register dst, int32_t i);
  inline void Move(Register dst, uint32_t i);
 
  inline void LoadUnalignedFloat64AndReverseByteOrder(DoubleRegister dst,
                                                      Register base,
                                                      Register index);
  inline void ReverseByteOrderAndStoreUnalignedFloat64(Register base,
                                                       Register index,
                                                       DoubleRegister src);
 
  inline void NegateInt32(Register val);
 
  template <typename NodeT>
  inline void DeoptIfBufferDetached(Register array, Register scratch,
                                    NodeT* node);
  // ... (a branch helper elided here; like its siblings it ends with the
  //      bool fallthrough_when_true, Label* if_false, ...,
  //      bool fallthrough_when_false parameter tail)
 
  inline void JumpIfObjectTypeInRange(Register heap_object,
                                      InstanceType lower_limit,
                                      InstanceType higher_limit, Label* target,
                                      Label::Distance distance = Label::kFar);
  inline void JumpIfObjectTypeNotInRange(
      Register heap_object, InstanceType lower_limit,
      InstanceType higher_limit, Label* target,
      Label::Distance distance = Label::kFar);
  inline void AssertObjectTypeInRange(Register heap_object,
                                      InstanceType lower_limit,
                                      InstanceType higher_limit,
                                      AbortReason reason);
  inline void BranchOnObjectTypeInRange(
      Register heap_object, InstanceType lower_limit,
      InstanceType higher_limit, Label* if_true, Label::Distance true_distance,
      bool fallthrough_when_true, Label* if_false,
      Label::Distance false_distance, bool fallthrough_when_false);
 
#if V8_STATIC_ROOTS_BOOL
  // ...
  inline void JumpIfObjectNotInRange(Register heap_object,
                                     Tagged_t lower_limit,
                                     Tagged_t higher_limit, Label* target,
                                     Label::Distance distance = Label::kFar);
  // ...
#endif  // V8_STATIC_ROOTS_BOOL
 
  inline void JumpIfJSAnyIsNotPrimitive(Register heap_object, Label* target,
                                        Label::Distance distance = Label::kFar);
  // ... (a jump helper elided; it takes a trailing
  //      bool jump_if_true = true parameter)
 
  inline void JumpIfString(Register heap_object, Label* target,
                           Label::Distance distance = Label::kFar);
  inline void JumpIfNotString(Register heap_object, Label* target,
                              Label::Distance distance = Label::kFar);
 
  inline void CheckJSAnyIsStringAndBranch(Register heap_object, Label* if_true,
                                          Label::Distance true_distance,
                                          bool fallthrough_when_true,
                                          Label* if_false,
                                          Label::Distance false_distance,
                                          bool fallthrough_when_false);
 
  template <typename NodeT>
  inline void CompareInstanceTypeRangeAndEagerDeoptIf(
      Register map, InstanceType lower_limit, InstanceType higher_limit,
      /* ... */ NodeT* node);
  // ... (three further node-templated compare-and-deopt helpers elided in
  //      this excerpt)
  inline void PrepareCallCFunction(int num_reg_arguments,
                                   int num_double_registers = 0);
 
  template <Builtin kBuiltin, typename... Args>
  inline void CallBuiltin(Args&&... args);
 
  inline void JumpToDeopt(Label* target);
 
  // ... (three more compare-and-branch helpers elided in this excerpt; each
  //      ends with the usual Label* if_true / true_distance /
  //      fallthrough_when_true, Label* if_false / false_distance /
  //      fallthrough_when_false parameter tail)
 
  inline void CompareByteAndJumpIf(MemOperand left, int8_t right,
                                   Condition cond, Register scratch,
                                   Label* target,
                                   Label::Distance distance = Label::kFar);
 
  inline void CompareDoubleAndJumpIfZeroOrNaN(
      DoubleRegister reg, Label* target,
      Label::Distance distance = Label::kFar);
  inline void CompareDoubleAndJumpIfZeroOrNaN(
      MemOperand operand, Label* target,
      Label::Distance distance = Label::kFar);
 
  inline void TestInt32AndJumpIfAnySet(MemOperand operand, int32_t mask,
                                       Label* target,
                                       Label::Distance distance = Label::kFar);
  inline void TestUint8AndJumpIfAnySet(MemOperand operand, uint8_t mask,
                                       Label* target,
                                       Label::Distance distance = Label::kFar);
 
  inline void TestInt32AndJumpIfAllClear(Register r1, int32_t mask,
                                         Label* target,
                                         Label::Distance distance = Label::kFar);
  inline void TestInt32AndJumpIfAllClear(MemOperand operand, int32_t mask,
                                         Label* target,
                                         Label::Distance distance = Label::kFar);
  inline void TestUint8AndJumpIfAllClear(MemOperand operand, uint8_t mask,
                                         Label* target,
                                         Label::Distance distance = Label::kFar);
 
  using MacroAssembler::Pop;

  template <typename... T>
  inline void Push(T... vals);
  template <typename... T>
  inline void PushReverse(T... vals);
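  // Illustrative sketch (registers assumed): Push accepts a mixed variadic
  // argument list; PushReverse pushes the same list in reverse order, for
  // consumers that pop arguments left-to-right.
  //
  //   __ Push(kContextRegister, slot_reg);
  //   __ PushReverse(arg0, arg1, arg2);  // arg2 is pushed first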
 
  void OSRPrologue(Graph* graph);
  void Prologue(Graph* graph);
 
  inline void FinishCode();

  inline void AssertStackSizeCorrect();
 
  void MaybeEmitDeoptBuiltinsCall(size_t eager_deopt_count,
                                  Label* eager_deopt_entry,
                                  size_t lazy_deopt_count,
                                  Label* lazy_deopt_entry);
 
  void GenerateCheckConstTrackingLetCellFooter(Register context, Register data,
                                               int index, Label* done);
 
  void TryMigrateInstanceAndMarkMapAsMigrationTarget(
      /* ... object register and register snapshot ... */);
 
  compiler::NativeContextRef native_context() const {
    return code_gen_state()->broker()->target_native_context();
  }

  MaglevCodeGenState* code_gen_state() const { return code_gen_state_; }

  MaglevSafepointTableBuilder* safepoint_table_builder() const {
    return code_gen_state()->safepoint_table_builder();
  }

  MaglevCompilationInfo* compilation_info() const {
    return code_gen_state()->compilation_info();
  }

  TemporaryRegisterScope* scratch_register_scope() const {
    return scratch_register_scope_;
  }
 
 
  bool allow_allocate() const { return allow_allocate_; }
  void set_allow_allocate(bool value) { allow_allocate_ = value; }

  bool allow_call() const { return allow_call_; }
  void set_allow_call(bool value) { allow_call_ = value; }

  bool allow_deferred_call() const { return allow_deferred_call_; }
  void set_allow_deferred_call(bool value) { allow_deferred_call_ = value; }
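  // Illustrative sketch (usage assumed): the flags act as assertion guards; a
  // caller can forbid allocation across a delicate region and restore the old
  // state afterwards.
  //
  //   bool saved = masm->allow_allocate();
  //   masm->set_allow_allocate(false);
  //   /* ... emit code that must not allocate ... */
  //   masm->set_allow_allocate(saved);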
 
  template <typename Derived>
  class TemporaryRegisterScopeBase;

  constexpr int GetFramePointerOffsetForStackSlot(int index) {
    return StandardFrameConstants::kExpressionsOffset -
           index * kSystemPointerSize;
  }

  MaglevCodeGenState* const code_gen_state_;
  TemporaryRegisterScope* scratch_register_scope_ = nullptr;

  bool allow_allocate_ = false;
  bool allow_call_ = false;
  bool allow_deferred_call_ = false;
};
 
 
template <typename Derived>
class MaglevAssembler::TemporaryRegisterScopeBase {
  // Tracks the scratch registers available to the current code region. The
  // base class resets itself by CRTP-dispatching to the architecture-specific
  // derived scope:
  //
  //   static_cast<Derived*>(this)->ResetToDefaultImpl();
  //
  // ... (members and remaining methods elided in this excerpt)
};
 
 
 
// Inside SaveRegisterStateForCall's safepoint definition (enclosing method
// elided): record which pushed registers hold tagged values, then tell the
// safepoint how many extra spill slots the saved register state occupies.
    int pushed_reg_index = 0;
    for (Register reg : snapshot_.live_registers) {
      if (snapshot_.live_tagged_registers.has(reg)) {
        safepoint.DefineTaggedRegister(pushed_reg_index);
      }
      pushed_reg_index++;
    }
#ifdef V8_TARGET_ARCH_ARM64
    // arm64 pushes registers in pairs, so spill-slot counts stay even.
    pushed_reg_index = RoundUp<2>(pushed_reg_index);
#endif
    int num_double_slots = snapshot_.live_double_registers.Count() *
                           (kDoubleSize / kSystemPointerSize);
#ifdef V8_TARGET_ARCH_ARM64
    num_double_slots = RoundUp<2>(num_double_slots);
#endif
    safepoint.SetNumExtraSpillSlots(pushed_reg_index + num_double_slots);
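    // Worked example (illustrative): on arm64 with 3 live general registers,
    // 2 live doubles, and 8-byte pointers and doubles:
    //   pushed_reg_index  = RoundUp<2>(3) = 4   // keeps sp 16-byte aligned
    //   num_double_slots  = RoundUp<2>(2 * (kDoubleSize / kSystemPointerSize))
    //                     = RoundUp<2>(2) = 2
    //   extra spill slots = 4 + 2 = 6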
 
 
 
inline bool MaglevAssembler::IsDeoptLabel(Label* label) {
  for (EagerDeoptInfo* deopt : code_gen_state_->eager_deopts()) {
    if (deopt->deopt_entry_label() == label) {
      return true;
    }
  }
  return false;
}
 
 
template <typename NodeT>
inline Label* MaglevAssembler::GetDeoptLabel(NodeT* node,
                                             DeoptimizeReason reason) {
  static_assert(NodeT::kProperties.can_eager_deopt());
  EagerDeoptInfo* deopt_info = node->eager_deopt_info();
  if (deopt_info->reason() != DeoptimizeReason::kUnknown) {
    DCHECK_EQ(deopt_info->reason(), reason);
  }
  deopt_info->set_reason(reason);
  return deopt_info->deopt_entry_label();
}
 
 
// The EmitEagerDeopt* helpers declared in the class are defined along the
// same lines; their bodies are elided in this excerpt.
template <typename NodeT>
inline void MaglevAssembler::EmitEagerDeopt(NodeT* node,
                                            DeoptimizeReason reason) {
  // ...
}
// ... (EmitEagerDeoptIf, EmitEagerDeoptIfSmi, and EmitEagerDeoptIfNotSmi
//      follow the same pattern)
 
template <typename T>
class RepeatIterator {
 public:
  // A trivial random-access iterator that yields the same value on every
  // dereference; its position is tracked solely by count_.
  using iterator_category = std::random_access_iterator_tag;
  using difference_type = int;
  // ...
  RepeatIterator(T val, int count) : val_(val), count_(count) {}
  bool operator!=(const RepeatIterator<T>& that) const {
    return count_ != that.count_;
  }
  bool operator==(const RepeatIterator<T>& that) const {
    return count_ == that.count_;
  }
  difference_type operator-(const RepeatIterator<T>& it) const {
    return count_ - it.count_;
  }
  // ...
 private:
  T val_;
  int count_;
};
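// Illustrative sketch (usage assumed): a pair of RepeatIterators forms a
// range that yields the same value `count` times, e.g. to push N identical
// values without writing out each one:
//
//   for (Register r : RepeatValue(scratch, 3)) masm->Push(r);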
 
 
 
template <typename T>
auto RepeatValue(T val, int count) {
  return make_iterator_range(RepeatIterator<T>(val, 0),
                             RepeatIterator<T>(val, count));
}
// ... (a second helper template elided in this excerpt)
 
// From ToCondition(AssertCondition): each enumerator maps to the like-named
// machine condition via the ASSERT_CONDITION X-macro:
//   case AssertCondition::k##Name: return k##Name;
 
constexpr Condition ConditionFor(Operation operation) {
  switch (operation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return kEqual;
    case Operation::kLessThan:
      return kLessThan;
    case Operation::kLessThanOrEqual:
      return kLessThanEqual;
    case Operation::kGreaterThan:
      return kGreaterThan;
    case Operation::kGreaterThanOrEqual:
      return kGreaterThanEqual;
    default:
      UNREACHABLE();
  }
}
 
 
constexpr Condition UnsignedConditionFor(Operation operation) {
  switch (operation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return kEqual;
    case Operation::kLessThan:
      return kUnsignedLessThan;
    case Operation::kLessThanOrEqual:
      return kUnsignedLessThanEqual;
    case Operation::kGreaterThan:
      return kUnsignedGreaterThan;
    case Operation::kGreaterThanOrEqual:
      return kUnsignedGreaterThanEqual;
    default:
      UNREACHABLE();
  }
}
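// Illustrative example: the two mappings differ only on inequalities;
// equality is sign-agnostic.
//
//   ConditionFor(Operation::kLessThan)         == kLessThan
//   UnsignedConditionFor(Operation::kLessThan) == kUnsignedLessThan
//   UnsignedConditionFor(Operation::kEqual)    == kEqual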
 
 
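}  // namespace maglev
}  // namespace internal
}  // namespace v8

#endif  // V8_MAGLEV_MAGLEV_ASSEMBLER_H_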