#ifndef V8_MAGLEV_MAGLEV_GRAPH_H_
#define V8_MAGLEV_MAGLEV_GRAPH_H_
  void Add(BasicBlock* block) {
    if (block->has_id()) {
      // A block is only re-added when non-eager inlining is enabled.
      DCHECK(v8_flags.maglev_non_eager_inlining ||
             v8_flags.turbolev_non_eager_inlining);
    }
    // ...
  }
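A minimal caller-side sketch, assuming only the two members listed further down (static Graph* New(Zone*, bool is_osr) and void Add(BasicBlock*)); the helper name and the pre-built entry_block are illustrative, not part of this header:

Graph* BuildSingleBlockGraph(Zone* zone, BasicBlock* entry_block) {
  // Graphs are zone-allocated; blocks are appended once they are constructed.
  Graph* graph = Graph::New(zone, /*is_osr=*/false);
  graph->Add(entry_block);
  return graph;
}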
  template <typename Function>
  void IterateGraphAndSweepDeadBlocks(Function&& is_dead) {
    auto current = blocks_.begin();
    auto last_non_dead = current;
    while (current != blocks_.end()) {
      if (is_dead(*current)) {
        (*current)->mark_dead();
      } else {
        // Compact surviving blocks towards the front of blocks_.
        if (current != last_non_dead) *last_non_dead = *current;
        ++last_non_dead;
      }
      ++current;
    }
    // Drop the tail that now only holds dead (or duplicated) entries.
    if (current != last_non_dead) {
      blocks_.resize(blocks_.size() - (current - last_non_dead));
    }
  }
 
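A hedged usage sketch for the sweep hook above: the predicate shape (BasicBlock* in, bool out) follows the is_dead(*current) call, while RemoveUnreachableBlocks and the reachability set are illustrative assumptions rather than existing V8 code:

#include <unordered_set>

void RemoveUnreachableBlocks(Graph* graph,
                             const std::unordered_set<BasicBlock*>& reachable) {
  // Blocks absent from the reachability set are marked dead and compacted away.
  graph->IterateGraphAndSweepDeadBlocks(
      [&](BasicBlock* block) { return reachable.count(block) == 0; });
}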
  compiler::OptionalScopeInfoRef TryGetScopeInfoForContextLoad(
      ValueNode* context, int offset, compiler::JSHeapBroker* broker) {
    compiler::OptionalScopeInfoRef cur = TryGetScopeInfo(context, broker);
    if (offset == Context::OffsetOfElementAt(Context::EXTENSION_INDEX)) {
      return cur;
    }
    CHECK_EQ(offset, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX));
    if (cur.has_value()) {
      cur = (*cur).OuterScopeInfo(broker);
      // Skip scopes that do not allocate a context of their own.
      while (!cur->HasContext() && cur->HasOuterScopeInfo()) {
        cur = cur->OuterScopeInfo(broker);
      }
      if (cur->HasContext()) {
        return cur;
      }
    }
    return {};
  }
 
  compiler::OptionalScopeInfoRef TryGetScopeInfo(ValueNode* context,
                                                 compiler::JSHeapBroker* broker) {
    compiler::OptionalScopeInfoRef res;
    if (auto context_const = context->TryCast<Constant>()) {
      // A constant context: read its ScopeInfo straight off the heap object.
      res = context_const->object().AsContext().scope_info(broker);
      DCHECK(res->HasContext());
    } else if (auto load = context->TryCast</* context-slot load node */>()) {
      // A context loaded from another context's slot: resolve via the chain.
      compiler::OptionalScopeInfoRef cur = TryGetScopeInfoForContextLoad(
          load->input(0).node(), load->offset(), broker);
      if (cur.has_value()) res = cur;
    } else if (auto load_script =
                   context->TryCast</* script-context-slot load node */>()) {
      compiler::OptionalScopeInfoRef cur = TryGetScopeInfoForContextLoad(
          load_script->input(0).node(), load_script->offset(), broker);
      if (cur.has_value()) res = cur;
    }
    // ...
    scope_infos_[context] = res;  // Cache the answer for later lookups.
    return res;
  }
 
  void record_scope_info(ValueNode* context,
                         compiler::OptionalScopeInfoRef scope_info) {
    scope_infos_[context] = scope_info;
  }
 
 
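A hedged sketch of how a caller might combine the scope-info helpers; TryWalkScopeChain and depth are illustrative names, and only members shown above are used (TryGetScopeInfo, HasContext, HasOuterScopeInfo, OuterScopeInfo):

compiler::OptionalScopeInfoRef TryWalkScopeChain(Graph* graph, ValueNode* context,
                                                 int depth,
                                                 compiler::JSHeapBroker* broker) {
  compiler::OptionalScopeInfoRef cur = graph->TryGetScopeInfo(context, broker);
  while (cur.has_value() && depth-- > 0) {
    // Move to the enclosing scope, skipping scopes without their own context.
    if (!cur->HasOuterScopeInfo()) return {};
    cur = cur->OuterScopeInfo(broker);
    while (!cur->HasContext() && cur->HasOuterScopeInfo()) {
      cur = cur->OuterScopeInfo(broker);
    }
    if (!cur->HasContext()) return {};
  }
  return cur;
}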
 
 
ZoneMap< InlinedAllocation *, SmallAllocationVector > allocations_escape_map_
 
ZoneMap< int, TaggedIndexConstant * > & tagged_index()
 
void set_max_call_stack_args(uint32_t stack_slots)
 
ZoneUnorderedMap< ValueNode *, compiler::OptionalScopeInfoRef > scope_infos_
 
void record_scope_info(ValueNode *context, compiler::OptionalScopeInfoRef scope_info)
 
bool has_recursive_calls() const
 
ZoneMap< Address, ExternalConstant * > & external_references()
 
ZoneMap< uint32_t, Uint32Constant * > & uint32()
 
BasicBlock * operator[](int i)
 
ZoneMap< InlinedAllocation *, SmallAllocationVector > & allocations_elide_map()
 
ZoneVector< Node * > & node_buffer()
 
ZoneVector< OptimizedCompilationInfo::InlinedFunctionHolder > inlined_functions_
 
Graph(Zone *zone, bool is_osr)
 
ZoneVector< BasicBlock * > & blocks()
 
uint32_t untagged_stack_slots() const
 
uint32_t max_deopted_stack_size() const
 
BlockConstIterator end() const
 
ZoneVector< InitialValue * > parameters_
 
compiler::OptionalScopeInfoRef TryGetScopeInfoForContextLoad(ValueNode *context, int offset, compiler::JSHeapBroker *broker)
 
BasicBlock::Id max_block_id() const
 
RegList & register_inputs()
 
bool has_recursive_calls_
 
compiler::ZoneRefMap< compiler::ObjectRef, Constant * > & constants()
 
ZoneVector< MaglevCallSiteInfo * > & inlineable_calls()
 
BasicBlock * last_block() const
 
BlockConstReverseIterator rend() const
 
BlockConstReverseIterator rbegin() const
 
ZoneVector< InitialValue * > & osr_values()
 
void set_untagged_stack_slots(uint32_t stack_slots)
 
bool has_resumable_generator() const
 
ZoneMap< int32_t, Int32Constant * > & int32()
 
ZoneMap< InlinedAllocation *, SmallAllocationVector > allocations_elide_map_
 
ZoneMap< uint32_t, Uint32Constant * > uint32_
 
compiler::ZoneRefMap< compiler::ObjectRef, Constant * > constants_
 
compiler::OptionalScopeInfoRef TryGetScopeInfo(ValueNode *context, compiler::JSHeapBroker *broker)
 
uint32_t max_call_stack_args_
 
void set_has_resumable_generator()
 
ZoneVector< BasicBlock * > blocks_
 
void IterateGraphAndSweepDeadBlocks(Function &&is_dead)
 
void set_tagged_stack_slots(uint32_t stack_slots)
 
uint32_t max_deopted_stack_size_
 
void add_peeled_bytecode_size(int size)
 
compiler::ZoneRefMap< compiler::HeapObjectRef, TrustedConstant * > trusted_constants_
 
ZoneVector< OptimizedCompilationInfo::InlinedFunctionHolder > & inlined_functions()
 
ZoneMap< uint64_t, Float64Constant * > float_
 
compiler::ZoneRefMap< compiler::HeapObjectRef, TrustedConstant * > & trusted_constants()
 
ZoneVector< InitialValue * > osr_values_
 
void set_has_recursive_calls(bool value)
 
void Add(BasicBlock *block)
 
ZoneMap< InlinedAllocation *, SmallAllocationVector > & allocations_escape_map()
 
uint32_t untagged_stack_slots_
 
BlockConstIterator begin() const
 
uint32_t max_call_stack_args() const
 
void add_inlined_bytecode_size(int size)
 
int total_peeled_bytecode_size_
 
static Graph * New(Zone *zone, bool is_osr)
 
uint32_t tagged_stack_slots_
 
int total_inlined_bytecode_size() const
 
ZoneMap< int32_t, Int32Constant * > int32_
 
const BasicBlock * operator[](int i) const
 
ZoneMap< Address, ExternalConstant * > external_references_
 
int total_peeled_bytecode_size() const
 
ZoneVector< InitialValue * > & parameters()
 
ZoneVector< Node * > node_buffer_
 
ZoneMap< int, TaggedIndexConstant * > tagged_index_
 
ZoneMap< RootIndex, RootConstant * > root_
 
uint32_t min_maglev_stackslots_for_unoptimized_frame_size()
 
int total_inlined_bytecode_size_
 
ZoneMap< uint64_t, Float64Constant * > & float64()
 
BasicBlock::Id max_block_id_
 
ZoneMap< int, SmiConstant * > & smi()
 
ZoneMap< int, SmiConstant * > smi_
 
void set_max_deopted_stack_size(uint32_t size)
 
void set_blocks(ZoneVector< BasicBlock * > blocks)
 
bool has_resumable_generator_
 
ZoneMap< RootIndex, RootConstant * > & root()
 
ZoneVector< MaglevCallSiteInfo * > inlineable_calls_
 
uint32_t tagged_stack_slots() const
 
 
ZoneVector< BasicBlock * >::const_reverse_iterator BlockConstReverseIterator
 
 
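The per-value maps above (smi(), int32(), uint32(), float64(), root(), constants(), ...) map a value to its constant node, which lets a builder reuse one node per distinct value. A hedged lookup-side sketch; TryFindInt32Constant is an illustrative helper, not part of this header:

Int32Constant* TryFindInt32Constant(Graph* graph, int32_t value) {
  // Return the cached node for this value, or nullptr if none has been created.
  auto& cache = graph->int32();
  auto it = cache.find(value);
  return it == cache.end() ? nullptr : it->second;
}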