#include <maglev-graph.h>
Public Member Functions:

Graph(Zone* zone, bool is_osr)
BasicBlock* operator[](int i)
const BasicBlock* operator[](int i) const
int num_blocks() const
ZoneVector<BasicBlock*>& blocks()
BlockConstIterator begin() const
BlockConstIterator end() const
BlockConstReverseIterator rbegin() const
BlockConstReverseIterator rend() const
BasicBlock* last_block() const
void Add(BasicBlock* block)
void set_blocks(ZoneVector<BasicBlock*> blocks)
template<typename Function> void IterateGraphAndSweepDeadBlocks(Function&& is_dead)
uint32_t tagged_stack_slots() const
uint32_t untagged_stack_slots() const
uint32_t max_call_stack_args() const
uint32_t max_deopted_stack_size() const
void set_tagged_stack_slots(uint32_t stack_slots)
void set_untagged_stack_slots(uint32_t stack_slots)
void set_max_call_stack_args(uint32_t stack_slots)
void set_max_deopted_stack_size(uint32_t size)
int total_inlined_bytecode_size() const
void add_inlined_bytecode_size(int size)
int total_peeled_bytecode_size() const
void add_peeled_bytecode_size(int size)
ZoneMap<RootIndex, RootConstant*>& root()
ZoneVector<InitialValue*>& osr_values()
ZoneMap<int, SmiConstant*>& smi()
ZoneMap<int, TaggedIndexConstant*>& tagged_index()
ZoneMap<int32_t, Int32Constant*>& int32()
ZoneMap<uint32_t, Uint32Constant*>& uint32()
ZoneMap<uint64_t, Float64Constant*>& float64()
ZoneMap<Address, ExternalConstant*>& external_references()
ZoneVector<InitialValue*>& parameters()
ZoneVector<MaglevCallSiteInfo*>& inlineable_calls()
ZoneVector<Node*>& node_buffer()
ZoneMap<InlinedAllocation*, SmallAllocationVector>& allocations_escape_map()
ZoneMap<InlinedAllocation*, SmallAllocationVector>& allocations_elide_map()
RegList& register_inputs()
compiler::ZoneRefMap<compiler::ObjectRef, Constant*>& constants()
compiler::ZoneRefMap<compiler::HeapObjectRef, TrustedConstant*>& trusted_constants()
ZoneVector<OptimizedCompilationInfo::InlinedFunctionHolder>& inlined_functions()
bool has_recursive_calls() const
void set_has_recursive_calls(bool value)
bool is_osr() const
uint32_t min_maglev_stackslots_for_unoptimized_frame_size()
uint32_t NewObjectId()
void set_has_resumable_generator()
bool has_resumable_generator() const
compiler::OptionalScopeInfoRef TryGetScopeInfoForContextLoad(ValueNode* context, int offset, compiler::JSHeapBroker* broker)
compiler::OptionalScopeInfoRef TryGetScopeInfo(ValueNode* context, compiler::JSHeapBroker* broker)
void record_scope_info(ValueNode* context, compiler::OptionalScopeInfoRef scope_info)
Zone* zone() const
BasicBlock::Id max_block_id() const
void* operator new(size_t, Zone*) = delete
void* operator new(size_t size, void* ptr)
void operator delete(void*, size_t)
void operator delete(void* pointer, Zone* zone) = delete

Private Attributes:

uint32_t tagged_stack_slots_ = kMaxUInt32
uint32_t untagged_stack_slots_ = kMaxUInt32
uint32_t max_call_stack_args_ = kMaxUInt32
uint32_t max_deopted_stack_size_ = kMaxUInt32
ZoneVector<BasicBlock*> blocks_
ZoneMap<RootIndex, RootConstant*> root_
ZoneVector<InitialValue*> osr_values_
ZoneMap<int, SmiConstant*> smi_
ZoneMap<int, TaggedIndexConstant*> tagged_index_
ZoneMap<int32_t, Int32Constant*> int32_
ZoneMap<uint32_t, Uint32Constant*> uint32_
ZoneMap<uint64_t, Float64Constant*> float_
ZoneMap<Address, ExternalConstant*> external_references_
ZoneVector<InitialValue*> parameters_
ZoneVector<MaglevCallSiteInfo*> inlineable_calls_
ZoneMap<InlinedAllocation*, SmallAllocationVector> allocations_escape_map_
ZoneMap<InlinedAllocation*, SmallAllocationVector> allocations_elide_map_
RegList register_inputs_
compiler::ZoneRefMap<compiler::ObjectRef, Constant*> constants_
compiler::ZoneRefMap<compiler::HeapObjectRef, TrustedConstant*> trusted_constants_
ZoneVector<OptimizedCompilationInfo::InlinedFunctionHolder> inlined_functions_
ZoneVector<Node*> node_buffer_
bool has_recursive_calls_ = false
int total_inlined_bytecode_size_ = 0
int total_peeled_bytecode_size_ = 0
bool is_osr_ = false
uint32_t object_ids_ = 0
bool has_resumable_generator_ = false
ZoneUnorderedMap<ValueNode*, compiler::OptionalScopeInfoRef> scope_infos_
BasicBlock::Id max_block_id_ = 0
uint32_t | tagged_stack_slots_ = kMaxUInt32 |
|
uint32_t | untagged_stack_slots_ = kMaxUInt32 |
|
uint32_t | max_call_stack_args_ = kMaxUInt32 |
|
uint32_t | max_deopted_stack_size_ = kMaxUInt32 |
|
ZoneVector< BasicBlock * > | blocks_ |
|
ZoneMap< RootIndex, RootConstant * > | root_ |
|
ZoneVector< InitialValue * > | osr_values_ |
|
ZoneMap< int, SmiConstant * > | smi_ |
|
ZoneMap< int, TaggedIndexConstant * > | tagged_index_ |
|
ZoneMap< int32_t, Int32Constant * > | int32_ |
|
ZoneMap< uint32_t, Uint32Constant * > | uint32_ |
|
ZoneMap< uint64_t, Float64Constant * > | float_ |
|
ZoneMap< Address, ExternalConstant * > | external_references_ |
|
ZoneVector< InitialValue * > | parameters_ |
|
ZoneVector< MaglevCallSiteInfo * > | inlineable_calls_ |
|
ZoneMap< InlinedAllocation *, SmallAllocationVector > | allocations_escape_map_ |
|
ZoneMap< InlinedAllocation *, SmallAllocationVector > | allocations_elide_map_ |
|
RegList | register_inputs_ |
|
compiler::ZoneRefMap< compiler::ObjectRef, Constant * > | constants_ |
|
compiler::ZoneRefMap< compiler::HeapObjectRef, TrustedConstant * > | trusted_constants_ |
|
ZoneVector< OptimizedCompilationInfo::InlinedFunctionHolder > | inlined_functions_ |
|
ZoneVector< Node * > | node_buffer_ |
|
bool | has_recursive_calls_ = false |
|
int | total_inlined_bytecode_size_ = 0 |
|
int | total_peeled_bytecode_size_ = 0 |
|
bool | is_osr_ = false |
|
uint32_t | object_ids_ = 0 |
|
bool | has_resumable_generator_ = false |
|
ZoneUnorderedMap< ValueNode *, compiler::OptionalScopeInfoRef > | scope_infos_ |
|
BasicBlock::Id | max_block_id_ = 0 |
|
Definition at line 25 of file maglev-graph.h.
◆ SmallAllocationVector
◆ Graph()
v8::internal::maglev::Graph::Graph(Zone* zone, bool is_osr)  [inline]
◆ Add()
void v8::internal::maglev::Graph::Add(BasicBlock* block)  [inline]
◆ add_inlined_bytecode_size()
void v8::internal::maglev::Graph::add_inlined_bytecode_size(int size)  [inline]
◆ add_peeled_bytecode_size()
void v8::internal::maglev::Graph::add_peeled_bytecode_size(int size)  [inline]
◆ allocations_elide_map()
◆ allocations_escape_map()
◆ begin()
◆ blocks()
◆ constants()
◆ end()
◆ external_references()
◆ float64()
◆ has_recursive_calls()
bool v8::internal::maglev::Graph::has_recursive_calls() const  [inline]
◆ has_resumable_generator()
bool v8::internal::maglev::Graph::has_resumable_generator() const  [inline]
◆ inlineable_calls()
◆ inlined_functions()
◆ int32()
◆ is_osr()
bool v8::internal::maglev::Graph::is_osr() const  [inline]
◆ IterateGraphAndSweepDeadBlocks()
template<typename Function>
void v8::internal::maglev::Graph::IterateGraphAndSweepDeadBlocks(Function&& is_dead)  [inline]
◆ last_block()
BasicBlock* v8::internal::maglev::Graph::last_block() const  [inline]
◆ max_block_id()
◆ max_call_stack_args()
uint32_t v8::internal::maglev::Graph::max_call_stack_args() const  [inline]
◆ max_deopted_stack_size()
uint32_t v8::internal::maglev::Graph::max_deopted_stack_size() const  [inline]
◆ min_maglev_stackslots_for_unoptimized_frame_size()
uint32_t v8::internal::maglev::Graph::min_maglev_stackslots_for_unoptimized_frame_size()  [inline]
◆ New()
static Graph* v8::internal::maglev::Graph::New(Zone* zone, bool is_osr)  [inline] [static]
◆ NewObjectId()
uint32_t v8::internal::maglev::Graph::NewObjectId()  [inline]
◆ node_buffer()
◆ num_blocks()
int v8::internal::maglev::Graph::num_blocks() const  [inline]
◆ operator[]() [1/2]
BasicBlock* v8::internal::maglev::Graph::operator[](int i)  [inline]
◆ operator[]() [2/2]
const BasicBlock* v8::internal::maglev::Graph::operator[](int i) const  [inline]
◆ osr_values()
◆ parameters()
◆ rbegin()
◆ record_scope_info()
void v8::internal::maglev::Graph::record_scope_info(ValueNode* context, compiler::OptionalScopeInfoRef scope_info)  [inline]
◆ register_inputs()
RegList& v8::internal::maglev::Graph::register_inputs()  [inline]
◆ rend()
◆ root()
◆ set_blocks()
◆ set_has_recursive_calls()
void v8::internal::maglev::Graph::set_has_recursive_calls(bool value)  [inline]
◆ set_has_resumable_generator()
void v8::internal::maglev::Graph::set_has_resumable_generator()  [inline]
◆ set_max_call_stack_args()
void v8::internal::maglev::Graph::set_max_call_stack_args(uint32_t stack_slots)  [inline]
◆ set_max_deopted_stack_size()
void v8::internal::maglev::Graph::set_max_deopted_stack_size(uint32_t size)  [inline]
◆ set_tagged_stack_slots()
void v8::internal::maglev::Graph::set_tagged_stack_slots(uint32_t stack_slots)  [inline]
◆ set_untagged_stack_slots()
void v8::internal::maglev::Graph::set_untagged_stack_slots(uint32_t stack_slots)  [inline]
◆ smi()
◆ tagged_index()
◆ tagged_stack_slots()
uint32_t v8::internal::maglev::Graph::tagged_stack_slots() const  [inline]
◆ total_inlined_bytecode_size()
int v8::internal::maglev::Graph::total_inlined_bytecode_size() const  [inline]
◆ total_peeled_bytecode_size()
int v8::internal::maglev::Graph::total_peeled_bytecode_size() const  [inline]
◆ trusted_constants()
◆ TryGetScopeInfo()
◆ TryGetScopeInfoForContextLoad()
compiler::OptionalScopeInfoRef v8::internal::maglev::Graph::TryGetScopeInfoForContextLoad(ValueNode* context, int offset, compiler::JSHeapBroker* broker)  [inline]
◆ uint32()
◆ untagged_stack_slots()
uint32_t v8::internal::maglev::Graph::untagged_stack_slots |
( |
| ) |
const |
|
inline |
◆ zone()
Zone * v8::internal::maglev::Graph::zone |
( |
| ) |
const |
|
inline |
◆ allocations_elide_map_
◆ allocations_escape_map_
◆ blocks_
◆ constants_
◆ external_references_
◆ float_
◆ has_recursive_calls_
bool v8::internal::maglev::Graph::has_recursive_calls_ = false  [private]
◆ has_resumable_generator_
bool v8::internal::maglev::Graph::has_resumable_generator_ = false  [private]
◆ inlineable_calls_
◆ inlined_functions_
◆ int32_
◆ is_osr_
bool v8::internal::maglev::Graph::is_osr_ = false  [private]
◆ max_block_id_
◆ max_call_stack_args_
uint32_t v8::internal::maglev::Graph::max_call_stack_args_ = kMaxUInt32  [private]
◆ max_deopted_stack_size_
uint32_t v8::internal::maglev::Graph::max_deopted_stack_size_ = kMaxUInt32  [private]
◆ node_buffer_
◆ object_ids_
uint32_t v8::internal::maglev::Graph::object_ids_ = 0  [private]
◆ osr_values_
◆ parameters_
◆ register_inputs_
RegList v8::internal::maglev::Graph::register_inputs_  [private]
◆ root_
◆ scope_infos_
◆ smi_
◆ tagged_index_
◆ tagged_stack_slots_
uint32_t v8::internal::maglev::Graph::tagged_stack_slots_ = kMaxUInt32  [private]
◆ total_inlined_bytecode_size_
int v8::internal::maglev::Graph::total_inlined_bytecode_size_ = 0  [private]
◆ total_peeled_bytecode_size_
int v8::internal::maglev::Graph::total_peeled_bytecode_size_ = 0  [private]
◆ trusted_constants_
◆ uint32_
◆ untagged_stack_slots_
uint32_t v8::internal::maglev::Graph::untagged_stack_slots_ = kMaxUInt32  [private]
The documentation for this class was generated from the following file: maglev-graph.h