| Member | Defined in | Attributes |
|---|---|---|
| Add(BasicBlock *block) | v8::internal::maglev::Graph | inline |
| add_inlined_bytecode_size(int size) | v8::internal::maglev::Graph | inline |
| add_peeled_bytecode_size(int size) | v8::internal::maglev::Graph | inline |
| allocations_elide_map() | v8::internal::maglev::Graph | inline |
| allocations_elide_map_ | v8::internal::maglev::Graph | private |
| allocations_escape_map() | v8::internal::maglev::Graph | inline |
| allocations_escape_map_ | v8::internal::maglev::Graph | private |
| begin() const | v8::internal::maglev::Graph | inline |
| blocks() | v8::internal::maglev::Graph | inline |
| blocks_ | v8::internal::maglev::Graph | private |
| constants() | v8::internal::maglev::Graph | inline |
| constants_ | v8::internal::maglev::Graph | private |
| end() const | v8::internal::maglev::Graph | inline |
| external_references() | v8::internal::maglev::Graph | inline |
| external_references_ | v8::internal::maglev::Graph | private |
| float64() | v8::internal::maglev::Graph | inline |
| float_ | v8::internal::maglev::Graph | private |
| Graph(Zone *zone, bool is_osr) | v8::internal::maglev::Graph | inline |
| has_recursive_calls() const | v8::internal::maglev::Graph | inline |
| has_recursive_calls_ | v8::internal::maglev::Graph | private |
| has_resumable_generator() const | v8::internal::maglev::Graph | inline |
| has_resumable_generator_ | v8::internal::maglev::Graph | private |
| inlineable_calls() | v8::internal::maglev::Graph | inline |
| inlineable_calls_ | v8::internal::maglev::Graph | private |
| inlined_functions() | v8::internal::maglev::Graph | inline |
| inlined_functions_ | v8::internal::maglev::Graph | private |
| int32() | v8::internal::maglev::Graph | inline |
| int32_ | v8::internal::maglev::Graph | private |
| is_osr() const | v8::internal::maglev::Graph | inline |
| is_osr_ | v8::internal::maglev::Graph | private |
| IterateGraphAndSweepDeadBlocks(Function &&is_dead) | v8::internal::maglev::Graph | inline |
| last_block() const | v8::internal::maglev::Graph | inline |
| max_block_id() const | v8::internal::maglev::Graph | inline |
| max_block_id_ | v8::internal::maglev::Graph | private |
| max_call_stack_args() const | v8::internal::maglev::Graph | inline |
| max_call_stack_args_ | v8::internal::maglev::Graph | private |
| max_deopted_stack_size() const | v8::internal::maglev::Graph | inline |
| max_deopted_stack_size_ | v8::internal::maglev::Graph | private |
| min_maglev_stackslots_for_unoptimized_frame_size() | v8::internal::maglev::Graph | inline |
| New(Zone *zone, bool is_osr) | v8::internal::maglev::Graph | inlinestatic |
| NewObjectId() | v8::internal::maglev::Graph | inline |
| node_buffer() | v8::internal::maglev::Graph | inline |
| node_buffer_ | v8::internal::maglev::Graph | private |
| num_blocks() const | v8::internal::maglev::Graph | inline |
| object_ids_ | v8::internal::maglev::Graph | private |
| operator delete(void *, size_t) | v8::internal::ZoneObject | inline |
| operator delete(void *pointer, Zone *zone)=delete | v8::internal::ZoneObject | |
| operator new(size_t, Zone *)=delete | v8::internal::ZoneObject | |
| operator new(size_t size, void *ptr) | v8::internal::ZoneObject | inline |
| operator[](int i) | v8::internal::maglev::Graph | inline |
| operator[](int i) const | v8::internal::maglev::Graph | inline |
| osr_values() | v8::internal::maglev::Graph | inline |
| osr_values_ | v8::internal::maglev::Graph | private |
| parameters() | v8::internal::maglev::Graph | inline |
| parameters_ | v8::internal::maglev::Graph | private |
| rbegin() const | v8::internal::maglev::Graph | inline |
| record_scope_info(ValueNode *context, compiler::OptionalScopeInfoRef scope_info) | v8::internal::maglev::Graph | inline |
| register_inputs() | v8::internal::maglev::Graph | inline |
| register_inputs_ | v8::internal::maglev::Graph | private |
| rend() const | v8::internal::maglev::Graph | inline |
| root() | v8::internal::maglev::Graph | inline |
| root_ | v8::internal::maglev::Graph | private |
| scope_infos_ | v8::internal::maglev::Graph | private |
| set_blocks(ZoneVector< BasicBlock * > blocks) | v8::internal::maglev::Graph | inline |
| set_has_recursive_calls(bool value) | v8::internal::maglev::Graph | inline |
| set_has_resumable_generator() | v8::internal::maglev::Graph | inline |
| set_max_call_stack_args(uint32_t stack_slots) | v8::internal::maglev::Graph | inline |
| set_max_deopted_stack_size(uint32_t size) | v8::internal::maglev::Graph | inline |
| set_tagged_stack_slots(uint32_t stack_slots) | v8::internal::maglev::Graph | inline |
| set_untagged_stack_slots(uint32_t stack_slots) | v8::internal::maglev::Graph | inline |
| SmallAllocationVector typedef | v8::internal::maglev::Graph | |
| smi() | v8::internal::maglev::Graph | inline |
| smi_ | v8::internal::maglev::Graph | private |
| tagged_index() | v8::internal::maglev::Graph | inline |
| tagged_index_ | v8::internal::maglev::Graph | private |
| tagged_stack_slots() const | v8::internal::maglev::Graph | inline |
| tagged_stack_slots_ | v8::internal::maglev::Graph | private |
| total_inlined_bytecode_size() const | v8::internal::maglev::Graph | inline |
| total_inlined_bytecode_size_ | v8::internal::maglev::Graph | private |
| total_peeled_bytecode_size() const | v8::internal::maglev::Graph | inline |
| total_peeled_bytecode_size_ | v8::internal::maglev::Graph | private |
| trusted_constants() | v8::internal::maglev::Graph | inline |
| trusted_constants_ | v8::internal::maglev::Graph | private |
| TryGetScopeInfo(ValueNode *context, compiler::JSHeapBroker *broker) | v8::internal::maglev::Graph | inline |
| TryGetScopeInfoForContextLoad(ValueNode *context, int offset, compiler::JSHeapBroker *broker) | v8::internal::maglev::Graph | inline |
| uint32() | v8::internal::maglev::Graph | inline |
| uint32_ | v8::internal::maglev::Graph | private |
| untagged_stack_slots() const | v8::internal::maglev::Graph | inline |
| untagged_stack_slots_ | v8::internal::maglev::Graph | private |
| zone() const | v8::internal::maglev::Graph | inline |
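
A minimal usage sketch of the iteration surface listed above (`begin()`/`end()`, `num_blocks()`, `operator[]`). This is inferred from the member signatures in the table, not copied from the V8 sources; the include paths and the exact return types are assumptions, and the code only builds inside the V8 tree where these internal headers exist.

```cpp
// Sketch only: assumes V8-internal Maglev headers are on the include path.
#include "src/maglev/maglev-basic-block.h"
#include "src/maglev/maglev-graph.h"

namespace v8::internal::maglev {

// Counts the basic blocks of a Graph two ways: via the range interface
// (begin()/end()) and via indexed access (num_blocks()/operator[]).
inline int CountBlocks(Graph* graph) {
  int count = 0;
  for (BasicBlock* block : *graph) {  // uses begin()/end()
    if (block != nullptr) ++count;
  }
  for (int i = 0; i < graph->num_blocks(); ++i) {
    BasicBlock* block = (*graph)[i];  // uses operator[](int)
    (void)block;
  }
  return count;
}

}  // namespace v8::internal::maglev
```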