v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
maglev-graph.h
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MAGLEV_MAGLEV_GRAPH_H_
#define V8_MAGLEV_MAGLEV_GRAPH_H_

#include <vector>

namespace v8 {
namespace internal {
namespace maglev {

class Graph final : public ZoneObject {
 public:
  static Graph* New(Zone* zone, bool is_osr) {
    return zone->New<Graph>(zone, is_osr);
  }

  // Shouldn't be used directly; public so that Zone::New can access it.
  Graph(Zone* zone, bool is_osr);

  BasicBlock* operator[](int i) { return blocks_[i]; }
  const BasicBlock* operator[](int i) const { return blocks_[i]; }

  int num_blocks() const { return static_cast<int>(blocks_.size()); }

  BlockConstIterator begin() const { return blocks_.begin(); }
  BlockConstIterator end() const { return blocks_.end(); }

  BasicBlock* last_block() const { return blocks_.back(); }

  void Add(BasicBlock* block) {
    if (block->has_id()) {
      // The inliner adds blocks multiple times.
      DCHECK(v8_flags.maglev_non_eager_inlining ||
             v8_flags.turbolev_non_eager_inlining);
    } else {
      block->set_id(max_block_id_++);
    }
    blocks_.push_back(block);
  }

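  // Add() assigns block ids in insertion order; a block that already carries
  // an id is being re-added (the non-eager inliner does this), so its id is
  // preserved and the DCHECK above only verifies that an inlining mode
  // permitting re-addition is enabled.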
  template <typename Function>
  void IterateGraphAndSweepDeadBlocks(Function&& is_dead) {
    auto current = blocks_.begin();
    auto last_non_dead = current;
    while (current != blocks_.end()) {
      if (is_dead(*current)) {
        (*current)->mark_dead();
      } else {
        if (current != last_non_dead) {
          // Move current to the last non-dead position.
          *last_non_dead = *current;
        }
        ++last_non_dead;
      }
      ++current;
    }
    if (current != last_non_dead) {
      blocks_.resize(blocks_.size() - (current - last_non_dead));
    }
  }

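  // The loop above is an in-place compaction: surviving blocks are shifted
  // down over dead ones (the same shifting scheme std::remove_if uses), each
  // dead block is marked via mark_dead(), and the vector is shrunk at the end,
  // so the relative order of surviving blocks is preserved. A hypothetical
  // caller pruning unreachable blocks could look like this (is_unreachable()
  // is an assumed predicate, for illustration only):
  //
  //   graph->IterateGraphAndSweepDeadBlocks(
  //       [](BasicBlock* block) { return block->is_unreachable(); });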
  uint32_t tagged_stack_slots() const { return tagged_stack_slots_; }
  uint32_t untagged_stack_slots() const { return untagged_stack_slots_; }
  uint32_t max_call_stack_args() const { return max_call_stack_args_; }

  // Running JS2, in 99.99% of the cases we have fewer than 2 dependencies.

  // If the key K of the map escapes, all the allocations in the set
  // allocations_escape_map[K] must also escape.
  ZoneMap<InlinedAllocation*, SmallAllocationVector>& allocations_escape_map() {
    return allocations_escape_map_;
  }
  // The key K of the map can be elided if it hasn't escaped and all the
  // allocations in the set allocations_elide_map[K] can also be elided.
  ZoneMap<InlinedAllocation*, SmallAllocationVector>& allocations_elide_map() {
    return allocations_elide_map_;
  }

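  // Concretely (illustrative values, not from the original source): an entry
  // A1 -> {A2, A3} in allocations_escape_map() means that if allocation A1
  // escapes, A2 and A3 must be treated as escaping as well; an entry
  // A1 -> {A2, A3} in allocations_elide_map() means A1 may only be elided if
  // A1 has not escaped and A2 and A3 can be elided too.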
  bool is_osr() const { return is_osr_; }
  uint32_t min_maglev_stackslots_for_unoptimized_frame_size() {
    DCHECK(is_osr());
    if (osr_values().size() == 0) {
      return InitialValue::stack_slot(0);
    }
    return osr_values().back()->stack_slot() + 1;
  }

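  // For example, if the last OSR value lives in stack slot 7, the unoptimized
  // frame needs at least 8 Maglev stack slots; with no OSR values the minimum
  // falls back to the slot index of stack slot 0.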
  uint32_t NewObjectId() { return object_ids_++; }

  compiler::OptionalScopeInfoRef TryGetScopeInfoForContextLoad(
      ValueNode* context, int offset, compiler::JSHeapBroker* broker) {
    compiler::OptionalScopeInfoRef cur = TryGetScopeInfo(context, broker);
    if (offset == Context::OffsetOfElementAt(Context::EXTENSION_INDEX)) {
      return cur;
    }
    CHECK_EQ(offset, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX));
    if (cur.has_value()) {
      cur = (*cur).OuterScopeInfo(broker);
      while (!cur->HasContext() && cur->HasOuterScopeInfo()) {
        cur = cur->OuterScopeInfo(broker);
      }
      if (cur->HasContext()) {
        return cur;
      }
    }
    return {};
  }

  // Resolve the scope info of a context value.
  // An empty result means we don't statically know the context's scope.
  compiler::OptionalScopeInfoRef TryGetScopeInfo(
      ValueNode* context, compiler::JSHeapBroker* broker) {
    auto it = scope_infos_.find(context);
    if (it != scope_infos_.end()) {
      return it->second;
    }
    compiler::OptionalScopeInfoRef res;
    if (auto context_const = context->TryCast<Constant>()) {
      res = context_const->object().AsContext().scope_info(broker);
      DCHECK(res->HasContext());
    } else if (auto load = context->TryCast<LoadTaggedFieldForContextSlot>()) {
      compiler::OptionalScopeInfoRef cur = TryGetScopeInfoForContextLoad(
          load->input(0).node(), load->offset(), broker);
      if (cur.has_value()) res = cur;
    } else if (auto load_script =
                   context->TryCast<LoadTaggedFieldForScriptContextSlot>()) {
      compiler::OptionalScopeInfoRef cur = TryGetScopeInfoForContextLoad(
          load_script->input(0).node(), load_script->offset(), broker);
      if (cur.has_value()) res = cur;
    } else if (context->Is<InitialValue>()) {
      // We should only fail to keep track of initial contexts originating from
      // the OSR prequel.
      // TODO(olivf): Keep track of contexts when analyzing the OSR prequel.
      DCHECK(is_osr());
    } else {
      // Any context created within a function must be registered in
      // graph()->scope_infos(). Initial contexts must be registered before
      // BuildBody. We don't track contexts in generators (yet), and around
      // eval the bytecode compiler creates contexts by calling
      // Runtime::kNewFunctionInfo directly.
      DCHECK(context->Is<Phi>() || context->Is<GeneratorRestoreRegister>() ||
             context->Is<RegisterInput>() || context->Is<CallRuntime>());
    }
    return scope_infos_[context] = res;
  }

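  // Results are memoized in scope_infos_, so repeated queries for the same
  // context node are cheap after the first resolution. Code that materializes
  // a new context is expected to register it via record_scope_info() below;
  // TryGetScopeInfo() then only has to derive scope infos for contexts it can
  // still recognize structurally (constants and context-slot loads).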
  void record_scope_info(ValueNode* context,
                         compiler::OptionalScopeInfoRef scope_info) {
    scope_infos_[context] = scope_info;
  }

  Zone* zone() const { return blocks_.zone(); }

 private:
  // Use the bits of the float as the key.
  ZoneMap<uint64_t, Float64Constant*> float_;

  bool is_osr_ = false;
  uint32_t object_ids_ = 0;
};

}  // namespace maglev
}  // namespace internal
}  // namespace v8

#endif  // V8_MAGLEV_MAGLEV_GRAPH_H_