// Returns false for nodes that are known not to allocate; conservatively
// returns true for everything else.
bool CanAllocate(const Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAbortCSADcheck:
    case IrOpcode::kBitcastTaggedToWord:
    case IrOpcode::kBitcastWordToTagged:
    case IrOpcode::kCheckTurboshaftTypeOf:
    case IrOpcode::kComment:
    case IrOpcode::kDebugBreak:
    case IrOpcode::kDeoptimizeIf:
    case IrOpcode::kDeoptimizeUnless:
    case IrOpcode::kEffectPhi:
    case IrOpcode::kIfException:
    case IrOpcode::kLoadImmutable:
    case IrOpcode::kLoadElement:
    case IrOpcode::kLoadField:
    case IrOpcode::kLoadFromObject:
    case IrOpcode::kLoadImmutableFromObject:
    case IrOpcode::kMemoryBarrier:
    case IrOpcode::kProtectedLoad:
    case IrOpcode::kLoadTrapOnNull:
    case IrOpcode::kProtectedStore:
    case IrOpcode::kStoreTrapOnNull:
    case IrOpcode::kRetain:
    case IrOpcode::kStackPointerGreaterThan:
#if V8_ENABLE_WEBASSEMBLY
    case IrOpcode::kLoadLane:
    case IrOpcode::kLoadTransform:
    case IrOpcode::kStoreLane:
    case IrOpcode::kLoadStackPointer:
    case IrOpcode::kSetStackPointer:
#endif  // V8_ENABLE_WEBASSEMBLY
    case IrOpcode::kStaticAssert:
    case IrOpcode::kStore:
    case IrOpcode::kStoreElement:
    case IrOpcode::kStoreField:
    case IrOpcode::kStoreToObject:
    case IrOpcode::kTraceInstruction:
    case IrOpcode::kInitializeImmutableInObject:
    case IrOpcode::kTrapIf:
    case IrOpcode::kTrapUnless:
    case IrOpcode::kUnalignedLoad:
    case IrOpcode::kUnalignedStore:
    case IrOpcode::kUnreachable:
    case IrOpcode::kWord32AtomicAdd:
    case IrOpcode::kWord32AtomicAnd:
    case IrOpcode::kWord32AtomicCompareExchange:
    case IrOpcode::kWord32AtomicExchange:
    case IrOpcode::kWord32AtomicLoad:
    case IrOpcode::kWord32AtomicOr:
    case IrOpcode::kWord32AtomicPairAdd:
    case IrOpcode::kWord32AtomicPairAnd:
    case IrOpcode::kWord32AtomicPairCompareExchange:
    case IrOpcode::kWord32AtomicPairExchange:
    case IrOpcode::kWord32AtomicPairLoad:
    case IrOpcode::kWord32AtomicPairOr:
    case IrOpcode::kWord32AtomicPairStore:
    case IrOpcode::kWord32AtomicPairSub:
    case IrOpcode::kWord32AtomicPairXor:
    case IrOpcode::kWord32AtomicStore:
    case IrOpcode::kWord32AtomicSub:
    case IrOpcode::kWord32AtomicXor:
    case IrOpcode::kWord64AtomicAdd:
    case IrOpcode::kWord64AtomicAnd:
    case IrOpcode::kWord64AtomicCompareExchange:
    case IrOpcode::kWord64AtomicExchange:
    case IrOpcode::kWord64AtomicLoad:
    case IrOpcode::kWord64AtomicOr:
    case IrOpcode::kWord64AtomicStore:
    case IrOpcode::kWord64AtomicSub:
    case IrOpcode::kWord64AtomicXor:
      return false;

    case IrOpcode::kCall:
      return !(CallDescriptorOf(node->op())->flags() &
               CallDescriptor::kNoAllocate);
    default:
      break;
  }
  return true;
}
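// Walks the effect chain backwards from {start} up to (but not including)
// {limit} and returns the first node found that may allocate (per
// CanAllocate above), or nullptr if no such node is reachable.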
Node* SearchAllocatingNode(Node* start, Node* limit, Zone* temp_zone) {
  ZoneQueue<Node*> queue(temp_zone);
  ZoneSet<Node*> visited(temp_zone);
  visited.insert(limit);
  queue.push(start);

  while (!queue.empty()) {
    Node* const current = queue.front();
    queue.pop();
    if (visited.find(current) == visited.end()) {
      visited.insert(current);

      if (CanAllocate(current)) {
        return current;
      }

      for (int i = 0; i < current->op()->EffectInputCount(); ++i) {
        queue.push(NodeProperties::GetEffectInput(current, i));
      }
    }
  }
  return nullptr;
}
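// Returns true if the loop whose effect phi is {loop_effect_phi} contains a
// node on any of its back-edge effect chains that may allocate.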
bool CanLoopAllocate(Node* loop_effect_phi, Zone* temp_zone) {
  Node* const control = NodeProperties::GetControlInput(loop_effect_phi);
  // Start the effect chain walk from the loop back edges.
  for (int i = 1; i < control->InputCount(); ++i) {
    if (SearchAllocatingNode(loop_effect_phi->InputAt(i), loop_effect_phi,
                             temp_zone) != nullptr) {
      return true;
    }
  }
  return false;
}
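// Returns the EffectPhi attached to the same control node as {phi}, or
// nullptr if there is none.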
Node* EffectPhiForPhi(Node* phi) {
  Node* control = NodeProperties::GetControlInput(phi);
  for (Node* use : control->uses()) {
    if (use->opcode() == IrOpcode::kEffectPhi) {
      return use;
    }
  }
  return nullptr;
}
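// Failure callback used by the memory lowering when it cannot prove that an
// eliminated write barrier is safe. It reports the offending node, any node
// that may have allocated in between, and how to trap on them in mksnapshot,
// then aborts.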
void WriteBarrierAssertFailed(Node* node, Node* object, const char* name,
                              Zone* temp_zone) {
  std::stringstream str;
  str << "MemoryOptimizer could not remove write barrier for node #"
      << node->id() << "\n";
  str << "  Run mksnapshot with --csa-trap-on-node=" << name << ","
      << node->id() << " to break in CSA code.\n";
  Node* object_position = object;
  if (object_position->opcode() == IrOpcode::kPhi) {
    object_position = EffectPhiForPhi(object_position);
  }
  Node* allocating_node = nullptr;
  if (object_position && object_position->op()->EffectOutputCount() > 0) {
    allocating_node = SearchAllocatingNode(node, object_position, temp_zone);
  }
  if (allocating_node) {
    str << "\n  There is a potentially allocating node in between:\n";
    str << "    " << *allocating_node << "\n";
    str << "  Run mksnapshot with --csa-trap-on-node=" << name << ","
        << allocating_node->id() << " to break there.\n";
    if (allocating_node->opcode() == IrOpcode::kCall) {
      str << "  If this is a never-allocating runtime call, you can add an "
             "exception to Runtime::MayAllocate.\n";
    }
  } else {
    str << "\n  It seems the store happened to something different than a "
           "direct allocation:\n";
    str << "    " << *object << "\n";
    str << "  Run mksnapshot with --csa-trap-on-node=" << name << ","
        << object->id() << " to break there.\n";
  }
  FATAL("%s", str.str().c_str());
}
MemoryOptimizer::MemoryOptimizer(
    JSHeapBroker* broker, JSGraph* jsgraph, Zone* zone,
    MemoryLowering::AllocationFolding allocation_folding,
    const char* function_debug_name, TickCounter* tick_counter, bool is_wasm)
    : graph_assembler_(broker, jsgraph, zone, BranchSemantics::kMachine),
      memory_lowering_(jsgraph, zone, &graph_assembler_, is_wasm,
                       allocation_folding, WriteBarrierAssertFailed,
                       function_debug_name),
      wasm_address_reassociation_(jsgraph, zone),
      jsgraph_(jsgraph),
      empty_state_(AllocationState::Empty(zone)),
      pending_(zone),
      tokens_(zone),
      zone_(zone),
      tick_counter_(tick_counter) {}
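// In Optimize(), once the worklist of effectful nodes has been drained, the
// WebAssembly address reassociation pass runs as a separate step when the
// corresponding flag is enabled: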
  if (v8_flags.turbo_wasm_address_reassociation) {
    wasm_address_reassociation()->Optimize();
  }
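// VisitNode() dispatches each effectful node to the matching Visit* handler;
// nodes that cannot allocate simply propagate the current allocation state.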
void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state,
                                NodeId effect_chain) {
  tick_counter_->TickAndMaybeEnterSafepoint();
  DCHECK_LT(0, node->op()->EffectInputCount());
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      // Allocate nodes were purged from the graph in effect-control
      // linearization.
      UNREACHABLE();
    case IrOpcode::kAllocateRaw:
      return VisitAllocateRaw(node, state, effect_chain);
    case IrOpcode::kCall:
      return VisitCall(node, state, effect_chain);
    case IrOpcode::kLoadFromObject:
    case IrOpcode::kLoadImmutableFromObject:
      return VisitLoadFromObject(node, state, effect_chain);
    case IrOpcode::kLoadElement:
      return VisitLoadElement(node, state, effect_chain);
    case IrOpcode::kLoadField:
      return VisitLoadField(node, state, effect_chain);
    case IrOpcode::kProtectedLoad:
      return VisitProtectedLoad(node, state, effect_chain);
    case IrOpcode::kProtectedStore:
      return VisitProtectedStore(node, state, effect_chain);
    case IrOpcode::kStoreToObject:
    case IrOpcode::kInitializeImmutableInObject:
      return VisitStoreToObject(node, state, effect_chain);
    case IrOpcode::kStoreElement:
      return VisitStoreElement(node, state, effect_chain);
    case IrOpcode::kStoreField:
      return VisitStoreField(node, state, effect_chain);
    case IrOpcode::kStore:
      return VisitStore(node, state, effect_chain);
    case IrOpcode::kStorePair:
      UNREACHABLE();
    default:
      if (!CanAllocate(node)) {
        // These operations cannot trigger GC.
        return VisitOtherEffect(node, state, effect_chain);
      }
  }
  DCHECK_EQ(0, node->op()->EffectOutputCount());
}
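// In AllocationTypeNeedsUpdateToOld(), a young allocation that is stored as
// the value of a StoreField into an old-space AllocateRaw has to be promoted
// to old space as well: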
  if (node->opcode() == IrOpcode::kStoreField && edge.index() == 1) {
    Node* parent = node->InputAt(0);
    if (parent->opcode() == IrOpcode::kAllocateRaw &&
        AllocationTypeOf(parent->op()) == AllocationType::kOld) {
      return true;
    }
  }
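// VisitAllocateRaw() propagates tenuring decisions between nested
// allocations before handing the node to MemoryLowering::ReduceAllocateRaw():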
  DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
  const AllocateParameters& allocation = AllocateParametersOf(node->op());
  AllocationType allocation_type = allocation.allocation_type();

  // Propagate tenuring from outer allocations to inner allocations: a young
  // child allocation stored into an old-space allocation is switched to old
  // space as well.
  if (allocation_type == AllocationType::kOld) {
    for (Edge const edge : node->use_edges()) {
      Node* const user = edge.from();
      if (user->opcode() == IrOpcode::kStoreField && edge.index() == 0) {
        Node* child = user->InputAt(1);
        if (child->opcode() == IrOpcode::kAllocateRaw &&
            AllocationTypeOf(child->op()) == AllocationType::kYoung) {
          NodeProperties::ChangeOp(child, node->op());
          break;
        }
      }
    }
  } else {
    // Conversely, a young allocation is promoted if one of its uses requires
    // old-space allocation.
    for (Edge const edge : node->use_edges()) {
      Node* const user = edge.from();
      if (AllocationTypeNeedsUpdateToOld(user, edge)) {
        allocation_type = AllocationType::kOld;
        break;
      }
    }
  }

  Reduction reduction = memory_lowering()->ReduceAllocateRaw(
      node, allocation_type, &state);
  CHECK(reduction.Changed() && reduction.replacement() != node);
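// The individual Visit* handlers below assert the expected opcode and
// delegate the actual lowering to the corresponding MemoryLowering::Reduce*
// helper.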
  // VisitLoadFromObject:
  DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
         node->opcode() == IrOpcode::kLoadImmutableFromObject);

  // VisitStoreToObject:
  DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
         node->opcode() == IrOpcode::kInitializeImmutableInObject);

  // VisitLoadElement:
  DCHECK_EQ(IrOpcode::kLoadElement, node->opcode());

  // VisitLoadField:
  DCHECK_EQ(IrOpcode::kLoadField, node->opcode());
  Reduction reduction = memory_lowering()->ReduceLoadField(node);
  DCHECK(reduction.Changed());
  // The load is only replaced when the sandbox or map packing rewrites it;
  // otherwise the reduction returns the node itself.
  DCHECK_IMPLIES(!V8_ENABLE_SANDBOX_BOOL && !V8_MAP_PACKING_BOOL,
                 reduction.replacement() == node);
  if ((V8_ENABLE_SANDBOX_BOOL || V8_MAP_PACKING_BOOL) &&
      reduction.replacement() != node) {
    ReplaceUsesAndKillNode(node, reduction.replacement());
  }

  // VisitProtectedLoad / VisitProtectedStore: with address reassociation
  // enabled, protected memory operations are recorded for the separate
  // WebAssembly reassociation pass.
  DCHECK_EQ(IrOpcode::kProtectedLoad, node->opcode());
  if (v8_flags.turbo_wasm_address_reassociation) {
    wasm_address_reassociation()->VisitProtectedMemOp(node, effect_chain);
  }

  DCHECK_EQ(IrOpcode::kProtectedStore, node->opcode());
  if (v8_flags.turbo_wasm_address_reassociation) {
    wasm_address_reassociation()->VisitProtectedMemOp(node, effect_chain);
  }

  // VisitStoreElement:
  DCHECK_EQ(IrOpcode::kStoreElement, node->opcode());

  // VisitStoreField:
  DCHECK_EQ(IrOpcode::kStoreField, node->opcode());

  // VisitStore:
  DCHECK_EQ(IrOpcode::kStore, node->opcode());

  // VisitCall:
  DCHECK_EQ(IrOpcode::kCall, node->opcode());
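// MergeStates() combines the allocation states that reach an effect merge:
// if all predecessors agree on a state (or at least on an allocation group),
// that information is kept; otherwise the merge falls back to the empty
// state.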
  AllocationState const* state = states.front();
  AllocationGroup* group = state->group();
  for (size_t i = 1; i < states.size(); ++i) {
    if (states[i] != state) state = nullptr;
    if (states[i]->group() != group) group = nullptr;
  }
  if (state == nullptr) {
    if (group != nullptr) {
      // No further allocations can be folded into this group, but write
      // barriers on stores to it can still be eliminated.
      state = AllocationState::Closed(group, nullptr, zone());
    } else {
      // The states come from different allocation groups.
      state = empty_state();
    }
  }
  return state;
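// EnqueueMerge() handles EffectPhi nodes: loops that may allocate restart
// from the empty state, while normal merges are buffered in pending_ until
// states for all inputs have arrived and can be merged. EnqueueUses() and
// EnqueueUse() drive the worklist along effect edges.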
  DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
  NodeId effect_chain = node->id();
  int const input_count = node->InputCount() - 1;
  Node* const control = node->InputAt(input_count);
  if (control->opcode() == IrOpcode::kLoop) {
    if (CanLoopAllocate(node, zone())) {
      // If the loop can allocate, the loop body starts from the empty state.
      EnqueueUses(node, empty_state(), effect_chain);
    } else {
      // Otherwise the state from before the loop is propagated across it.
      EnqueueUses(node, state, effect_chain);
    }
  } else {
    // A normal merge: record the state for this input and merge once states
    // for all inputs have arrived.
    NodeId const id = node->id();
    auto it = pending_.find(id);
    if (it == pending_.end()) {
      it = pending_.insert(std::make_pair(id, AllocationStates(zone()))).first;
    }
    it->second.push_back(state);
    if (it->second.size() == static_cast<size_t>(input_count)) {
      EnqueueUses(node, MergeStates(it->second), effect_chain);
      pending_.erase(it);
    }
  }

  // EnqueueUses(): follow all effect edges of {node}.
  for (Edge const edge : node->use_edges()) {
    if (NodeProperties::IsEffectEdge(edge)) {
      EnqueueUse(edge.from(), edge.index(), state, effect_chain);
    }
  }

  // EnqueueUse(): EffectPhi inputs go through the merge handling above; all
  // other effect uses become worklist tokens.
  if (node->opcode() == IrOpcode::kEffectPhi) {
    EnqueueMerge(node, index, state);
  }