escape-analysis.cc
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include "src/codegen/tick-counter.h"
#include "src/compiler/frame-states.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/state-values-utils.h"
#include "src/handles/handles.h"
#include "src/objects/map-inl.h"

#ifdef DEBUG
#define TRACE(...)                                        \
  do {                                                    \
    if (v8_flags.trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

namespace v8 {
namespace internal {
namespace compiler {

template <class T>
class Sidetable {
 public:
  explicit Sidetable(Zone* zone) : map_(zone) {}
  T& operator[](const Node* node) {
    NodeId id = node->id();
    if (id >= map_.size()) {
      map_.resize(id + 1);
    }
    return map_[id];
  }

 private:
  ZoneVector<T> map_;
};
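
// Illustrative usage sketch (editorial, not part of the original file;
// `zone` and `node` stand for values available in the surrounding compiler
// context):
//
//   Sidetable<int> visit_count(zone);
//   visit_count[node]++;  // grows the backing ZoneVector up to node->id()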

template <class T>
class SparseSidetable {
 public:
  explicit SparseSidetable(Zone* zone, T def_value = T())
      : def_value_(std::move(def_value)), map_(zone) {}
  void Set(const Node* node, T value) {
    auto iter = map_.find(node->id());
    if (iter != map_.end()) {
      iter->second = std::move(value);
    } else if (value != def_value_) {
      map_.insert(iter, std::make_pair(node->id(), std::move(value)));
    }
  }
  const T& Get(const Node* node) const {
    auto iter = map_.find(node->id());
    return iter != map_.end() ? iter->second : def_value_;
  }

 private:
  T def_value_;
  ZoneUnorderedMap<NodeId, T> map_;
};
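
// Illustrative usage sketch (editorial): a value equal to the default is not
// inserted, so the table only stores the interesting entries.
//
//   SparseSidetable<VirtualObject*> objects(zone);  // defaults to nullptr
//   objects.Set(node, nullptr);       // no entry is created
//   objects.Set(node, some_vobject);  // entry created
//   objects.Get(other_node);          // returns nullptr (the default)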

// Keeps track of the changes to the current node during reduction.
// Encapsulates the current state of the IR graph and the reducer state like
// side-tables. All access to the IR and the reducer state should happen
// through a ReduceScope to ensure that changes and dependencies are tracked
// and all necessary node revisitations happen.
class ReduceScope {
 public:
  using Reduction = EffectGraphReducer::Reduction;
  explicit ReduceScope(Node* node, Reduction* reduction)
      : current_node_(node), reduction_(reduction) {}

  void SetValueChanged() { reduction_->set_value_changed(); }

 protected:
  Node* current_node() const { return current_node_; }
  Reduction* reduction() { return reduction_; }

 private:
  Node* current_node_;
  Reduction* reduction_;
};

// A VariableTracker object keeps track of the values of variables at all
// points of the effect chain and introduces new phi nodes when necessary.
// Initially and by default, variables are mapped to nullptr, which means that
// the variable allocation point does not dominate the current point on the
// effect chain. We map variables that represent uninitialized memory to the
// Dead node to ensure it is not read.
// Unmapped values are impossible by construction: a PersistentMap that does
// not contain an element is indistinguishable from one that maps it to the
// default element.
class VariableTracker {
 private:
  // The state of all variables at one point in the effect chain.
  class State {
   public:
    using Map = PersistentMap<Variable, Node*>;

    explicit State(Zone* zone) : map_(zone) {}
    Node* Get(Variable var) const {
      CHECK(var != Variable::Invalid());
      return map_.Get(var);
    }
    void Set(Variable var, Node* node) {
      CHECK(var != Variable::Invalid());
      return map_.Set(var, node);
    }
    Map::iterator begin() const { return map_.begin(); }
    Map::iterator end() const { return map_.end(); }
    bool operator!=(const State& other) const { return map_ != other.map_; }

   private:
    Map map_;
  };

 public:
  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
  VariableTracker(const VariableTracker&) = delete;
  VariableTracker& operator=(const VariableTracker&) = delete;

  Variable NewVariable() { return Variable(next_variable_++); }
  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
  Zone* zone() { return zone_; }

  class V8_NODISCARD Scope : public ReduceScope {
   public:
    Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
    ~Scope();
    Maybe<Node*> Get(Variable var) {
      Node* node = current_state_.Get(var);
      if (node && node->opcode() == IrOpcode::kDead) {
        // TODO(turbofan): We use {Dead} as a sentinel for uninitialized
        // memory. Reading uninitialized memory can only happen in unreachable
        // code. In this case, we have to mark the object as escaping to avoid
        // dead nodes in the graph. This is a workaround that should be
        // removed once we can handle dead nodes everywhere.
        return Nothing<Node*>();
      }
      return Just(node);
    }
    void Set(Variable var, Node* node) { current_state_.Set(var, node); }

   private:
    VariableTracker* states_;
    State current_state_;
  };

 private:
  State MergeInputs(Node* effect_phi);
  Zone* zone_;
  JSGraph* graph_;
  SparseSidetable<State> table_;
  ZoneVector<Node*> buffer_;
  EffectGraphReducer* reducer_;
  int next_variable_ = 0;
  TickCounter* const tick_counter_;
};
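
// Worked example of the conventions above (editorial): after
//   Variable v = tracker.NewVariable();
// every state maps v to nullptr until some Scope sets it. The Allocate
// reduction below sets all fields of a fresh object to jsgraph->Dead(), the
// "uninitialized" sentinel, so a later read of an uninitialized field is
// caught in Scope::Get instead of silently producing a stale value.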

// Encapsulates the current state of the escape analysis reducer to preserve
// invariants regarding changes and re-visitation.
class EscapeAnalysisTracker : public ZoneObject {
 public:
  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
                        Zone* zone)
      : virtual_objects_(zone),
        replacements_(zone),
        framestate_might_lazy_deopt_(zone),
        variable_states_(jsgraph, reducer, zone),
        jsgraph_(jsgraph),
        zone_(zone) {}
  EscapeAnalysisTracker(const EscapeAnalysisTracker&) = delete;
  EscapeAnalysisTracker& operator=(const EscapeAnalysisTracker&) = delete;

  class V8_NODISCARD Scope : public VariableTracker::Scope {
   public:
    Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
          Node* node, Reduction* reduction)
        : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
          tracker_(tracker),
          reducer_(reducer) {}
    const VirtualObject* GetVirtualObject(Node* node) {
      VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
      if (vobject) vobject->AddDependency(current_node());
      return vobject;
    }
    // Create or retrieve a virtual object for the current node.
    const VirtualObject* InitVirtualObject(int size) {
      DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
      VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
      if (vobject) {
        CHECK(vobject->size() == size);
      } else {
        vobject = tracker_->NewVirtualObject(size);
      }
      if (vobject) vobject->AddDependency(current_node());
      vobject_ = vobject;
      return vobject;
    }

    void SetVirtualObject(Node* object) {
      vobject_ = tracker_->virtual_objects_.Get(object);
    }

    void SetEscaped(Node* node) {
      if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
        if (object->HasEscaped()) return;
        TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
              node->op()->mnemonic(), node->id(),
              current_node()->op()->mnemonic(), current_node()->id());
        object->SetEscaped();
        object->RevisitDependants(reducer_);
      }
    }
    // The inputs of the current node have to be accessed through the scope to
    // ensure that they respect the node replacements.
    Node* ValueInput(int i) {
      return tracker_->ResolveReplacement(
          NodeProperties::GetValueInput(current_node(), i));
    }
    Node* ContextInput() {
      return tracker_->ResolveReplacement(
          NodeProperties::GetContextInput(current_node()));
    }
    // Accessing the current node is fine for `FrameState` nodes.
    Node* CurrentNode() {
      DCHECK_EQ(current_node()->opcode(), IrOpcode::kFrameState);
      return current_node();
    }

    void SetReplacement(Node* replacement) {
      replacement_ = replacement;
      vobject_ =
          replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
      if (replacement) {
        TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
              replacement->id());
      } else {
        TRACE("Set nullptr as replacement.\n");
      }
    }

    void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }

    bool FrameStateMightLazyDeopt(Node* framestate) {
      DCHECK_EQ(IrOpcode::kFrameState, framestate->opcode());
      if (auto it = tracker_->framestate_might_lazy_deopt_.find(framestate);
          it != tracker_->framestate_might_lazy_deopt_.end()) {
        return it->second;
      }
      for (Node* use : framestate->uses()) {
        switch (use->opcode()) {
          case IrOpcode::kCheckpoint:
          case IrOpcode::kDeoptimize:
          case IrOpcode::kDeoptimizeIf:
          case IrOpcode::kDeoptimizeUnless:
            // These nodes only cause eager deopts.
            break;
          default:
            if (use->opcode() == IrOpcode::kFrameState &&
                !FrameStateMightLazyDeopt(use)) {
              break;
            }
            return tracker_->framestate_might_lazy_deopt_[framestate] = true;
        }
      }
      return tracker_->framestate_might_lazy_deopt_[framestate] = false;
    }

    ~Scope() {
      if (replacement_ != tracker_->replacements_[current_node()] ||
          vobject_ != tracker_->virtual_objects_.Get(current_node())) {
        reduction()->set_value_changed();
      }
      tracker_->replacements_[current_node()] = replacement_;
      tracker_->virtual_objects_.Set(current_node(), vobject_);
    }

   private:
    EscapeAnalysisTracker* tracker_;
    EffectGraphReducer* reducer_;
    VirtualObject* vobject_ = nullptr;
    Node* replacement_ = nullptr;
  };

  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
  Node* ResolveReplacement(Node* node) {
    if (Node* replacement = GetReplacementOf(node)) {
      return replacement;
    }
    return node;
  }

 private:
  friend class EscapeAnalysisResult;
  static constexpr int kTrackingBudget = 600;

  VirtualObject* NewVirtualObject(int size) {
    if (number_of_tracked_objects_ >= kTrackingBudget) return nullptr;
    number_of_tracked_objects_++;
    return zone_->New<VirtualObject>(&variable_states_, next_object_id_++,
                                     size);
  }

  SparseSidetable<VirtualObject*> virtual_objects_;
  Sidetable<Node*> replacements_;
  ZoneUnorderedMap<Node*, bool> framestate_might_lazy_deopt_;
  VariableTracker variable_states_;
  VirtualObject::Id next_object_id_ = 0;
  int number_of_tracked_objects_ = 0;
  JSGraph* const jsgraph_;
  Zone* const zone_;
};
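
// Design note (editorial): kTrackingBudget caps how many allocations a run of
// escape analysis will track, bounding compile time on pathological graphs.
// Once the budget is exhausted, NewVirtualObject returns nullptr, so further
// allocations simply get no virtual object and are never scalar-replaced.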

EffectGraphReducer::EffectGraphReducer(
    TFGraph* graph, std::function<void(Node*, Reduction*)> reduce,
    TickCounter* tick_counter, Zone* zone)
    : graph_(graph),
      state_(graph, kNumStates),
      revisit_(zone),
      stack_(zone),
      reduce_(std::move(reduce)),
      tick_counter_(tick_counter) {}

void EffectGraphReducer::ReduceFrom(Node* node) {
  // Perform DFS and eagerly trigger revisitation as soon as possible.
  // A stack element {node, i} indicates that input i of node should be
  // visited next.
  DCHECK(stack_.empty());
  stack_.push({node, 0});
  while (!stack_.empty()) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    Node* current = stack_.top().node;
    int& input_index = stack_.top().input_index;
    if (input_index < current->InputCount()) {
      Node* input = current->InputAt(input_index);
      input_index++;
      switch (state_.Get(input)) {
        case State::kVisited:
          // The input is already reduced.
          break;
        case State::kOnStack:
          // The input is on the DFS stack right now, so it will be revisited
          // later anyway.
          break;
        case State::kUnvisited:
        case State::kRevisit: {
          state_.Set(input, State::kOnStack);
          stack_.push({input, 0});
          break;
        }
      }
    } else {
      stack_.pop();
      Reduction reduction;
      reduce_(current, &reduction);
      for (Edge edge : current->use_edges()) {
        // Mark uses for revisitation.
        Node* use = edge.from();
        if (NodeProperties::IsEffectEdge(edge)) {
          if (reduction.effect_changed()) Revisit(use);
        } else {
          if (reduction.value_changed()) Revisit(use);
        }
      }
      state_.Set(current, State::kVisited);
      // Process the revisitation buffer immediately. This improves
      // performance of escape analysis. Using a stack for {revisit_} reverses
      // the order in which the revisitation happens. This also seems to
      // improve performance.
      while (!revisit_.empty()) {
        Node* revisit = revisit_.top();
        if (state_.Get(revisit) == State::kRevisit) {
          state_.Set(revisit, State::kOnStack);
          stack_.push({revisit, 0});
        }
        revisit_.pop();
      }
    }
  }
}

void EffectGraphReducer::Revisit(Node* node) {
  if (state_.Get(node) == State::kVisited) {
    TRACE("  Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
          node->id());
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }
}
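
// Illustrative usage sketch (hypothetical client, not part of this file):
// the reducer runs the callback over the graph until a fixpoint is reached.
//
//   EffectGraphReducer reducer(
//       graph,
//       [](Node* n, EffectGraphReducer::Reduction* r) {
//         // Analyze n; when its result changes, flag it so that value/effect
//         // uses of n are queued for revisitation:
//         //   r->set_value_changed();  r->set_effect_changed();
//       },
//       tick_counter, zone);
//   reducer.ReduceFrom(graph->end());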

VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
                                 Zone* zone)
    : zone_(zone),
      graph_(graph),
      table_(zone, State(zone)),
      buffer_(zone),
      reducer_(reducer),
      tick_counter_(reducer->tick_counter()) {}

VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
                              Reduction* reduction)
    : ReduceScope(node, reduction),
      states_(states),
      current_state_(states->zone_) {
  switch (node->opcode()) {
    case IrOpcode::kEffectPhi:
      current_state_ = states_->MergeInputs(node);
      break;
    default:
      int effect_inputs = node->op()->EffectInputCount();
      if (effect_inputs == 1) {
        current_state_ =
            states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
      } else {
        DCHECK_EQ(0, effect_inputs);
      }
  }
}

VariableTracker::Scope::~Scope() {
  if (!reduction()->effect_changed() &&
      states_->table_.Get(current_node()) != current_state_) {
    reduction()->set_effect_changed();
  }
  states_->table_.Set(current_node(), current_state_);
}

VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
  // A variable that is mapped to [nullptr] was not assigned a value on every
  // execution path to the current effect phi. Relying on the invariant that
  // every variable is initialized (at least with a sentinel like the Dead
  // node), this means that the variable initialization does not dominate the
  // current point. So for loop effect phis, we can keep nullptr for a
  // variable as long as the first input of the loop has nullptr for this
  // variable. For non-loop effect phis, we can even keep it nullptr as long
  // as any input has nullptr.
  DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
  int arity = effect_phi->op()->EffectInputCount();
  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
  bool is_loop = control->opcode() == IrOpcode::kLoop;
  buffer_.reserve(arity + 1);

  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
  State result = first_input;
  for (std::pair<Variable, Node*> var_value : first_input) {
    tick_counter_->TickAndMaybeEnterSafepoint();
    if (Node* value = var_value.second) {
      Variable var = var_value.first;
      TRACE("var %i:\n", var.id_);
      buffer_.clear();
      buffer_.push_back(value);
      bool identical_inputs = true;
      int num_defined_inputs = 1;
      TRACE("  input 0: %s#%d\n", value->op()->mnemonic(), value->id());
      for (int i = 1; i < arity; ++i) {
        Node* next_value =
            table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
        if (next_value != value) identical_inputs = false;
        if (next_value != nullptr) {
          num_defined_inputs++;
          TRACE("  input %i: %s#%d\n", i, next_value->op()->mnemonic(),
                next_value->id());
        } else {
          TRACE("  input %i: nullptr\n", i);
        }
        buffer_.push_back(next_value);
      }

      Node* old_value = table_.Get(effect_phi).Get(var);
      if (old_value) {
        TRACE("  old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
      } else {
        TRACE("  old: nullptr\n");
      }
      // Reuse a previously created phi node if possible.
      if (old_value && old_value->opcode() == IrOpcode::kPhi &&
          NodeProperties::GetControlInput(old_value, 0) == control) {
        // Since a phi node can never dominate its control node,
        // [old_value] cannot originate from the inputs. Thus [old_value]
        // must have been created by a previous reduction of this [effect_phi].
        for (int i = 0; i < arity; ++i) {
          Node* old_input = NodeProperties::GetValueInput(old_value, i);
          Node* new_input = buffer_[i] ? buffer_[i] : graph_->Dead();
          if (old_input != new_input) {
            NodeProperties::ReplaceValueInput(old_value, new_input, i);
            reducer_->Revisit(old_value);
          }
        }
        result.Set(var, old_value);
      } else {
        if (num_defined_inputs == 1 && is_loop) {
          // For loop effect phis, the variable initialization dominates iff
          // it dominates the first input.
          DCHECK_EQ(2, arity);
          DCHECK_EQ(value, buffer_[0]);
          result.Set(var, value);
        } else if (num_defined_inputs < arity) {
          // If the variable is undefined on some input of this non-loop
          // effect phi, then its initialization does not dominate this point.
          result.Set(var, nullptr);
        } else {
          DCHECK_EQ(num_defined_inputs, arity);
          // We only create a phi if the values are different.
          if (identical_inputs) {
            result.Set(var, value);
          } else {
            TRACE("Creating new phi\n");
            buffer_.push_back(control);
            Node* phi = graph_->graph()->NewNode(
                graph_->common()->Phi(MachineRepresentation::kTagged, arity),
                arity + 1, &buffer_.front());
            // TODO(turbofan): Computing precise types here is tricky, because
            // of the necessary revisitations. If we really need this, we
            // should probably do it afterwards.
            NodeProperties::SetType(phi, Type::Any());
            reducer_->AddRoot(phi);
            result.Set(var, phi);
          }
        }
      }
#ifdef DEBUG
      if (Node* result_node = result.Get(var)) {
        TRACE("  result: %s#%d\n", result_node->op()->mnemonic(),
              result_node->id());
      } else {
        TRACE("  result: nullptr\n");
      }
#endif
    }
  }
  return result;
}
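
// Worked example (editorial): consider a non-loop EffectPhi with two inputs
// where variable v maps to node a on input 0 and node b on input 1.
//  - a == b: v stays mapped to a; no phi is needed.
//  - a != b, both non-null: a Phi(kTagged, 2)(a, b, control) is created (or
//    an existing one reused) and v maps to it.
//  - b == nullptr: v maps to nullptr, since its initialization no longer
//    dominates the merge point.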

namespace {

int OffsetOfFieldAccess(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kLoadField ||
         op->opcode() == IrOpcode::kStoreField);
  FieldAccess access = FieldAccessOf(op);
  return access.offset;
}

Maybe<int> OffsetOfElementAt(ElementAccess const& access, int index) {
  MachineRepresentation representation = access.machine_type.representation();
  // Double elements accesses are not yet supported. See chromium:1237821.
  if (representation == MachineRepresentation::kFloat64) return Nothing<int>();

  DCHECK_GE(index, 0);
  DCHECK_GE(ElementSizeLog2Of(representation), kTaggedSizeLog2);
  return Just(access.header_size +
              (index << ElementSizeLog2Of(representation)));
}
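
// Worked arithmetic (editorial): for 8-byte elements,
// ElementSizeLog2Of(representation) == 3. With a 16-byte header, the element
// at index 2 sits at byte offset 16 + (2 << 3) == 32 from the object start.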

Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
         op->opcode() == IrOpcode::kStoreElement);
  Type index_type = NodeProperties::GetType(index_node);
  if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
  double max = index_type.Max();
  double min = index_type.Min();
  int index = static_cast<int>(min);
  if (index < 0 || index != min || index != max) return Nothing<int>();
  return OffsetOfElementAt(ElementAccessOf(op), index);
}

Node* LowerCompareMapsWithoutLoad(Node* checked_map,
                                  ZoneRefSet<Map> const& checked_against,
                                  JSGraph* jsgraph) {
  Node* true_node = jsgraph->TrueConstant();
  Node* false_node = jsgraph->FalseConstant();
  Node* replacement = false_node;
  for (MapRef map : checked_against) {
    // We are using HeapConstantMaybeHole here instead of HeapConstantNoHole
    // as we cannot do the CHECK(object is hole) here as the compile thread is
    // parked during EscapeAnalysis for performance reasons, see pipeline.cc.
    // TODO(cffsmith): do manual checking against hole values here.
    Node* map_node = jsgraph->HeapConstantMaybeHole(map.object());
    // We cannot create a HeapConstant type here as we are off-thread.
    NodeProperties::SetType(map_node, Type::Internal());
    Node* comparison = jsgraph->graph()->NewNode(
        jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
    NodeProperties::SetType(comparison, Type::Boolean());
    if (replacement == false_node) {
      replacement = comparison;
    } else {
      replacement = jsgraph->graph()->NewNode(
          jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
          comparison, true_node, replacement);
      NodeProperties::SetType(replacement, Type::Boolean());
    }
  }
  return replacement;
}
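
// Resulting graph shape (editorial): for checked_against = {m1, m2} the
// function builds, without ever loading the map from the object:
//
//   Select(ReferenceEqual(checked_map, m2),
//          true,
//          ReferenceEqual(checked_map, m1))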

namespace {
bool CheckMapsHelper(EscapeAnalysisTracker::Scope* current, Node* checked,
                     ZoneRefSet<Map> target) {
  const VirtualObject* vobject = current->GetVirtualObject(checked);
  Variable map_field;
  Node* map;
  if (vobject && !vobject->HasEscaped() &&
      vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
      current->Get(map_field).To(&map)) {
    if (map) {
      Type const map_type = NodeProperties::GetType(map);
      if (map_type.IsHeapConstant() &&
          target.contains(map_type.AsHeapConstant()->Ref().AsMap())) {
        current->MarkForDeletion();
        return true;
      }
    } else {
      // If the variable has no value, we have not reached the fixed-point
      // yet.
      return true;
    }
  }
  return false;
}
}  // namespace

void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
                JSGraph* jsgraph) {
  switch (op->opcode()) {
    case IrOpcode::kAllocate: {
      NumberMatcher size(current->ValueInput(0));
      if (!size.HasResolvedValue()) break;
      int size_int = static_cast<int>(size.ResolvedValue());
      if (size_int != size.ResolvedValue()) break;
      if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
        // Initialize with dead nodes as a sentinel for uninitialized memory.
        for (Variable field : *vobject) {
          current->Set(field, jsgraph->Dead());
        }
      }
      break;
    }
    case IrOpcode::kFinishRegion:
      current->SetVirtualObject(current->ValueInput(0));
      break;
    case IrOpcode::kStoreField: {
      Node* object = current->ValueInput(0);
      Node* value = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (value->opcode() == IrOpcode::kTrustedHeapConstant) {
        // TODO(dmercadier): enable escaping objects containing
        // TrustedHeapConstants. This is currently disabled because it leads
        // to bugs when Trusted HeapConstant and regular HeapConstant flow
        // into the same Phi, which can then be marked as Compressed, messing
        // up the tagging of the Trusted HeapConstant.
        current->SetEscaped(object);
        current->SetEscaped(value);
        break;
      }
      // BoundedSize fields cannot currently be materialized by the
      // deoptimizer, so we must not dematerialize them.
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
          !FieldAccessOf(op).is_bounded_size_access) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(object);
        current->SetEscaped(value);
      }
      break;
    }
    case IrOpcode::kStoreElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      Node* value = current->ValueInput(2);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(value);
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
          current->Get(var).To(&value)) {
        current->SetReplacement(value);
      } else {
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
        current->SetReplacement(value);
        break;
      } else if (vobject && !vobject->HasEscaped()) {
        // Compute the known length (aka the number of elements) of {object}
        // based on the virtual object information.
        ElementAccess const& access = ElementAccessOf(op);
        int const length =
            (vobject->size() - access.header_size) >>
            ElementSizeLog2Of(access.machine_type.representation());
        Variable var0, var1;
        Node* value0;
        Node* value1;
        if (length == 1 &&
            vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var) &&
            current->Get(var).To(&value) &&
            (value == nullptr ||
             NodeProperties::GetType(value).Is(access.type))) {
          // The {object} has exactly one element, and we know that the
          // LoadElement {index} must be within bounds, thus it must always
          // yield this one element of {object}.
          current->SetReplacement(value);
          break;
        } else if (length == 2 &&
                   vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var0) &&
                   current->Get(var0).To(&value0) &&
                   (value0 == nullptr ||
                    NodeProperties::GetType(value0).Is(access.type)) &&
                   vobject->FieldAt(OffsetOfElementAt(access, 1)).To(&var1) &&
                   current->Get(var1).To(&value1) &&
                   (value1 == nullptr ||
                    NodeProperties::GetType(value1).Is(access.type))) {
          if (value0 && value1) {
            // The {object} has exactly two elements, so the LoadElement
            // must return one of them (i.e. either the element at index
            // 0 or the one at index 1). So we can turn the LoadElement
            // into a Select operation instead (still allowing the {object}
            // to be scalar replaced). We must however mark the elements
            // of the {object} itself as escaping.
            Node* check =
                jsgraph->graph()->NewNode(jsgraph->simplified()->NumberEqual(),
                                          index, jsgraph->ZeroConstant());
            NodeProperties::SetType(check, Type::Boolean());
            Node* select = jsgraph->graph()->NewNode(
                jsgraph->common()->Select(access.machine_type.representation()),
                check, value0, value1);
            NodeProperties::SetType(select, access.type);
            current->SetReplacement(select);
            current->SetEscaped(value0);
            current->SetEscaped(value1);
            break;
          } else {
            // If the variables have no values, we have not reached the
            // fixed-point yet.
            break;
          }
        }
      }
      current->SetEscaped(object);
      break;
    }
    case IrOpcode::kTypeGuard: {
      current->SetVirtualObject(current->ValueInput(0));
      break;
    }
    case IrOpcode::kReferenceEqual: {
      Node* left = current->ValueInput(0);
      Node* right = current->ValueInput(1);
      const VirtualObject* left_object = current->GetVirtualObject(left);
      const VirtualObject* right_object = current->GetVirtualObject(right);
      Node* replacement = nullptr;
      if (left_object && !left_object->HasEscaped()) {
        if (right_object && !right_object->HasEscaped() &&
            left_object->id() == right_object->id()) {
          replacement = jsgraph->TrueConstant();
        } else {
          replacement = jsgraph->FalseConstant();
        }
      } else if (right_object && !right_object->HasEscaped()) {
        replacement = jsgraph->FalseConstant();
      }
      // TODO(turbofan): This is a workaround for uninhabited types. If we
      // replaced a value of uninhabited type with a constant, we would
      // widen the type of the node. This could produce inconsistent
      // types (which might confuse representation selection). We get
      // around this by refusing to constant-fold and escape-analyze
      // if the type is not inhabited.
      if (replacement && !NodeProperties::GetType(left).IsNone() &&
          !NodeProperties::GetType(right).IsNone()) {
        current->SetReplacement(replacement);
        break;
      }
      current->SetEscaped(left);
      current->SetEscaped(right);
      break;
    }
    case IrOpcode::kCheckMaps: {
      CheckMapsParameters params = CheckMapsParametersOf(op);
      Node* checked = current->ValueInput(0);
      if (CheckMapsHelper(current, checked, params.maps())) {
        break;
      }
      current->SetEscaped(checked);
      break;
    }
    case IrOpcode::kTransitionElementsKindOrCheckMap: {
      ElementsTransitionWithMultipleSources params =
          ElementsTransitionWithMultipleSourcesOf(op);
      Node* checked = current->ValueInput(0);
      if (CheckMapsHelper(current, checked, ZoneRefSet<Map>(params.target()))) {
        break;
      }
      current->SetEscaped(checked);
      break;
    }
    case IrOpcode::kCompareMaps: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable map_field;
      Node* object_map;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
          current->Get(map_field).To(&object_map)) {
        if (object_map) {
          current->SetReplacement(LowerCompareMapsWithoutLoad(
              object_map, CompareMapsParametersOf(op), jsgraph));
          break;
        } else {
          // If the variable has no value, we have not reached the fixed-point
          // yet.
          break;
        }
      }
      current->SetEscaped(object);
      break;
    }
    case IrOpcode::kCheckHeapObject: {
      Node* checked = current->ValueInput(0);
      switch (checked->opcode()) {
        case IrOpcode::kAllocate:
        case IrOpcode::kFinishRegion:
        case IrOpcode::kHeapConstant:
          current->SetReplacement(checked);
          break;
        default:
          current->SetEscaped(checked);
          break;
      }
      break;
    }
    case IrOpcode::kMapGuard: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      if (vobject && !vobject->HasEscaped()) {
        current->MarkForDeletion();
      }
      break;
    }
    case IrOpcode::kStateValues:
      // We visit StateValues nodes through their corresponding FrameState
      // node, so we need to make sure we revisit the FrameState.
      current->SetValueChanged();
      break;
    case IrOpcode::kFrameState: {
      // We mark the receiver as escaping due to the non-standard `.getThis`
      // API.
      FrameState frame_state{current->CurrentNode()};
      FrameStateType type = frame_state.frame_state_info().type();
      // This needs to be kept in sync with the frame types supported in
      // `OptimizedJSFrame::Summarize`.
      if (type != FrameStateType::kUnoptimizedFunction &&
          type != FrameStateType::kJavaScriptBuiltinContinuation &&
          type != FrameStateType::kJavaScriptBuiltinContinuationWithCatch) {
        break;
      }
      if (!current->FrameStateMightLazyDeopt(current->CurrentNode())) {
        // Only lazy deopt frame states are used to generate stack traces.
        break;
      }
      StateValuesAccess::iterator it =
          StateValuesAccess(frame_state.parameters()).begin();
      if (!it.done()) {
        if (Node* receiver = it.node()) {
          current->SetEscaped(receiver);
        }
        current->SetEscaped(frame_state.function());
      }
      break;
    }
    default: {
      // For unknown nodes, treat all value inputs as escaping.
      int value_input_count = op->ValueInputCount();
      for (int i = 0; i < value_input_count; ++i) {
        Node* input = current->ValueInput(i);
        current->SetEscaped(input);
      }
      if (OperatorProperties::HasContextInput(op)) {
        current->SetEscaped(current->ContextInput());
      }
      break;
    }
  }
}
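
// End-to-end example (editorial): for an IR sequence like
//   a = Allocate(16)
//   StoreField(a, +8, v)
//   x = LoadField(a, +8)
// where a never escapes, the cases above record v in the virtual object's
// field variable, mark the StoreField for deletion, and replace the
// LoadField with v; the now-unused allocation is then removed by a later
// pass that consumes this analysis.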

}  // namespace

void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
  const Operator* op = node->op();
  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());

  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
  ReduceNode(op, &current, jsgraph());
}

EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, TickCounter* tick_counter,
                               Zone* zone)
    : EffectGraphReducer(
          jsgraph->graph(),
          [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
          tick_counter, zone),
      tracker_(zone->New<EscapeAnalysisTracker>(jsgraph, this, zone)),
      jsgraph_(jsgraph) {}

Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
  Node* replacement = tracker_->GetReplacementOf(node);
  // Replacements cannot have replacements. This is important to ensure
  // re-visitation: If a replacement is replaced, then all nodes accessing
  // the replacement have to be updated.
  if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
  return replacement;
}

Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
                                                  int field, Node* effect) {
  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
                                        effect);
}

const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
  return tracker_->virtual_objects_.Get(node);
}

VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                             int size)
    : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
  DCHECK(IsAligned(size, kTaggedSize));
  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
  int num_fields = size / kTaggedSize;
  fields_.reserve(num_fields);
  for (int i = 0; i < num_fields; ++i) {
    fields_.push_back(var_states->NewVariable());
  }
}
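
// Example (editorial): with kTaggedSize == 8, a 24-byte allocation yields a
// VirtualObject with 3 field variables, one per tagged slot (the map slot at
// offset 0 plus two payload fields); each variable is tracked independently.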

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8