maglev-interpreter-frame-state.h
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MAGLEV_MAGLEV_INTERPRETER_FRAME_STATE_H_
#define V8_MAGLEV_MAGLEV_INTERPRETER_FRAME_STATE_H_

#include <optional>

#ifdef V8_ENABLE_MAGLEV
#include "src/maglev/maglev-regalloc-data.h"
#endif
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace maglev {

class BasicBlock;
class Graph;
class MaglevGraphBuilder;
class MergePointInterpreterFrameState;

// Destructively intersects the right map into the left map, such that the
// left map is mutated to become the result of the intersection. Values that
// are in both maps are passed to the merging function to be merged with each
// other -- again, the LHS here is expected to be mutated.
template <typename Key, typename Value,
          typename MergeFunc = std::equal_to<Value>>
void DestructivelyIntersect(ZoneMap<Key, Value>& lhs_map,
                            const ZoneMap<Key, Value>& rhs_map,
                            MergeFunc&& func = MergeFunc()) {
  // Walk the two maps in lock step. This relies on the fact that ZoneMaps are
  // sorted.
  typename ZoneMap<Key, Value>::iterator lhs_it = lhs_map.begin();
  typename ZoneMap<Key, Value>::const_iterator rhs_it = rhs_map.begin();
  while (lhs_it != lhs_map.end() && rhs_it != rhs_map.end()) {
    if (lhs_it->first < rhs_it->first) {
      // Remove from LHS elements that are not in RHS.
      lhs_it = lhs_map.erase(lhs_it);
    } else if (rhs_it->first < lhs_it->first) {
      // Skip over elements that are only in RHS.
      ++rhs_it;
    } else {
      // Apply the merge function to the values of the two iterators. If the
      // function returns false, remove the value.
      bool keep_value = func(lhs_it->second, rhs_it->second);
      if (keep_value) {
        ++lhs_it;
      } else {
        lhs_it = lhs_map.erase(lhs_it);
      }
      ++rhs_it;
    }
  }
  // If we haven't reached the end of LHS by now, then we have reached the end
  // of RHS, and the remaining items are therefore not in RHS. Remove them.
  if (lhs_it != lhs_map.end()) {
    lhs_map.erase(lhs_it, lhs_map.end());
  }
}
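
// Illustrative usage sketch, not part of the V8 source. Because ZoneMap is
// sorted, the lock-step walk above runs in O(|lhs| + |rhs|) steps:
#if 0
  // Assume a live Zone* `zone` is available (hypothetical setup).
  ZoneMap<int, int> lhs(zone);
  lhs.emplace(1, 10);
  lhs.emplace(2, 20);
  lhs.emplace(3, 30);
  ZoneMap<int, int> rhs(zone);
  rhs.emplace(2, 20);
  rhs.emplace(3, 99);
  // With the default equality MergeFunc: key 1 is erased (missing from rhs),
  // key 2 is kept (values compare equal), key 3 is erased (values differ).
  DestructivelyIntersect(lhs, rhs);
  // lhs is now {2 -> 20}.
#endif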

using PossibleMaps = compiler::ZoneRefSet<Map>;

class NodeInfo {
 public:
  NodeInfo() = default;

  struct ClearUnstableMapsOnCopy {
    const NodeInfo& val;
  };
  NodeInfo(ClearUnstableMapsOnCopy other) V8_NOEXCEPT {
    type_ = other.val.type_;
    alternative_ = other.val.alternative_;
    if (other.val.possible_maps_are_known_ && !other.val.any_map_is_unstable_) {
      possible_maps_ = other.val.possible_maps_;
      possible_maps_are_known_ = true;
    }
  }

  NodeType type() const { return type_; }
  NodeType CombineType(NodeType other) {
    return type_ = maglev::CombineType(type_, other);
  }
  NodeType IntersectType(NodeType other) {
    return type_ = maglev::IntersectType(type_, other);
  }

  // Optional alternative nodes with the equivalent value but a different
  // representation.
  class AlternativeNodes {
   public:
    AlternativeNodes() { store_.fill(nullptr); }

#define ALTERNATIVES(V)                                    \
  V(tagged, Tagged)                                        \
  V(int32, Int32)                                          \
  V(truncated_int32_to_number, TruncatedInt32ToNumber)     \
  V(float64, Float64)                                      \
  V(checked_value, CheckedValue)

    enum Kind {
#define KIND(name, Name) k##Name,
      ALTERNATIVES(KIND)
#undef KIND
      kNumberOfAlternatives
    };

#define API(name, Name)                                        \
  ValueNode* name() const { return store_[Kind::k##Name]; }    \
  ValueNode* set_##name(ValueNode* val) {                      \
    return store_[Kind::k##Name] = val;                        \
  }                                                            \
  template <typename Function>                                 \
  ValueNode* get_or_set_##name(Function create) {              \
    if (store_[Kind::k##Name]) return store_[Kind::k##Name];   \
    return store_[Kind::k##Name] = create();                   \
  }
    ALTERNATIVES(API)
#undef API
#undef ALTERNATIVES
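
    // For example, for the `tagged` alternative, ALTERNATIVES(API) above
    // expands to:
    //
    //   ValueNode* tagged() const { return store_[Kind::kTagged]; }
    //   ValueNode* set_tagged(ValueNode* val) {
    //     return store_[Kind::kTagged] = val;
    //   }
    //   template <typename Function>
    //   ValueNode* get_or_set_tagged(Function create) {
    //     if (store_[Kind::kTagged]) return store_[Kind::kTagged];
    //     return store_[Kind::kTagged] = create();
    //   }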

    bool has_none() const { return store_ == AlternativeNodes().store_; }

    void MergeWith(const AlternativeNodes& other) {
      for (size_t i = 0; i < Kind::kNumberOfAlternatives; ++i) {
        if (store_[i] && store_[i] != other.store_[i]) {
          store_[i] = nullptr;
        }
      }
    }

   private:
    // TODO(leszeks): At least one of these is redundant for every node,
    // consider a more compressed form or even linked list.
    std::array<ValueNode*, Kind::kNumberOfAlternatives> store_;

    // Prevent callers from copying these when they try to update the
    // alternatives by making these private.
    AlternativeNodes(const AlternativeNodes&) V8_NOEXCEPT = default;
    AlternativeNodes& operator=(const AlternativeNodes&) V8_NOEXCEPT = default;
    friend class NodeInfo;
  };

  const AlternativeNodes& alternative() const { return alternative_; }
  AlternativeNodes& alternative() { return alternative_; }

  bool no_info_available() const {
    return type_ == NodeType::kUnknown && alternative_.has_none() &&
           !possible_maps_are_known_;
  }

  bool is_smi() const { return NodeTypeIsSmi(type_); }
  bool is_any_heap_object() const { return NodeTypeIsAnyHeapObject(type_); }
  bool is_string() const { return NodeTypeIsString(type_); }
  bool is_internalized_string() const {
    return NodeTypeIsInternalizedString(type_);
  }
  bool is_symbol() const { return NodeTypeIsSymbol(type_); }

  // Mutate this node info by merging in another node info, with the result
  // being a node info that is the subset of information valid in both inputs.
  void MergeWith(const NodeInfo& other, Zone* zone,
                 bool& any_merged_map_is_unstable) {
    IntersectType(other.type_);
    alternative_.MergeWith(other.alternative_);
    if (possible_maps_are_known_) {
      if (other.possible_maps_are_known_) {
        // Map sets are the set of _possible_ maps, so on a merge we need to
        // _union_ them together (i.e. intersect the set of impossible maps).
        // Remember whether _any_ of these merges observed unstable maps.
        possible_maps_.Union(other.possible_maps_, zone);
      } else {
        possible_maps_are_known_ = false;
        possible_maps_.clear();
      }
    }

    any_map_is_unstable_ = possible_maps_are_known_ &&
                           (any_map_is_unstable_ || other.any_map_is_unstable_);
    any_merged_map_is_unstable =
        any_merged_map_is_unstable || any_map_is_unstable_;
  }

  void ClearUnstableMaps() {
    if (!any_map_is_unstable_) return;
    possible_maps_.clear();
    possible_maps_are_known_ = false;
    any_map_is_unstable_ = false;
  }

  template <typename Function>
  void ClearUnstableMapsIfAny(const Function& condition) {
    if (!any_map_is_unstable_) return;
    for (auto map : possible_maps_) {
      if (condition(map)) {
        ClearUnstableMaps();
        return;
      }
    }
  }

  bool possible_maps_are_known() const { return possible_maps_are_known_; }

  const PossibleMaps& possible_maps() const {
    // If !possible_maps_are_known_ then every map is possible and using the
    // (probably empty) possible_maps_ set is definitely wrong.
    DCHECK(possible_maps_are_known_);
    return possible_maps_;
  }

  void SetPossibleMaps(const PossibleMaps& possible_maps,
                       bool any_map_is_unstable, NodeType possible_type,
                       compiler::JSHeapBroker* broker) {
    possible_maps_ = possible_maps;
    possible_maps_are_known_ = true;
    any_map_is_unstable_ = any_map_is_unstable;
#ifdef DEBUG
    if (possible_maps.size()) {
      NodeType expected = StaticTypeForMap(*possible_maps.begin(), broker);
      for (auto map : possible_maps) {
        expected =
            maglev::IntersectType(expected, StaticTypeForMap(map, broker));
      }
      // Ensure the claimed type is not narrower than what can be learned from
      // the map checks.
      DCHECK(NodeTypeIs(expected, possible_type));
    } else {
      DCHECK_EQ(possible_type, NodeType::kUnknown);
    }
#endif
    CombineType(possible_type);
  }

 private:
  NodeType type_ = NodeType::kUnknown;

  bool any_map_is_unstable_ = false;

  // Maps for a node. Sets of maps that only contain stable maps are valid
  // across side-effecting calls, as long as we install a dependency, otherwise
  // they are cleared on side-effects.
  // TODO(v8:7700): Investigate a better data structure to use than ZoneMap.
  bool possible_maps_are_known_ = false;
  PossibleMaps possible_maps_;

  AlternativeNodes alternative_;
};
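
// Illustrative sketch, not part of the V8 source: CombineType narrows the
// known type (both facts hold for the same value), while IntersectType widens
// it to what is valid in both inputs, as used on control-flow merges.
#if 0
  NodeInfo info;
  info.CombineType(NodeType::kSmi);       // after a Smi check: known to be Smi
  info.IntersectType(NodeType::kNumber);  // merged with a path knowing Number
  // info.type() is now kNumber, the most precise type valid on both paths.
#endif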

struct LoopEffects;

struct KnownNodeAspects {
  // Permanently valid if checked in a dominator.
  using NodeInfos = ZoneMap<ValueNode*, NodeInfo>;

  // Copy constructor is defaulted but private so that we explicitly call the
  // Clone method.
  KnownNodeAspects(KnownNodeAspects&& other) = delete;
  KnownNodeAspects& operator=(const KnownNodeAspects& other) = delete;
  KnownNodeAspects& operator=(KnownNodeAspects&& other) = delete;

  KnownNodeAspects* Clone(Zone* zone) const {
    return zone->New<KnownNodeAspects>(*this);
  }

  // Loop headers can safely clone the node types, since those won't be
  // invalidated in the loop body, and similarly stable maps will have
  // dependencies installed. Unstable maps however might be invalidated by
  // calls, and we don't know about these until it's too late.
  KnownNodeAspects* CloneForLoopHeader(bool optimistic_initial_state,
                                       LoopEffects* loop_effects,
                                       Zone* zone) const;

  explicit KnownNodeAspects(Zone* zone);

  // A side effect could change existing objects' maps. For stable maps we
  // know this hasn't happened (because we added a dependency on the maps
  // staying stable and therefore not possible to transition away from), but
  // we can no longer assume that objects with unstable maps still have the
  // same map. Unstable maps can also transition to stable ones, so we have to
  // clear _all_ maps for a node if it had _any_ unstable map.
  void ClearUnstableMaps() {
    for (auto& it : node_infos) {
      it.second.ClearUnstableMaps();
    }
    any_map_for_any_node_is_unstable = false;
  }

  template <typename Function>
  void ClearUnstableMapsIfAny(const Function& condition) {
    if (!any_map_for_any_node_is_unstable) return;
    for (auto& it : node_infos) {
      it.second.ClearUnstableMapsIfAny(condition);
    }
  }

  bool any_map_for_any_node_is_unstable = false;

  NodeInfos::iterator FindInfo(ValueNode* node) {
    return node_infos.find(node);
  }
  NodeInfos::const_iterator FindInfo(ValueNode* node) const {
    return node_infos.find(node);
  }
  bool IsValid(NodeInfos::iterator& it) { return it != node_infos.end(); }
  bool IsValid(NodeInfos::const_iterator& it) const {
    return it != node_infos.end();
  }

  const NodeInfo* TryGetInfoFor(ValueNode* node) const {
    return const_cast<KnownNodeAspects*>(this)->TryGetInfoFor(node);
  }
  NodeInfo* TryGetInfoFor(ValueNode* node) {
    auto info_it = FindInfo(node);
    if (!IsValid(info_it)) return nullptr;
    return &info_it->second;
  }
  NodeInfo* GetOrCreateInfoFor(ValueNode* node, compiler::JSHeapBroker* broker,
                               LocalIsolate* isolate) {
    auto info_it = FindInfo(node);
    if (IsValid(info_it)) return &info_it->second;
    auto res = &node_infos.emplace(node, NodeInfo()).first->second;
    res->CombineType(StaticTypeForNode(broker, isolate, node));
    return res;
  }

  NodeType GetType(ValueNode* node) const {
    if (auto info = TryGetInfoFor(node)) {
      return info->type();
    }
    return NodeType::kUnknown;
  }

  void Merge(const KnownNodeAspects& other, Zone* zone);

  // If IsCompatibleWithLoopHeader(other) returns true, it means that
  // Merge(other) would not remove any information from `this`.
  bool IsCompatibleWithLoopHeader(const KnownNodeAspects& other) const;

  // TODO(leszeks): Store these more efficiently than with std::map -- in
  // particular, clear out entries that are no longer reachable, perhaps also
  // allow lookup by interpreter register rather than by node pointer.
  NodeInfos node_infos;

  // Cached property loads.

  // Represents a key into the cache. This is either a NameRef, or an enum
  // value.
  class LoadedPropertyMapKey {
   public:
    enum Type {
      // kName must be zero so that pointers are unaffected.
      kName = 0,
      kElements,
      kTypedArrayLength,
      // TODO(leszeks): We could probably share kStringLength with
      // kTypedArrayLength if needed.
      kStringLength,
    };
    static constexpr int kTypeMask = 0x3;
    static_assert((kName & ~kTypeMask) == 0);
    static_assert((kElements & ~kTypeMask) == 0);
    static_assert((kTypedArrayLength & ~kTypeMask) == 0);
    static_assert((kStringLength & ~kTypeMask) == 0);

    static LoadedPropertyMapKey Elements() {
      return LoadedPropertyMapKey(kElements);
    }

    static LoadedPropertyMapKey TypedArrayLength() {
      return LoadedPropertyMapKey(kTypedArrayLength);
    }

    static LoadedPropertyMapKey StringLength() {
      return LoadedPropertyMapKey(kStringLength);
    }

    // Allow implicit conversion from NameRef to key, so that callers in the
    // common path can use a NameRef directly.
    // NOLINTNEXTLINE
    LoadedPropertyMapKey(compiler::NameRef ref)
        : data_(reinterpret_cast<Address>(ref.data())) {
      DCHECK_EQ(data_ & kTypeMask, kName);
    }

    bool operator==(const LoadedPropertyMapKey& other) const {
      return data_ == other.data_;
    }
    bool operator<(const LoadedPropertyMapKey& other) const {
      return data_ < other.data_;
    }

    compiler::NameRef name() {
      DCHECK_EQ(type(), kName);
      return compiler::NameRef(reinterpret_cast<compiler::ObjectData*>(data_),
                               false);
    }

    Type type() { return static_cast<Type>(data_ & kTypeMask); }

   private:
    explicit LoadedPropertyMapKey(Type type) : data_(type) {
      DCHECK_NE(type, kName);
    }

    Address data_;
  };
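
  // Illustrative sketch, not part of the V8 source: building both key
  // flavors. NameRef data pointers are aligned, so their low two bits are
  // zero, which is exactly the kName tag; enum keys use the factories above.
#if 0
  LoadedPropertyMapKey name_key = some_name_ref;  // implicit; type() == kName
  LoadedPropertyMapKey elements_key = LoadedPropertyMapKey::Elements();
  DCHECK_EQ(elements_key.type(), LoadedPropertyMapKey::kElements);
#endif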
  // Maps key->object->value, so that stores to a key can invalidate all loads
  // of that key (in case the objects are aliasing).
  using LoadedPropertyMap =
      ZoneMap<LoadedPropertyMapKey, ZoneMap<ValueNode*, ValueNode*>>;

  // Valid across side-effecting calls, as long as we install a dependency.
  LoadedPropertyMap loaded_constant_properties;
  // Flushed after side-effecting calls.
  LoadedPropertyMap loaded_properties;

  // Unconditionally valid across side-effecting calls.
  ZoneMap<std::tuple<ValueNode*, int>, ValueNode*> loaded_context_constants;
  // Flushed after side-effecting calls.
  using LoadedContextSlotsKey = std::tuple<ValueNode*, int>;
  using LoadedContextSlots = ZoneMap<LoadedContextSlotsKey, ValueNode*>;
  LoadedContextSlots loaded_context_slots;

  uint32_t effect_epoch() const { return effect_epoch_; }
  static constexpr uint32_t kEffectEpochForPureInstructions =
      std::numeric_limits<uint32_t>::max();
  static constexpr uint32_t kEffectEpochOverflow =
      kEffectEpochForPureInstructions - 1;

 private:
  uint32_t effect_epoch_ = 0;

  // Copy constructor for Clone.
  friend KnownNodeAspects* Zone::New<KnownNodeAspects, const KnownNodeAspects&>(
      const KnownNodeAspects&);
  KnownNodeAspects(const KnownNodeAspects& other) V8_NOEXCEPT = default;

  // Copy constructor for CloneForLoopHeader
  friend KnownNodeAspects* Zone::New<KnownNodeAspects, const KnownNodeAspects&,
                                     bool&, LoopEffects*&, Zone*&>(
      const KnownNodeAspects&, bool&, maglev::LoopEffects*&, Zone*&);
  KnownNodeAspects(const KnownNodeAspects& other, bool optimistic_initial_state,
                   LoopEffects* loop_effects, Zone* zone);
};
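
// Illustrative usage sketch, not part of the V8 source: how a graph-builder
// caller typically interacts with the node-info cache (`node`, `broker` and
// `isolate` are assumed to be in scope).
#if 0
  NodeInfo* info = known_node_aspects->GetOrCreateInfoFor(node, broker, isolate);
  if (!info->is_smi()) {
    // ... emit a Smi check for `node` here, then record what was learned so
    // later uses of `node` can skip the check:
    info->CombineType(NodeType::kSmi);
  }
#endif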

class InterpreterFrameState {
 public:
  InterpreterFrameState(const MaglevCompilationUnit& info,
                        KnownNodeAspects* known_node_aspects,
                        VirtualObjectList virtual_objects)
      : frame_(info),
        known_node_aspects_(known_node_aspects),
        virtual_objects_(virtual_objects) {
    frame_[interpreter::Register::virtual_accumulator()] = nullptr;
  }

  explicit InterpreterFrameState(const MaglevCompilationUnit& info)
      : InterpreterFrameState(
            info, info.zone()->New<KnownNodeAspects>(info.zone()),
            VirtualObjectList()) {}

  inline void CopyFrom(const MaglevCompilationUnit& info,
                       MergePointInterpreterFrameState& state,
                       bool preserve_known_node_aspects, Zone* zone);

  void set_accumulator(ValueNode* value) {
    // Conversions should be stored in known_node_aspects/NodeInfo.
    DCHECK(!value->properties().is_conversion());
    frame_[interpreter::Register::virtual_accumulator()] = value;
  }

  ValueNode* accumulator() const {
    return frame_[interpreter::Register::virtual_accumulator()];
  }

  void set(interpreter::Register reg, ValueNode* value) {
    DCHECK_IMPLIES(reg.is_parameter(),
                   reg == interpreter::Register::current_context() ||
                       reg == interpreter::Register::function_closure() ||
                       reg == interpreter::Register::virtual_accumulator() ||
                       reg.ToParameterIndex() >= 0);
    // Conversions should be stored in known_node_aspects/NodeInfo.
    DCHECK(!value->properties().is_conversion());
    frame_[reg] = value;
  }

  ValueNode* get(interpreter::Register reg) const {
    DCHECK_IMPLIES(reg.is_parameter(),
                   reg == interpreter::Register::current_context() ||
                       reg == interpreter::Register::function_closure() ||
                       reg == interpreter::Register::virtual_accumulator() ||
                       reg.ToParameterIndex() >= 0);
    return frame_[reg];
  }

  const RegisterFrameArray<ValueNode*>& frame() const { return frame_; }

  KnownNodeAspects* known_node_aspects() { return known_node_aspects_; }
  const KnownNodeAspects* known_node_aspects() const {
    return known_node_aspects_;
  }

  void set_known_node_aspects(KnownNodeAspects* known_node_aspects) {
    DCHECK_NOT_NULL(known_node_aspects);
    known_node_aspects_ = known_node_aspects;
  }

  VirtualObjectList virtual_objects() const { return virtual_objects_; }

  void add_object(VirtualObject* vobject) { virtual_objects_.Add(vobject); }
  void set_virtual_objects(const VirtualObjectList& virtual_objects) {
    virtual_objects_ = virtual_objects;
  }

 private:
  RegisterFrameArray<ValueNode*> frame_;
  KnownNodeAspects* known_node_aspects_;
  VirtualObjectList virtual_objects_;
};

class CompactInterpreterFrameState {
 public:
  CompactInterpreterFrameState(const MaglevCompilationUnit& info,
                               const compiler::BytecodeLivenessState* liveness)
      : live_registers_and_accumulator_(
            info.zone()->AllocateArray<ValueNode*>(SizeFor(info, liveness))),
        liveness_(liveness) {}

  CompactInterpreterFrameState(const MaglevCompilationUnit& info,
                               const compiler::BytecodeLivenessState* liveness,
                               const InterpreterFrameState& state)
      : CompactInterpreterFrameState(info, liveness) {
    virtual_objects_ = state.virtual_objects();
    ForEachValue(info, [&](ValueNode*& entry, interpreter::Register reg) {
      entry = state.get(reg);
    });
  }

  CompactInterpreterFrameState(const CompactInterpreterFrameState&) = delete;
  CompactInterpreterFrameState(CompactInterpreterFrameState&&) = delete;
  CompactInterpreterFrameState& operator=(const CompactInterpreterFrameState&) =
      delete;
  CompactInterpreterFrameState& operator=(CompactInterpreterFrameState&&) =
      delete;

  template <typename Function>
  void ForEachParameter(const MaglevCompilationUnit& info, Function&& f) const {
    for (int i = 0; i < info.parameter_count(); i++) {
      interpreter::Register reg = interpreter::Register::FromParameterIndex(i);
      f(live_registers_and_accumulator_[i], reg);
    }
  }

  template <typename Function>
  void ForEachParameter(const MaglevCompilationUnit& info, Function&& f) {
    for (int i = 0; i < info.parameter_count(); i++) {
      interpreter::Register reg = interpreter::Register::FromParameterIndex(i);
      f(live_registers_and_accumulator_[i], reg);
    }
  }

  template <typename Function>
  void ForEachLocal(const MaglevCompilationUnit& info, Function&& f) const {
    int live_reg = 0;
    for (int register_index : *liveness_) {
      interpreter::Register reg = interpreter::Register(register_index);
      f(live_registers_and_accumulator_[info.parameter_count() +
                                        context_register_count_ + live_reg++],
        reg);
    }
  }

  template <typename Function>
  void ForEachLocal(const MaglevCompilationUnit& info, Function&& f) {
    int live_reg = 0;
    for (int register_index : *liveness_) {
      interpreter::Register reg = interpreter::Register(register_index);
      f(live_registers_and_accumulator_[info.parameter_count() +
                                        context_register_count_ + live_reg++],
        reg);
    }
  }

  template <typename Function>
  void ForEachRegister(const MaglevCompilationUnit& info, Function&& f) {
    ForEachParameter(info, f);
    f(context(info), interpreter::Register::current_context());
    ForEachLocal(info, f);
  }

  template <typename Function>
  void ForEachRegister(const MaglevCompilationUnit& info, Function&& f) const {
    ForEachParameter(info, f);
    f(context(info), interpreter::Register::current_context());
    ForEachLocal(info, f);
  }

  template <typename Function>
  void ForEachValue(const MaglevCompilationUnit& info, Function&& f) {
    ForEachRegister(info, f);
    if (liveness_->AccumulatorIsLive()) {
      f(accumulator(info), interpreter::Register::virtual_accumulator());
    }
  }

  template <typename Function>
  void ForEachValue(const MaglevCompilationUnit& info, Function&& f) const {
    ForEachRegister(info, f);
    if (liveness_->AccumulatorIsLive()) {
      f(accumulator(info), interpreter::Register::virtual_accumulator());
    }
  }

  const compiler::BytecodeLivenessState* liveness() const { return liveness_; }

  ValueNode*& accumulator(const MaglevCompilationUnit& info) {
    DCHECK(liveness_->AccumulatorIsLive());
    return live_registers_and_accumulator_[size(info) - 1];
  }
  ValueNode*& accumulator(const MaglevCompilationUnit& info) const {
    DCHECK(liveness_->AccumulatorIsLive());
    return live_registers_and_accumulator_[size(info) - 1];
  }

  ValueNode*& context(const MaglevCompilationUnit& info) {
    return live_registers_and_accumulator_[info.parameter_count()];
  }
  ValueNode*& context(const MaglevCompilationUnit& info) const {
    return live_registers_and_accumulator_[info.parameter_count()];
  }

  ValueNode* GetValueOf(interpreter::Register reg,
                        const MaglevCompilationUnit& info) const {
    DCHECK(reg.is_valid());
    if (reg == interpreter::Register::current_context()) {
      return context(info);
    }
    if (reg == interpreter::Register::virtual_accumulator()) {
      return accumulator(info);
    }
    if (reg.is_parameter()) {
      DCHECK_LT(reg.ToParameterIndex(), info.parameter_count());
      return live_registers_and_accumulator_[reg.ToParameterIndex()];
    }
    int live_reg = 0;
    // TODO(victorgomes): See if we can do better than a linear search here.
    for (int register_index : *liveness_) {
      if (reg == interpreter::Register(register_index)) {
        return live_registers_and_accumulator_[info.parameter_count() +
                                               context_register_count_ +
                                               live_reg];
      }
      live_reg++;
    }
    // No value in this frame state.
    return nullptr;
  }

  size_t size(const MaglevCompilationUnit& info) const {
    return SizeFor(info, liveness_);
  }

  const VirtualObjectList& virtual_objects() const { return virtual_objects_; }
  void set_virtual_objects(const VirtualObjectList& vos) {
    virtual_objects_ = vos;
  }

 private:
  static size_t SizeFor(const MaglevCompilationUnit& info,
                        const compiler::BytecodeLivenessState* liveness) {
    return info.parameter_count() + context_register_count_ +
           liveness->live_value_count();
  }

  // TODO(leszeks): Only include the context register if there are any
  // Push/PopContext calls.
  static const int context_register_count_ = 1;
  ValueNode** const live_registers_and_accumulator_;
  const compiler::BytecodeLivenessState* const liveness_;
  VirtualObjectList virtual_objects_;
};
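
// Illustrative note, not part of the V8 source: the compact frame packs
// values as [parameters... | context | live locals... | accumulator if live],
// which is why locals are offset by parameter_count() +
// context_register_count_ above and the accumulator sits at size(info) - 1.
#if 0
  compact_state.ForEachValue(
      unit, [&](ValueNode* value, interpreter::Register reg) {
        // Visits parameters, then current_context, then live locals, then
        // (if live) virtual_accumulator -- matching the packed layout.
      });
#endif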

class MergePointRegisterState {
#ifdef V8_ENABLE_MAGLEV

 public:
  bool is_initialized() const { return values_[0].GetPayload().is_initialized; }

  template <typename Function>
  void ForEachGeneralRegister(Function&& f) {
    RegisterState* current_value = &values_[0];
    for (Register reg : MaglevAssembler::GetAllocatableRegisters()) {
      f(reg, *current_value);
      ++current_value;
    }
  }

  template <typename Function>
  void ForEachDoubleRegister(Function&& f) {
    RegisterState* current_value = &double_values_[0];
    for (DoubleRegister reg :
         MaglevAssembler::GetAllocatableDoubleRegisters()) {
      f(reg, *current_value);
      ++current_value;
    }
  }

 private:
  RegisterState values_[kAllocatableGeneralRegisterCount] = {{}};
  RegisterState double_values_[kAllocatableDoubleRegisterCount] = {{}};
#endif  // V8_ENABLE_MAGLEV
};
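
// Illustrative usage sketch, not part of the V8 source: the merge state pairs
// each allocatable register with its RegisterState slot, in allocation order.
#if 0
  merge_state.ForEachGeneralRegister([](Register reg, RegisterState& state) {
    // Inspect or update the merged value tracked for `reg` here.
  });
#endif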

class MergePointInterpreterFrameState {
 public:
  static MergePointInterpreterFrameState* New(
      const MaglevCompilationUnit& info, const InterpreterFrameState& state,
      int merge_offset, int predecessor_count, BasicBlock* predecessor,
      const compiler::BytecodeLivenessState* liveness);

  static MergePointInterpreterFrameState* NewForLoop(
      const InterpreterFrameState& start_state,
      const MaglevCompilationUnit& info, int merge_offset,
      int predecessor_count, const compiler::BytecodeLivenessState* liveness,
      const compiler::LoopInfo* loop_info, bool has_been_peeled = false);

  static MergePointInterpreterFrameState* NewForCatchBlock(
      const MaglevCompilationUnit& unit,
      const compiler::BytecodeLivenessState* liveness, int handler_offset,
      bool was_used, interpreter::Register context_register, Graph* graph);

  // Merges an unmerged framestate with a possibly merged framestate into
  // |this| framestate.
  void Merge(MaglevGraphBuilder* graph_builder, InterpreterFrameState& unmerged,
             BasicBlock* predecessor);
  void Merge(MaglevGraphBuilder* graph_builder,
             MaglevCompilationUnit& compilation_unit,
             InterpreterFrameState& unmerged, BasicBlock* predecessor);
  void InitializeLoop(MaglevGraphBuilder* graph_builder,
                      MaglevCompilationUnit& compilation_unit,
                      InterpreterFrameState& unmerged, BasicBlock* predecessor,
                      bool optimistic_initial_state = false,
                      LoopEffects* loop_effects = nullptr);
  void InitializeWithBasicBlock(BasicBlock* current_block);

  // Merges an unmerged framestate with a possibly merged framestate into
  // |this| framestate.
  void MergeLoop(MaglevGraphBuilder* graph_builder,
                 InterpreterFrameState& loop_end_state,
                 BasicBlock* loop_end_block);
  void MergeLoop(MaglevGraphBuilder* graph_builder,
                 MaglevCompilationUnit& compilation_unit,
                 InterpreterFrameState& loop_end_state,
                 BasicBlock* loop_end_block);
  void set_loop_effects(LoopEffects* loop_effects);
  const LoopEffects* loop_effects();
  // Merges a frame-state that might not be mergable, in which case we need to
  // re-compile the loop again. Calls FinishBlock only if the merge succeeded.
  bool TryMergeLoop(MaglevGraphBuilder* graph_builder,
                    InterpreterFrameState& loop_end_state,
                    const std::function<BasicBlock*()>& FinishBlock);

  // Merges an unmerged framestate into a possibly merged framestate at the
  // start of the target catchblock.
  void MergeThrow(MaglevGraphBuilder* handler_builder,
                  const MaglevCompilationUnit* handler_unit,
                  const KnownNodeAspects& known_node_aspects,
                  const VirtualObjectList virtual_objects);

  // Merges a dead framestate (e.g. one which has been early terminated with a
  // deopt).
  void MergeDead(const MaglevCompilationUnit& compilation_unit,
                 unsigned num = 1);

  // Merges a dead loop framestate (e.g. one where the block containing the
  // JumpLoop has been early terminated with a deopt).
  void MergeDeadLoop(const MaglevCompilationUnit& compilation_unit) {
    // This should be the last predecessor we try to merge.
    DCHECK_EQ(predecessors_so_far_, predecessor_count_ - 1);
    DCHECK(is_unmerged_loop());
    MergeDead(compilation_unit);
    // This means that this is no longer a loop.
    bitfield_ =
        kBasicBlockTypeBits::update(bitfield_, BasicBlockType::kDefault);
  }

  // Clears dead loop state, after all merges have already been done.
  void TurnLoopIntoRegularBlock();

  void RemovePredecessorAt(int predecessor_id);

  // Returns and clears the known node aspects on this state. Expects to only
  // ever be called once, when starting a basic block with this state.
  KnownNodeAspects* TakeKnownNodeAspects() {
    DCHECK_NOT_NULL(known_node_aspects_);
    return std::exchange(known_node_aspects_, nullptr);
  }

  KnownNodeAspects* CloneKnownNodeAspects(Zone* zone) {
    return known_node_aspects_->Clone(zone);
  }

  const CompactInterpreterFrameState& frame_state() const {
    return frame_state_;
  }
  MergePointRegisterState& register_state() { return register_state_; }

  bool has_phi() const { return !phis_.is_empty(); }
  Phi::List* phis() { return &phis_; }

  uint32_t predecessor_count() const { return predecessor_count_; }

  uint32_t predecessors_so_far() const { return predecessors_so_far_; }

  BasicBlock* predecessor_at(int i) const;

  void PrintVirtualObjects(const MaglevCompilationUnit& info,
                           VirtualObjectList from_ifs,
                           const char* prelude = nullptr) {
    if (!v8_flags.trace_maglev_graph_building) return;
    if (prelude) {
      std::cout << prelude << std::endl;
    }
    from_ifs.Print(std::cout,
                   "* VOs (Interpreter Frame State): ", info.graph_labeller());
    frame_state_.virtual_objects().Print(
        std::cout, "* VOs (Merge Frame State): ", info.graph_labeller());
  }

  bool is_loop() const {
    return basic_block_type() == BasicBlockType::kLoopHeader;
  }

  BasicBlockType basic_block_type() const {
    return kBasicBlockTypeBits::decode(bitfield_);
  }

  bool is_unmerged_loop() const {
    // If this is a loop and not all predecessors are set, then the loop isn't
    // merged yet.
    DCHECK_IMPLIES(is_loop(), predecessor_count_ > 0);
    return is_loop() && predecessors_so_far_ < predecessor_count_;
  }

  bool is_unreachable_loop() const {
    // If there is only one predecessor, and it's not set, then this is a loop
    // merge with no forward control flow entering it.
    return is_unmerged_loop() && !is_resumable_loop() &&
           predecessor_count_ == 1 && predecessors_so_far_ == 0;
  }

  bool IsUnreachableByForwardEdge() const;

  bool is_resumable_loop() const {
    bool res = kIsResumableLoopBit::decode(bitfield_);
    DCHECK_IMPLIES(res, is_loop());
    return res;
  }

  int merge_offset() const { return merge_offset_; }

  const compiler::LoopInfo* loop_info() const {
    DCHECK(loop_metadata_.has_value());
    DCHECK_NOT_NULL(loop_metadata_->loop_info);
    return loop_metadata_->loop_info;
  }
  void ClearLoopInfo() { loop_metadata_->loop_info = nullptr; }
  bool HasLoopInfo() const {
    return loop_metadata_.has_value() && loop_metadata_->loop_info;
  }

 private:
  // For each non-Phi value in the frame state, store its alternative
  // representations to avoid re-converting on Phi creation.
  class Alternatives {
   public:
    using List = base::ThreadedList<Alternatives>;

    explicit Alternatives(const NodeInfo* node_info)
        : node_type_(node_info ? node_info->type() : NodeType::kUnknown),
          tagged_alternative_(node_info ? node_info->alternative().tagged()
                                        : nullptr) {}

    NodeType node_type() const { return node_type_; }
    ValueNode* tagged_alternative() const { return tagged_alternative_; }

   private:
    Alternatives** next() { return &next_; }

    // For now, Phis are tagged, so only store the tagged alternative.
    const NodeType node_type_;
    ValueNode* const tagged_alternative_;
    Alternatives* next_ = nullptr;

    friend base::ThreadedListTraits<Alternatives>;
  };

  template <typename T, typename... Args>
  friend T* Zone::New(Args&&... args);

  MergePointInterpreterFrameState(
      const MaglevCompilationUnit& info, int merge_offset,
      int predecessor_count, int predecessors_so_far, BasicBlock** predecessors,
      BasicBlockType type, const compiler::BytecodeLivenessState* liveness);

  void MergePhis(MaglevGraphBuilder* builder,
                 MaglevCompilationUnit& compilation_unit,
                 InterpreterFrameState& unmerged, BasicBlock* predecessor,
                 bool optimistic_loop_phis);
  void MergeVirtualObjects(MaglevGraphBuilder* builder,
                           MaglevCompilationUnit& compilation_unit,
                           InterpreterFrameState& unmerged,
                           BasicBlock* predecessor);

  ValueNode* MergeValue(const MaglevGraphBuilder* graph_builder,
                        interpreter::Register owner,
                        const KnownNodeAspects& unmerged_aspects,
                        ValueNode* merged, ValueNode* unmerged,
                        Alternatives::List* per_predecessor_alternatives,
                        bool optimistic_loop_phis = false);

  void ReducePhiPredecessorCount(unsigned num);

  void MergeVirtualObjects(MaglevGraphBuilder* builder,
                           MaglevCompilationUnit& compilation_unit,
                           const VirtualObjectList unmerged_vos,
                           const KnownNodeAspects& unmerged_aspects);

  void MergeVirtualObject(MaglevGraphBuilder* builder,
                          const VirtualObjectList unmerged_vos,
                          const KnownNodeAspects& unmerged_aspects,
                          VirtualObject* merged, VirtualObject* unmerged);

  std::optional<ValueNode*> MergeVirtualObjectValue(
      const MaglevGraphBuilder* graph_builder,
      const KnownNodeAspects& unmerged_aspects, ValueNode* merged,
      ValueNode* unmerged);

  void MergeLoopValue(MaglevGraphBuilder* graph_builder,
                      interpreter::Register owner,
                      const KnownNodeAspects& unmerged_aspects,
                      ValueNode* merged, ValueNode* unmerged);

  ValueNode* NewLoopPhi(Zone* zone, interpreter::Register reg);

  ValueNode* NewExceptionPhi(Zone* zone, interpreter::Register reg);

  int merge_offset_;

  uint32_t predecessor_count_;
  uint32_t predecessors_so_far_;

  using kBasicBlockTypeBits = base::BitField<BasicBlockType, 0, 2>;
  using kIsResumableLoopBit = kBasicBlockTypeBits::Next<bool, 1>;
  uint32_t bitfield_;

  BasicBlock** predecessors_;

  Phi::List phis_;

  CompactInterpreterFrameState frame_state_;
  MergePointRegisterState register_state_;
  KnownNodeAspects* known_node_aspects_ = nullptr;

  union {
    // {per_predecessor_alternatives_} is used to keep track of the
    // alternatives of Phi inputs. Once the block has been merged, it's not
    // used anymore.
    Alternatives::List* per_predecessor_alternatives_;
    // {backedge_deopt_frame_} is used to record the deopt frame for the
    // backedge, in case we want to insert a deopting conversion during phi
    // untagging. It is set when visiting the JumpLoop (and will only be set
    // for loop headers), when the header has already been merged and
    // {per_predecessor_alternatives_} is thus not used anymore.
    DeoptFrame* backedge_deopt_frame_;
    // For catch blocks, store the interpreter register holding the context.
    // This will be the same value for all incoming merges.
    interpreter::Register catch_block_context_register_;
  };

  struct LoopMetadata {
    const compiler::LoopInfo* loop_info;
    const LoopEffects* loop_effects;
  };
  std::optional<LoopMetadata> loop_metadata_ = std::nullopt;
};

struct LoopEffects {
  explicit LoopEffects(int loop_header, Zone* zone)
      :
#ifdef DEBUG
        loop_header(loop_header),
#endif
        context_slot_written(zone),
        objects_written(zone),
        keys_cleared(zone),
        allocations(zone) {
  }
#ifdef DEBUG
  int loop_header;
#endif
  ZoneSet<KnownNodeAspects::LoadedContextSlotsKey> context_slot_written;
  ZoneSet<ValueNode*> objects_written;
  ZoneSet<KnownNodeAspects::LoadedPropertyMapKey> keys_cleared;
  ZoneSet<InlinedAllocation*> allocations;
  bool unstable_aspects_cleared = false;
  bool may_have_aliasing_contexts = false;
  void Merge(const LoopEffects* other) {
    if (!unstable_aspects_cleared) {
      unstable_aspects_cleared = other->unstable_aspects_cleared;
    }
    if (!may_have_aliasing_contexts) {
      may_have_aliasing_contexts = other->may_have_aliasing_contexts;
    }
    context_slot_written.insert(other->context_slot_written.begin(),
                                other->context_slot_written.end());
    objects_written.insert(other->objects_written.begin(),
                           other->objects_written.end());
    keys_cleared.insert(other->keys_cleared.begin(), other->keys_cleared.end());
    allocations.insert(other->allocations.begin(), other->allocations.end());
  }
};
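
// Illustrative note, not part of the V8 source: LoopEffects::Merge unions
// effect summaries -- the booleans are sticky-true and the written sets only
// grow -- so merging can only make a summary more conservative, never less.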

void InterpreterFrameState::CopyFrom(const MaglevCompilationUnit& info,
                                     MergePointInterpreterFrameState& state,
                                     bool preserve_known_node_aspects = false,
                                     Zone* zone = nullptr) {
  DCHECK_IMPLIES(preserve_known_node_aspects, zone);
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "- Copying frame state from merge @" << &state << std::endl;
    state.PrintVirtualObjects(info, virtual_objects());
  }

  state.frame_state().ForEachValue(
      info, [&](ValueNode* value, interpreter::Register reg) {
        frame_[reg] = value;
      });
  if (preserve_known_node_aspects) {
    known_node_aspects_ = state.CloneKnownNodeAspects(zone);
  } else {
    // Move "what we know" across without copying -- we can safely mutate it
    // now, as we won't be entering this merge point again.
    known_node_aspects_ = state.TakeKnownNodeAspects();
  }
  virtual_objects_ = state.frame_state().virtual_objects();
}
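
// Illustrative note, not part of the V8 source: the
// preserve_known_node_aspects path clones because the merge state will be
// consulted again (hence the DCHECK requiring a Zone for the copy); the
// default path moves the aspects instead, which is safe because, as the
// comment above says, the merge point won't be entered again.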

inline VirtualObjectList DeoptFrame::GetVirtualObjects() const {
  if (type() == FrameType::kInterpretedFrame) {
    return as_interpreted().frame_state()->virtual_objects();
  }
  DCHECK_NOT_NULL(parent());
  return parent()->GetVirtualObjects();
}

}  // namespace maglev
}  // namespace internal
}  // namespace v8

#endif  // V8_MAGLEV_MAGLEV_INTERPRETER_FRAME_STATE_H_