v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
graph-assembler.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/graph-assembler.h"

#include <optional>

#include "src/codegen/tnode.h"
#include "src/common/globals.h"
// For TNode types.
#include "src/objects/oddball.h"
#include "src/objects/string.h"

namespace v8 {
namespace internal {
namespace compiler {

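// RAII helper used by GraphAssembler::AddNode below: while a scope is alive,
// inline_reductions_blocked_ is set so that nodes created by an inline
// reducer are not themselves fed back into the inline reducers.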
class V8_NODISCARD GraphAssembler::BlockInlineReduction {
 public:
  explicit BlockInlineReduction(GraphAssembler* gasm) : gasm_(gasm) {
    DCHECK(!gasm_->inline_reductions_blocked_);
    gasm_->inline_reductions_blocked_ = true;
  }
  ~BlockInlineReduction() {
    DCHECK(gasm_->inline_reductions_blocked_);
    gasm_->inline_reductions_blocked_ = false;
  }

 private:
  GraphAssembler* gasm_;
};

GraphAssembler::GraphAssembler(
    MachineGraph* mcgraph, Zone* zone, BranchSemantics default_branch_semantics,
    std::optional<NodeChangedCallback> node_changed_callback,
    bool mark_loop_exits)
    : temp_zone_(zone),
      mcgraph_(mcgraph),
      default_branch_semantics_(default_branch_semantics),
      effect_(nullptr),
      control_(nullptr),
      node_changed_callback_(node_changed_callback),
      inline_reducers_(zone),
      inline_reductions_blocked_(false),
      loop_headers_(zone),
      mark_loop_exits_(mark_loop_exits) {
  DCHECK_NE(default_branch_semantics_, BranchSemantics::kUnspecified);
}

Node* GraphAssembler::IntPtrConstant(intptr_t value) {
  return AddClonedNode(mcgraph()->IntPtrConstant(value));
}

Node* GraphAssembler::Int32Constant(int32_t value) {
  return AddClonedNode(mcgraph()->Int32Constant(value));
}

Node* GraphAssembler::Int64Constant(int64_t value) {
  return AddClonedNode(mcgraph()->Int64Constant(value));
}

Node* GraphAssembler::Uint64Constant(uint64_t value) {
  return AddClonedNode(mcgraph()->Uint64Constant(value));
}

Node* GraphAssembler::UniqueIntPtrConstant(intptr_t value) {
  return AddNode(graph()->NewNode(
      machine()->Is64()
          ? common()->Int64Constant(value)
          : common()->Int32Constant(static_cast<int32_t>(value))));
}

Node* GraphAssembler::Float64Constant(double value) {
  return AddClonedNode(mcgraph()->Float64Constant(value));
}

TNode<Object> JSGraphAssembler::Constant(ObjectRef ref) {
  return TNode<Object>::UncheckedCast(
      AddClonedNode(jsgraph()->ConstantNoHole(ref, broker())));
}

TNode<Number> JSGraphAssembler::NumberConstant(double value) {
  return TNode<Number>::UncheckedCast(
      AddClonedNode(jsgraph()->ConstantNoHole(value)));
}

Node* GraphAssembler::Parameter(int index) {
  return AddNode(
      graph()->NewNode(common()->Parameter(index), graph()->start()));
}

Node* JSGraphAssembler::CEntryStubConstant(int result_size) {
  return AddClonedNode(jsgraph()->CEntryStubConstant(result_size));
}

#if V8_ENABLE_WEBASSEMBLY
Node* GraphAssembler::LoadStackPointer() {
  return AddNode(graph()->NewNode(machine()->LoadStackPointer(), effect()));
}

Node* GraphAssembler::SetStackPointer(Node* node) {
  return AddNode(
      graph()->NewNode(machine()->SetStackPointer(), node, effect()));
}
#endif

Node* GraphAssembler::LoadHeapNumberValue(Node* heap_number) {
  return Load(MachineType::Float64(), heap_number,
              IntPtrConstant(offsetof(HeapNumber, value_) - kHeapObjectTag));
}

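// The X-macro blocks below expand the singleton-constant and machine-operator
// lists (JSGRAPH_SINGLETON_CONSTANT_LIST, PURE_ASSEMBLER_MACH_UNOP_LIST,
// PURE_ASSEMBLER_MACH_BINOP_LIST, CHECKED_ASSEMBLER_MACH_BINOP_LIST) into
// one-line wrapper methods on GraphAssembler / JSGraphAssembler.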
#define SINGLETON_CONST_DEF(Name, Type)              \
  TNode<Type> JSGraphAssembler::Name##Constant() {   \
    return TNode<Type>::UncheckedCast(               \
        AddClonedNode(jsgraph()->Name##Constant())); \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_DEF)
#undef SINGLETON_CONST_DEF

#define SINGLETON_CONST_TEST_DEF(Name, ...)                        \
  TNode<Boolean> JSGraphAssembler::Is##Name(TNode<Object> value) { \
    return TNode<Boolean>::UncheckedCast(                          \
        ReferenceEqual(value, Name##Constant()));                  \
  }
JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_TEST_DEF)
#undef SINGLETON_CONST_TEST_DEF

#define PURE_UNOP_DEF(Name)                                     \
  Node* GraphAssembler::Name(Node* input) {                     \
    return AddNode(graph()->NewNode(machine()->Name(), input)); \
  }
PURE_ASSEMBLER_MACH_UNOP_LIST(PURE_UNOP_DEF)
#undef PURE_UNOP_DEF

#define PURE_BINOP_DEF(Name)                                           \
  Node* GraphAssembler::Name(Node* left, Node* right) {                \
    return AddNode(graph()->NewNode(machine()->Name(), left, right));  \
  }
#define PURE_BINOP_DEF_TNODE(Name, Result, Left, Right)                       \
  TNode<Result> GraphAssembler::Name(SloppyTNode<Left> left,                  \
                                     SloppyTNode<Right> right) {              \
    return AddNode<Result>(graph()->NewNode(machine()->Name(), left, right)); \
  }
PURE_ASSEMBLER_MACH_BINOP_LIST(PURE_BINOP_DEF, PURE_BINOP_DEF_TNODE)
#undef PURE_BINOP_DEF
#undef PURE_BINOP_DEF_TNODE

TNode<UintPtrT> GraphAssembler::UintPtrAdd(TNode<UintPtrT> left,
                                           TNode<UintPtrT> right) {
  return kSystemPointerSize == 8
             ? TNode<UintPtrT>::UncheckedCast(Int64Add(left, right))
             : TNode<UintPtrT>::UncheckedCast(Int32Add(left, right));
}

TNode<UintPtrT> GraphAssembler::UintPtrSub(TNode<UintPtrT> left,
                                           TNode<UintPtrT> right) {
  return kSystemPointerSize == 8
             ? TNode<UintPtrT>::UncheckedCast(Int64Sub(left, right))
             : TNode<UintPtrT>::UncheckedCast(Int32Sub(left, right));
}

TNode<UintPtrT> GraphAssembler::UintPtrDiv(TNode<UintPtrT> left,
                                           TNode<UintPtrT> right) {
  return kSystemPointerSize == 8
             ? TNode<UintPtrT>::UncheckedCast(Uint64Div(left, right))
             : TNode<UintPtrT>::UncheckedCast(Uint32Div(left, right));
}

#define CHECKED_BINOP_DEF(Name)                                        \
  Node* GraphAssembler::Name(Node* left, Node* right) {                \
    return AddNode(                                                    \
        graph()->NewNode(machine()->Name(), left, right, control()));  \
  }
CHECKED_ASSEMBLER_MACH_BINOP_LIST(CHECKED_BINOP_DEF)
#undef CHECKED_BINOP_DEF

Node* GraphAssembler::IntPtrEqual(Node* left, Node* right) {
  return WordEqual(left, right);
}

Node* GraphAssembler::TaggedEqual(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Word32Equal(left, right);
  } else {
    return WordEqual(left, right);
  }
}

Node* GraphAssembler::SmiSub(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return BitcastWord32ToWord64(Int32Sub(left, right));
  } else {
    return IntSub(left, right);
  }
}

Node* GraphAssembler::SmiLessThan(Node* left, Node* right) {
  if (COMPRESS_POINTERS_BOOL) {
    return Int32LessThan(left, right);
  } else {
    return IntLessThan(left, right);
  }
}

Node* GraphAssembler::Float64RoundDown(Node* value) {
  CHECK(machine()->Float64RoundDown().IsSupported());
  return AddNode(graph()->NewNode(machine()->Float64RoundDown().op(), value));
}

Node* GraphAssembler::Float64RoundTruncate(Node* value) {
  CHECK(machine()->Float64RoundTruncate().IsSupported());
  return AddNode(
      graph()->NewNode(machine()->Float64RoundTruncate().op(), value));
}

Node* GraphAssembler::Projection(int index, Node* value, Node* ctrl) {
  return AddNode(graph()->NewNode(common()->Projection(index), value,
                                  ctrl ? ctrl : control()));
}

Node* JSGraphAssembler::Allocate(AllocationType allocation, Node* size) {
  return AddNode(
      graph()->NewNode(simplified()->AllocateRaw(Type::Any(), allocation), size,
                       effect(), control()));
}

Node* JSGraphAssembler::LoadField(FieldAccess const& access, Node* object) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadField(access),
                                         object, effect(), control()));
  return value;
}

Node* JSGraphAssembler::LoadElement(ElementAccess const& access, Node* object,
                                    Node* index) {
  Node* value = AddNode(graph()->NewNode(simplified()->LoadElement(access),
                                         object, index, effect(), control()));
  return value;
}

Node* JSGraphAssembler::StoreField(FieldAccess const& access, Node* object,
                                   Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreField(access), object,
                                  value, effect(), control()));
}

  ExternalReference const ref =
      ExternalReference::address_of_pending_message(isolate());
  return AddNode(graph()->NewNode(
      simplified()->StoreMessage(), jsgraph()->ExternalConstant(ref),
      jsgraph()->TheHoleConstant(), effect(), control()));
}

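// With V8_MAP_PACKING, the map word stored in an object's header is XORed
// with Internals::kMapWordXorMask; these helpers convert between the packed
// on-heap representation and a usable Map value.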
#ifdef V8_MAP_PACKING
TNode<Map> GraphAssembler::UnpackMapWord(Node* map_word) {
  map_word = BitcastTaggedToWordForTagAndSmiBits(map_word);
  // TODO(wenyuzhao): Clear header metadata.
  Node* map = WordXor(map_word, IntPtrConstant(Internals::kMapWordXorMask));
  return TNode<Map>::UncheckedCast(BitcastWordToTagged(map));
}

Node* GraphAssembler::PackMapWord(TNode<Map> map) {
  Node* map_word = BitcastTaggedToWordForTagAndSmiBits(map);
  Node* packed = WordXor(map_word, IntPtrConstant(Internals::kMapWordXorMask));
  return BitcastWordToTaggedSigned(packed);
}
#endif

TNode<Map> JSGraphAssembler::LoadMap(TNode<HeapObject> object) {
  Node* map_word = Load(MachineType::TaggedPointer(), object,
                        IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag));
#ifdef V8_MAP_PACKING
  return UnpackMapWord(map_word);
#else
  return TNode<Map>::UncheckedCast(map_word);
#endif
}

Node* JSGraphAssembler::StoreElement(ElementAccess const& access, Node* object,
                                     Node* index, Node* value) {
  return AddNode(graph()->NewNode(simplified()->StoreElement(access), object,
                                  index, value, effect(), control()));
}

void JSGraphAssembler::TransitionAndStoreElement(MapRef double_map,
                                                 MapRef fast_map,
                                                 TNode<HeapObject> object,
                                                 TNode<Number> index,
                                                 TNode<Object> value) {
  AddNode(graph()->NewNode(
      simplified()->TransitionAndStoreElement(double_map, fast_map), object,
      index, value, effect(), control()));
}

TNode<String> JSGraphAssembler::StringSubstring(TNode<String> string,
                                                TNode<Number> from,
                                                TNode<Number> to) {
  return AddNode<String>(graph()->NewNode(
      simplified()->StringSubstring(), string, from, to, effect(), control()));
}

Node* JSGraphAssembler::BooleanNot(Node* cond) {
  return AddNode(graph()->NewNode(simplified()->BooleanNot(), cond));
}

Node* JSGraphAssembler::CheckSmi(Node* value, const FeedbackSource& feedback) {
  return AddNode(graph()->NewNode(simplified()->CheckSmi(feedback), value,
                                  effect(), control()));
}

Node* JSGraphAssembler::CheckNumberFitsInt32(Node* value,
                                             const FeedbackSource& feedback) {
  return AddNode(graph()->NewNode(simplified()->CheckNumberFitsInt32(feedback),
                                  value, effect(), control()));
}

Node* JSGraphAssembler::CheckNumber(Node* value,
                                    const FeedbackSource& feedback) {
  return AddNode(graph()->NewNode(simplified()->CheckNumber(feedback), value,
                                  effect(), control()));
}

Node* JSGraphAssembler::CheckIf(Node* cond, DeoptimizeReason reason,
                                const FeedbackSource& feedback) {
  return AddNode(graph()->NewNode(simplified()->CheckIf(reason, feedback), cond,
                                  effect(), control()));
}

Node* JSGraphAssembler::Assert(Node* cond, const char* condition_string,
                               const char* file, int line) {
  return AddNode(graph()->NewNode(
      common()->Assert(BranchSemantics::kJS, condition_string, file, line),
      cond, effect(), control()));
}

void JSGraphAssembler::Assert(TNode<Word32T> cond, const char* condition_string,
                              const char* file, int line) {
  AddNode(graph()->NewNode(
      common()->Assert(BranchSemantics::kMachine, condition_string, file, line),
      cond, effect(), control()));
}

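// Helper for emitting graph code that computes the length / byte length of a
// JSArrayBufferView (JSTypedArray or JSDataView) and its detached /
// out-of-bounds checks. The extra code paths exist because a view may be
// backed by a resizable ArrayBuffer (RAB) or a growable SharedArrayBuffer
// (GSAB), whose size can change after the view has been constructed.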
class ArrayBufferViewAccessBuilder {
 public:
  ArrayBufferViewAccessBuilder(JSGraphAssembler* assembler,
                               InstanceType instance_type,
                               std::set<ElementsKind> candidates)
      : assembler_(assembler),
        instance_type_(instance_type),
        candidates_(std::move(candidates)) {
    DCHECK_NOT_NULL(assembler_);
    // TODO(v8:11111): Optimize for JS_RAB_GSAB_DATA_VIEW_TYPE too.
    DCHECK(instance_type_ == JS_DATA_VIEW_TYPE ||
           instance_type_ == JS_TYPED_ARRAY_TYPE);
  }

  bool maybe_rab_gsab() const {
    if (candidates_.empty()) return true;
    return !base::all_of(candidates_, [](auto e) {
      return !IsRabGsabTypedArrayElementsKind(e);
    });
  }

  std::optional<int> TryComputeStaticElementShift() {
    DCHECK(instance_type_ != JS_RAB_GSAB_DATA_VIEW_TYPE);
    if (instance_type_ == JS_DATA_VIEW_TYPE) return 0;
    if (candidates_.empty()) return std::nullopt;
    int shift = ElementsKindToShiftSize(*candidates_.begin());
    if (!base::all_of(candidates_, [shift](auto e) {
          return ElementsKindToShiftSize(e) == shift;
        })) {
      return std::nullopt;
    }
    return shift;
  }

  std::optional<int> TryComputeStaticElementSize() {
    DCHECK(instance_type_ != JS_RAB_GSAB_DATA_VIEW_TYPE);
    if (instance_type_ == JS_DATA_VIEW_TYPE) return 1;
    if (candidates_.empty()) return std::nullopt;
    int size = ElementsKindToByteSize(*candidates_.begin());
    if (!base::all_of(candidates_, [size](auto e) {
          return ElementsKindToByteSize(e) == size;
        })) {
      return std::nullopt;
    }
    return size;
  }

  TNode<UintPtrT> BuildLength(TNode<JSArrayBufferView> view,
                              TNode<Context> context) {
    auto& a = *assembler_;

    // Case 1: Normal (backed by AB/SAB) or non-length tracking backed by GSAB
    // (can't go oob once constructed)
    auto GsabFixedOrNormal = [&]() {
      return MachineLoadField<UintPtrT>(AccessBuilder::ForJSTypedArrayLength(),
                                        view, UseInfo::Word());
    };

    // If we statically know we cannot have rab/gsab backed, we can simply
    // load from the view.
    if (!maybe_rab_gsab()) {
      return GsabFixedOrNormal();
    }

    // Otherwise, we need to generate the checks for the view's bitfield.
    TNode<Word32T> bitfield = a.EnterMachineGraph<Word32T>(
        a.LoadField<Word32T>(AccessBuilder::ForJSArrayBufferViewBitField(),
                             view),
        UseInfo::TruncatingWord32());
    TNode<Word32T> length_tracking_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsLengthTracking));
    TNode<Word32T> backed_by_rab_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsBackedByRab));

    // Load the underlying buffer.
    TNode<HeapObject> buffer = a.LoadField<HeapObject>(
        AccessBuilder::ForJSArrayBufferViewBuffer(), view);

    // Compute the element size.
    TNode<Uint32T> element_size;
    if (auto size_opt = TryComputeStaticElementSize()) {
      element_size = a.Uint32Constant(*size_opt);
    } else {
      DCHECK_EQ(instance_type_, JS_TYPED_ARRAY_TYPE);
      TNode<Map> typed_array_map = a.LoadField<Map>(
          AccessBuilder::ForMap(), view);
      TNode<Uint32T> elements_kind = a.LoadElementsKind(typed_array_map);
      element_size = a.LookupByteSizeForElementsKind(elements_kind);
    }

    // 2) Fixed length backed by RAB (can go oob once constructed)
    auto RabFixed = [&]() {
      TNode<UintPtrT> unchecked_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteLength(), view,
          UseInfo::Word());
      TNode<UintPtrT> underlying_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      TNode<UintPtrT> byte_length =
          a
              .MachineSelectIf<UintPtrT>(a.UintPtrLessThanOrEqual(
                  a.UintPtrAdd(byte_offset, unchecked_byte_length),
                  underlying_byte_length))
              .Then([&]() { return unchecked_byte_length; })
              .Else([&]() { return a.UintPtrConstant(0); })
              .Value();
      return a.UintPtrDiv(byte_length, a.ChangeUint32ToUintPtr(element_size));
    };

    // 3) Length-tracking backed by RAB (JSArrayBuffer stores the length)
    auto RabTracking = [&]() {
      TNode<UintPtrT> byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      return a
          .MachineSelectIf<UintPtrT>(
              a.UintPtrLessThanOrEqual(byte_offset, byte_length))
          .Then([&]() {
            // length = floor((byte_length - byte_offset) / element_size)
            return a.UintPtrDiv(a.UintPtrSub(byte_length, byte_offset),
                                a.ChangeUint32ToUintPtr(element_size));
          })
          .Else([&]() { return a.UintPtrConstant(0); })
          .ExpectTrue()
          .Value();
    };

    // 4) Length-tracking backed by GSAB (BackingStore stores the length)
    auto GsabTracking = [&]() {
      TNode<Number> temp = TNode<Number>::UncheckedCast(a.TypeGuard(
          TypeCache::Get()->kJSArrayBufferByteLengthType,
          a.JSCallRuntime1(Runtime::kGrowableSharedArrayBufferByteLength,
                           buffer, context, std::nullopt, Operator::kNoWrite)));
      TNode<UintPtrT> byte_length =
          a.EnterMachineGraph<UintPtrT>(temp, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      return a
          .MachineSelectIf<UintPtrT>(
              a.UintPtrLessThanOrEqual(byte_offset, byte_length))
          .Then([&]() {
            // length = floor((byte_length - byte_offset) / element_size)
            return a.UintPtrDiv(a.UintPtrSub(byte_length, byte_offset),
                                a.ChangeUint32ToUintPtr(element_size));
          })
          .Else([&]() { return a.UintPtrConstant(0); })
          .ExpectTrue()
          .Value();
    };

    return a.MachineSelectIf<UintPtrT>(length_tracking_bit)
        .Then([&]() {
          return a.MachineSelectIf<UintPtrT>(backed_by_rab_bit)
              .Then(RabTracking)
              .Else(GsabTracking)
              .Value();
        })
        .Else([&]() {
          return a.MachineSelectIf<UintPtrT>(backed_by_rab_bit)
              .Then(RabFixed)
              .Else(GsabFixedOrNormal)
              .Value();
        })
        .Value();
  }

  TNode<UintPtrT> BuildByteLength(TNode<JSArrayBufferView> view,
                                  TNode<Context> context) {
    auto& a = *assembler_;

    // Case 1: Normal (backed by AB/SAB) or non-length tracking backed by GSAB
    // (can't go oob once constructed)
    auto GsabFixedOrNormal = [&]() {
      return MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteLength(), view,
          UseInfo::Word());
    };

    // If we statically know we cannot have rab/gsab backed, we can simply
    // load from the view.
    if (!maybe_rab_gsab()) {
      return GsabFixedOrNormal();
    }

    // Otherwise, we need to generate the checks for the view's bitfield.
    TNode<Word32T> bitfield = a.EnterMachineGraph<Word32T>(
        a.LoadField<Word32T>(AccessBuilder::ForJSArrayBufferViewBitField(),
                             view),
        UseInfo::TruncatingWord32());
    TNode<Word32T> length_tracking_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsLengthTracking));
    TNode<Word32T> backed_by_rab_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsBackedByRab));

    // Load the underlying buffer.
    TNode<HeapObject> buffer = a.LoadField<HeapObject>(
        AccessBuilder::ForJSArrayBufferViewBuffer(), view);

    // Case 2: Fixed length backed by RAB (can go oob once constructed)
    auto RabFixed = [&]() {
      TNode<UintPtrT> unchecked_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteLength(), view,
          UseInfo::Word());
      TNode<UintPtrT> underlying_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      return a
          .MachineSelectIf<UintPtrT>(a.UintPtrLessThanOrEqual(
              a.UintPtrAdd(byte_offset, unchecked_byte_length),
              underlying_byte_length))
          .Then([&]() { return unchecked_byte_length; })
          .Else([&]() { return a.UintPtrConstant(0); })
          .Value();
    };

    auto RoundDownToElementSize = [&](TNode<UintPtrT> byte_size) {
      if (auto shift_opt = TryComputeStaticElementShift()) {
        constexpr uintptr_t all_bits = static_cast<uintptr_t>(-1);
        if (*shift_opt == 0) return byte_size;
        return TNode<UintPtrT>::UncheckedCast(
            a.WordAnd(byte_size, a.UintPtrConstant(all_bits << (*shift_opt))));
      }
      DCHECK_EQ(instance_type_, JS_TYPED_ARRAY_TYPE);
      TNode<Map> typed_array_map = a.LoadField<Map>(
          AccessBuilder::ForMap(), view);
      TNode<Uint32T> elements_kind = a.LoadElementsKind(typed_array_map);
      TNode<Uint32T> element_shift =
          a.LookupByteShiftForElementsKind(elements_kind);
      return TNode<UintPtrT>::UncheckedCast(
          a.WordShl(a.WordShr(byte_size, element_shift), element_shift));
    };

    // Case 3: Length-tracking backed by RAB (JSArrayBuffer stores the length)
    auto RabTracking = [&]() {
      TNode<UintPtrT> byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      return a
          .MachineSelectIf<UintPtrT>(
              a.UintPtrLessThanOrEqual(byte_offset, byte_length))
          .Then([&]() {
            return RoundDownToElementSize(
                a.UintPtrSub(byte_length, byte_offset));
          })
          .Else([&]() { return a.UintPtrConstant(0); })
          .ExpectTrue()
          .Value();
    };

    // Case 4: Length-tracking backed by GSAB (BackingStore stores the length)
    auto GsabTracking = [&]() {
      TNode<Number> temp = TNode<Number>::UncheckedCast(a.TypeGuard(
          TypeCache::Get()->kJSArrayBufferByteLengthType,
          a.JSCallRuntime1(Runtime::kGrowableSharedArrayBufferByteLength,
                           buffer, context, std::nullopt, Operator::kNoWrite)));
      TNode<UintPtrT> byte_length =
          a.EnterMachineGraph<UintPtrT>(temp, UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      return a
          .MachineSelectIf<UintPtrT>(
              a.UintPtrLessThanOrEqual(byte_offset, byte_length))
          .Then([&]() {
            return RoundDownToElementSize(
                a.UintPtrSub(byte_length, byte_offset));
          })
          .Else([&]() { return a.UintPtrConstant(0); })
          .ExpectTrue()
          .Value();
    };

    return a.MachineSelectIf<UintPtrT>(length_tracking_bit)
        .Then([&]() {
          return a.MachineSelectIf<UintPtrT>(backed_by_rab_bit)
              .Then(RabTracking)
              .Else(GsabTracking)
              .Value();
        })
        .Else([&]() {
          return a.MachineSelectIf<UintPtrT>(backed_by_rab_bit)
              .Then(RabFixed)
              .Else(GsabFixedOrNormal)
              .Value();
        })
        .Value();
  }

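  // Builds a Word32 value that is non-zero iff the view's buffer has been
  // detached or, for RAB-backed views, the view no longer fits inside the
  // underlying buffer.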
  TNode<Word32T> BuildDetachedOrOutOfBoundsCheck(
      TNode<JSArrayBufferView> view) {
    auto& a = *assembler_;

    // Load the underlying buffer and its bitfield.
    TNode<HeapObject> buffer = a.LoadField<HeapObject>(
        AccessBuilder::ForJSArrayBufferViewBuffer(), view);
    // Mask the detached bit.
    TNode<Word32T> detached_bit = a.ArrayBufferDetachedBit(buffer);

    // If we statically know we cannot have rab/gsab backed, we are done here.
    if (!maybe_rab_gsab()) {
      return detached_bit;
    }

    // Otherwise, we need to generate the checks for the view's bitfield.
    TNode<Word32T> bitfield = a.EnterMachineGraph<Word32T>(
        a.LoadField<Word32T>(AccessBuilder::ForJSArrayBufferViewBitField(),
                             view),
        UseInfo::TruncatingWord32());
    TNode<Word32T> length_tracking_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsLengthTracking));
    TNode<Word32T> backed_by_rab_bit = a.Word32And(
        bitfield, a.Uint32Constant(JSArrayBufferView::kIsBackedByRab));

    auto RabLengthTracking = [&]() {
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      TNode<UintPtrT> underlying_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());

      return a.Word32Or(detached_bit,
                        a.UintPtrLessThan(underlying_byte_length, byte_offset));
    };

    auto RabFixed = [&]() {
      TNode<UintPtrT> unchecked_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteLength(), view,
          UseInfo::Word());
      TNode<UintPtrT> byte_offset = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferViewByteOffset(), view,
          UseInfo::Word());

      TNode<UintPtrT> underlying_byte_length = MachineLoadField<UintPtrT>(
          AccessBuilder::ForJSArrayBufferByteLength(), buffer, UseInfo::Word());

      return a.Word32Or(
          detached_bit,
          a.UintPtrLessThan(underlying_byte_length,
                            a.UintPtrAdd(byte_offset, unchecked_byte_length)));
    };

    // Dispatch depending on rab/gsab and length tracking.
    return a.MachineSelectIf<Word32T>(backed_by_rab_bit)
        .Then([&]() {
          return a.MachineSelectIf<Word32T>(length_tracking_bit)
              .Then(RabLengthTracking)
              .Else(RabFixed)
              .Value();
        })
        .Else([&]() { return detached_bit; })
        .Value();
  }

 private:
  template <typename T>
  TNode<T> MachineLoadField(FieldAccess const& access, TNode<HeapObject> object,
                            const UseInfo& use_info) {
    return assembler_->EnterMachineGraph<T>(
        assembler_->LoadField<T>(access, object), use_info);
  }

  JSGraphAssembler* assembler_;
  InstanceType instance_type_;
  std::set<ElementsKind> candidates_;
};

TNode<Number> JSGraphAssembler::ArrayBufferViewByteLength(
    TNode<JSArrayBufferView> array_buffer_view, InstanceType instance_type,
    std::set<ElementsKind> elements_kinds_candidates, TNode<Context> context) {
  ArrayBufferViewAccessBuilder builder(this, instance_type,
                                       std::move(elements_kinds_candidates));
  return ExitMachineGraph<Number>(
      builder.BuildByteLength(array_buffer_view, context),
      MachineType::PointerRepresentation(),
      TypeCache::Get()->kJSArrayBufferViewByteLengthType);
}

TNode<Word32T> JSGraphAssembler::ArrayBufferDetachedBit(
    TNode<HeapObject> buffer) {
  TNode<Word32T> bitfield = EnterMachineGraph<Word32T>(
      LoadField<Word32T>(AccessBuilder::ForJSArrayBufferBitField(), buffer),
      UseInfo::TruncatingWord32());
  return Word32And(bitfield,
                   Uint32Constant(JSArrayBuffer::WasDetachedBit::kMask));
}

TNode<Number> JSGraphAssembler::TypedArrayLength(
    TNode<JSTypedArray> typed_array,
    std::set<ElementsKind> elements_kinds_candidates, TNode<Context> context) {
  ArrayBufferViewAccessBuilder builder(this, JS_TYPED_ARRAY_TYPE,
                                       std::move(elements_kinds_candidates));
  return ExitMachineGraph<Number>(builder.BuildLength(typed_array, context),
                                  MachineType::PointerRepresentation(),
                                  TypeCache::Get()->kJSTypedArrayLengthType);
}

void JSGraphAssembler::CheckIfTypedArrayWasDetachedOrOutOfBounds(
    TNode<JSTypedArray> typed_array,
    std::set<ElementsKind> elements_kinds_candidates,
    const FeedbackSource& feedback) {
  ArrayBufferViewAccessBuilder builder(this, JS_TYPED_ARRAY_TYPE,
                                       std::move(elements_kinds_candidates));

  TNode<Word32T> detached_check =
      builder.BuildDetachedOrOutOfBoundsCheck(typed_array);
  TNode<Boolean> is_not_detached =
      ExitMachineGraph<Boolean>(Word32Equal(detached_check, Uint32Constant(0)),
                                MachineRepresentation::kBit, Type::Boolean());
  CheckIf(is_not_detached, DeoptimizeReason::kArrayBufferWasDetached, feedback);
}

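// The two lookups below index the global typed-array elements-kind shift and
// size tables (reached via external references) to determine a typed array's
// element size from its elements kind at runtime.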
TNode<Uint32T> GraphAssembler::LookupByteShiftForElementsKind(
    TNode<Uint32T> elements_kind) {
  TNode<UintPtrT> index = ChangeUint32ToUintPtr(Int32Sub(
      elements_kind, Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)));
  Node* shift_table = ExternalConstant(
      ExternalReference::
          typed_array_and_rab_gsab_typed_array_elements_kind_shifts());
  return TNode<Uint32T>::UncheckedCast(
      Load(MachineType::Uint8(), shift_table, index));
}

TNode<Uint32T> GraphAssembler::LookupByteSizeForElementsKind(
    TNode<Uint32T> elements_kind) {
  TNode<UintPtrT> index = ChangeUint32ToUintPtr(Int32Sub(
      elements_kind, Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)));
  Node* size_table = ExternalConstant(
      ExternalReference::
          typed_array_and_rab_gsab_typed_array_elements_kind_sizes());
  return TNode<Uint32T>::UncheckedCast(
      Load(MachineType::Uint8(), size_table, index));
}

TNode<Object> JSGraphAssembler::JSCallRuntime1(
    Runtime::FunctionId function_id, TNode<Object> arg0, TNode<Context> context,
    std::optional<FrameState> frame_state, Operator::Properties properties) {
  return MayThrow([&]() {
    if (frame_state.has_value()) {
      return AddNode<Object>(graph()->NewNode(
          javascript()->CallRuntime(function_id, 1, properties), arg0, context,
          static_cast<Node*>(*frame_state), effect(), control()));
    } else {
      return AddNode<Object>(graph()->NewNode(
          javascript()->CallRuntime(function_id, 1, properties), arg0, context,
          effect(), control()));
    }
  });
}

TNode<Object> JSGraphAssembler::JSCallRuntime2(Runtime::FunctionId function_id,
                                               TNode<Object> arg0,
                                               TNode<Object> arg1,
                                               TNode<Context> context,
                                               FrameState frame_state) {
  return MayThrow([&]() {
    return AddNode<Object>(
        graph()->NewNode(javascript()->CallRuntime(function_id, 2), arg0, arg1,
                         context, frame_state, effect(), control()));
  });
}

Node* JSGraphAssembler::Chained(const Operator* op, Node* input) {
  DCHECK_EQ(op->ValueInputCount(), 1);
  return AddNode(
      graph()->NewNode(common()->Chained(op), input, effect(), control()));
}

Node* JSGraphAssembler::TypeGuard(Type type, Node* value) {
  return AddNode(
      graph()->NewNode(common()->TypeGuard(type), value, effect(), control()));
}

Node* GraphAssembler::Checkpoint(FrameState frame_state) {
  return AddNode(graph()->NewNode(common()->Checkpoint(), frame_state, effect(),
                                  control()));
}

Node* GraphAssembler::DebugBreak() {
  return AddNode(
      graph()->NewNode(machine()->DebugBreak(), effect(), control()));
}

TNode<RawPtrT> GraphAssembler::StackSlot(int size, int alignment,
                                         bool is_tagged) {
  return AddNode<RawPtrT>(
      graph()->NewNode(machine()->StackSlot(size, alignment, is_tagged)));
}

Node* GraphAssembler::AdaptLocalArgument(Node* argument) {
#ifdef V8_ENABLE_DIRECT_HANDLE
  // With direct locals, the argument can be passed directly.
  return BitcastTaggedToWord(argument);
#else
  // With indirect locals, the argument has to be stored on the stack and the
  // slot address is passed.
  Node* stack_slot = StackSlot(sizeof(uintptr_t), alignof(uintptr_t), true);
  Store(StoreRepresentation(MachineType::PointerRepresentation(),
                            kNoWriteBarrier),
        stack_slot, 0, BitcastTaggedToWord(argument));
  return stack_slot;
#endif
}

Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, Node* offset,
                            Node* value) {
  return AddNode(graph()->NewNode(machine()->Store(rep), object, offset, value,
                                  effect(), control()));
}

Node* GraphAssembler::Store(StoreRepresentation rep, Node* object, int offset,
                            Node* value) {
  return Store(rep, object, IntPtrConstant(offset), value);
}

Node* GraphAssembler::Load(MachineType type, Node* object, Node* offset) {
  return AddNode(graph()->NewNode(machine()->Load(type), object, offset,
                                  effect(), control()));
}

Node* GraphAssembler::Load(MachineType type, Node* object, int offset) {
  return Load(type, object, IntPtrConstant(offset));
}

Node* GraphAssembler::StoreUnaligned(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  Operator const* const op =
      (rep == MachineRepresentation::kWord8 ||
       machine()->UnalignedStoreSupported(rep))
          ? machine()->Store(StoreRepresentation(rep, kNoWriteBarrier))
          : machine()->UnalignedStore(rep);
  return AddNode(
      graph()->NewNode(op, object, offset, value, effect(), control()));
}

Node* GraphAssembler::LoadUnaligned(MachineType type, Node* object,
                                    Node* offset) {
  Operator const* const op =
      (type.representation() == MachineRepresentation::kWord8 ||
       machine()->UnalignedLoadSupported(type.representation()))
          ? machine()->Load(type)
          : machine()->UnalignedLoad(type);
  return AddNode(graph()->NewNode(op, object, offset, effect(), control()));
}

Node* GraphAssembler::ProtectedStore(MachineRepresentation rep, Node* object,
                                     Node* offset, Node* value) {
  return AddNode(graph()->NewNode(machine()->ProtectedStore(rep), object,
                                  offset, value, effect(), control()));
}

Node* GraphAssembler::ProtectedLoad(MachineType type, Node* object,
                                    Node* offset) {
  return AddNode(graph()->NewNode(machine()->ProtectedLoad(type), object,
                                  offset, effect(), control()));
}

Node* GraphAssembler::LoadTrapOnNull(MachineType type, Node* object,
                                     Node* offset) {
  return AddNode(graph()->NewNode(machine()->LoadTrapOnNull(type), object,
                                  offset, effect(), control()));
}

Node* GraphAssembler::StoreTrapOnNull(StoreRepresentation rep, Node* object,
                                      Node* offset, Node* value) {
  return AddNode(graph()->NewNode(machine()->StoreTrapOnNull(rep), object,
                                  offset, value, effect(), control()));
}

Node* GraphAssembler::Retain(Node* buffer) {
  return AddNode(graph()->NewNode(common()->Retain(), buffer, effect()));
}

Node* GraphAssembler::IntPtrAdd(Node* a, Node* b) {
  return AddNode(graph()->NewNode(
      machine()->Is64() ? machine()->Int64Add() : machine()->Int32Add(), a, b));
}

Node* GraphAssembler::IntPtrSub(Node* a, Node* b) {
  return AddNode(graph()->NewNode(
      machine()->Is64() ? machine()->Int64Sub() : machine()->Int32Sub(), a, b));
}

TNode<Number> JSGraphAssembler::PlainPrimitiveToNumber(TNode<Object> value) {
  return AddNode<Number>(graph()->NewNode(
      PlainPrimitiveToNumberOperator(), PlainPrimitiveToNumberBuiltinConstant(),
      value, effect()));
}

Node* GraphAssembler::BitcastWordToTaggedSigned(Node* value) {
  return AddNode(
      graph()->NewNode(machine()->BitcastWordToTaggedSigned(), value));
}

Node* GraphAssembler::BitcastWordToTagged(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastWordToTagged(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastTaggedToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastTaggedToWord(), value,
                                  effect(), control()));
}

Node* GraphAssembler::BitcastMaybeObjectToWord(Node* value) {
  return AddNode(graph()->NewNode(machine()->BitcastMaybeObjectToWord(), value,
                                  effect(), control()));
}

Node* GraphAssembler::DeoptimizeIf(DeoptimizeReason reason,
                                   FeedbackSource const& feedback,
                                   Node* condition, Node* frame_state) {
  return AddNode(graph()->NewNode(common()->DeoptimizeIf(reason, feedback),
                                  condition, frame_state, effect(), control()));
}

Node* GraphAssembler::DeoptimizeIfNot(DeoptimizeReason reason,
                                      FeedbackSource const& feedback,
                                      Node* condition, Node* frame_state) {
  return AddNode(graph()->NewNode(common()->DeoptimizeUnless(reason, feedback),
                                  condition, frame_state, effect(), control()));
}

TNode<Object> GraphAssembler::Call(const CallDescriptor* call_descriptor,
                                   int inputs_size, Node** inputs) {
  return Call(common()->Call(call_descriptor), inputs_size, inputs);
}

TNode<Object> GraphAssembler::Call(const Operator* op, int inputs_size,
                                   Node** inputs) {
  DCHECK_EQ(IrOpcode::kCall, op->opcode());
  return AddNode<Object>(graph()->NewNode(op, inputs_size, inputs));
}

void GraphAssembler::TailCall(const CallDescriptor* call_descriptor,
                              int inputs_size, Node** inputs) {
#ifdef DEBUG
  static constexpr int kTargetEffectControl = 3;
  DCHECK_EQ(inputs_size,
            call_descriptor->ParameterCount() + kTargetEffectControl);
#endif  // DEBUG

  Node* node = AddNode(graph()->NewNode(common()->TailCall(call_descriptor),
                                        inputs_size, inputs));

  // Unlike ConnectUnreachableToEnd, the TailCall node terminates a block; to
  // keep it live, it *must* be connected to End (also in Turboprop schedules).
  NodeProperties::MergeControlToEnd(graph(), common(), node);

  // Setting effect, control to nullptr effectively terminates the current block
  // by disallowing the addition of new nodes until a new label has been bound.
  InitializeEffectControl(nullptr, nullptr);
}

void GraphAssembler::BranchWithCriticalSafetyCheck(
    Node* condition, GraphAssemblerLabel<0u>* if_true,
    GraphAssemblerLabel<0u>* if_false) {
  BranchHint hint = BranchHint::kNone;
  if (if_true->IsDeferred() != if_false->IsDeferred()) {
    hint = if_false->IsDeferred() ? BranchHint::kTrue : BranchHint::kFalse;
  }

  BranchImpl(default_branch_semantics_, condition, if_true, if_false, hint);
}

void GraphAssembler::RuntimeAbort(AbortReason reason) {
  AddNode(graph()->NewNode(simplified()->RuntimeAbort(reason)));
}

void GraphAssembler::ConnectUnreachableToEnd() {
  DCHECK_EQ(effect()->opcode(), IrOpcode::kUnreachable);
  Node* throw_node = graph()->NewNode(common()->Throw(), effect(), control());
  NodeProperties::MergeControlToEnd(graph(), common(), throw_node);
  if (node_changed_callback_.has_value()) {
    (*node_changed_callback_)(graph()->end());
  }
  effect_ = control_ = mcgraph()->Dead();
}

Node* GraphAssembler::AddClonedNode(Node* node) {
  DCHECK(node->op()->HasProperty(Operator::kPure));
  UpdateEffectControlWith(node);
  return node;
}

Node* GraphAssembler::AddNode(Node* node) {
  if (!inline_reducers_.empty() && !inline_reductions_blocked_) {
    // Reducers may add new nodes to the graph using this graph assembler,
    // however they should never introduce nodes that need further reduction,
    // so block reduction
    BlockInlineReduction scope(this);
    Reduction reduction;
    for (auto reducer : inline_reducers_) {
      reduction = reducer->Reduce(node, nullptr);
      if (reduction.Changed()) break;
    }
    if (reduction.Changed()) {
      Node* replacement = reduction.replacement();
      if (replacement != node) {
        // Replace all uses of node and kill the node to make sure we don't
        // leave dangling dead uses.
        NodeProperties::ReplaceUses(node, replacement, effect(), control());
        node->Kill();
        return replacement;
      }
    }
  }

  if (node->opcode() == IrOpcode::kTerminate) {
    return node;
  }

  UpdateEffectControlWith(node);
  return node;
}

  effect_ = nullptr;
  control_ = nullptr;
}

void GraphAssembler::InitializeEffectControl(Node* effect, Node* control) {
  effect_ = effect;
  control_ = control;
}

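// Lazily creates (and caches in to_number_operator_) the Call operator used
// by PlainPrimitiveToNumber above to invoke the PlainPrimitiveToNumber
// builtin.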
Operator const* JSGraphAssembler::PlainPrimitiveToNumberOperator() {
  if (!to_number_operator_.is_set()) {
    Callable callable =
        Builtins::CallableFor(isolate(), Builtin::kPlainPrimitiveToNumber);
    CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        graph()->zone(), callable.descriptor(),
        callable.descriptor().GetStackParameterCount(), flags,
        Operator::kEliminatable);
    to_number_operator_.set(common()->Call(call_descriptor));
  }
  return to_number_operator_.get();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8