#ifndef V8_COMPILER_TURBOSHAFT_WASM_LOAD_ELIMINATION_REDUCER_H_
#define V8_COMPILER_TURBOSHAFT_WASM_LOAD_ELIMINATION_REDUCER_H_

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
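// This header implements Turboshaft's WebAssembly load elimination: a
// WasmLoadEliminationAnalyzer that walks the graph and records, per base
// object, the known values of struct fields and of "load-like" operations
// (array length, string conversions, null checks), and a reducer that
// replaces redundant operations with the values the analyzer recorded.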
    type_index == other.type_index && size == other.size &&
  static T** prev(T t) { return &(t.data().prev_same_offset); }
  static T* next(T t) { return &(t.data().next_same_offset); }
  static T** prev(T t) { return &(t.data().prev_same_base); }
  static T* next(T t) { return &(t.data().next_same_base); }
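// The prev/next accessors above expose intrusive pointers stored in each
// key's data, so the same table key is threaded onto two doubly-threaded
// lists at once: one chaining all entries that share an offset and one
// chaining all entries that share a base object. Invalidation can then walk
// just the affected chain instead of scanning the whole table.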
    if (old_value.valid() && !new_value.valid()) {
    } else if (new_value.valid() && !old_value.valid()) {
    for (auto it = offset_keys->second.begin();
         it != offset_keys->second.end();) {
      it = offset_keys->second.RemoveAt(it);
  template <EntriesWithOffsets offsets = EntriesWithOffsets::kInvalidate>
      for (auto it = base_keys.second.with_offsets.begin();
           it != base_keys.second.with_offsets.end();) {
        if (key.data().mem.mutability == false) {
        it = base_keys.second.with_offsets.RemoveAt(it);
    return WasmStruct::kHeaderSize + type->field_offset(field_index);
    uint8_t size = get.type->field(get.field_index).value_kind_size();
    bool mutability = get.type->mutability(get.field_index);
    uint8_t size = set.type->field(set.field_index).value_kind_size();
    bool mutability = set.type->mutability(set.field_index);
    static constexpr bool mutability = false;
                 uint8_t size, bool mutability,
    uint8_t size = set.type->field(set.field_index).value_kind_size();
    bool mutability = set.type->mutability(set.field_index);
    uint8_t size = get.type->field(get.field_index).value_kind_size();
    bool mutability = get.type->mutability(get.field_index);
    static constexpr bool mutability = false;
    std::cout << "WasmMemoryContentTable:\n";
      for (Key key : base_keys.second.with_offsets) {
        std::cout << " * " << key.data().mem.base << " - "
                  << key.data().mem.offset << " ==> " << Get(key) << "\n";
              uint8_t size, bool mutability, OpIndex value) {
      Set(existing_key->second, value);
      if (AssertNotNullOp* check = op.TryCast<AssertNotNullOp>()) {
        base = check->object();
      if (WasmTypeCastOp* cast = op.TryCast<WasmTypeCastOp>()) {
    base_keys->second.with_offsets.PushFront(key);
    offset_keys->second.PushFront(key);
  bool compute_start_snapshot = true;
      compute_start_snapshot = true;
      if (last->destination->IsLoop() &&
          last->destination->LastPredecessor() == block) {
        const Block* loop_header = last->destination;
        const Block* loop_1st_pred =
        auto pred_snapshots =
            pred_snapshots->alias_snapshot);
        compute_start_snapshot = false;
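// Backedge handling: when a block ends with a Goto back to a loop header
// whose last predecessor is this block, the analyzer appears to re-seed its
// snapshots from the loop's first (forward) predecessor and to continue with
// compute_start_snapshot = false, so the loop body can be revisited with the
// updated state instead of recomputing a fresh start snapshot.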
      OpIndex op_idx, const StringPrepareForGetCodeUnitOp& op);
  template <bool for_loop_revisit = false>
    if (v8_flags.turboshaft_wasm_load_elimination) {
      DCHECK(AllowHandleDereference::IsAllowed());

#define EMIT_OP(Name)                                                       \
  OpIndex REDUCE_INPUT_GRAPH(Name)(OpIndex ig_index, const Name##Op& op) { \
    if (v8_flags.turboshaft_wasm_load_elimination) {                       \
      OpIndex ig_replacement_index = analyzer_.Replacement(ig_index);      \
      if (ig_replacement_index.valid()) {                                  \
        OpIndex replacement = Asm().MapToNewGraph(ig_replacement_index);   \
        return replacement;                                                \
    return Next::ReduceInputGraph##Name(ig_index, op);                     \

  EMIT_OP(StringPrepareForGetCodeUnit)
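// Each EMIT_OP(Name) instantiation defines the reducer's input-graph handler
// for that operation: if --turboshaft-wasm-load-elimination is enabled and the
// analyzer recorded a replacement for this input-graph index, the handler maps
// that replacement into the new graph and returns it; otherwise it defers to
// Next::ReduceInputGraph##Name. (REDUCE_INPUT_GRAPH(Name) is assumed here to
// expand to the ReduceInputGraphName hook of the reducer stack.)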
                                      const StructSetOp& op) {
    if (v8_flags.turboshaft_wasm_load_elimination) {
      OpIndex ig_replacement_index = analyzer_.Replacement(ig_index);
      if (ig_replacement_index.valid()) {
        DCHECK_EQ(ig_replacement_index, ig_index);
        return OpIndex::Invalid();
    return Next::ReduceInputGraphStructSet(ig_index, op);

      Asm().data(), Asm().modifiable_input_graph(), Asm().phase_zone()};
void WasmLoadEliminationAnalyzer::ProcessBlock(const Block& block,
                                               bool compute_start_snapshot) {
  if (compute_start_snapshot) {
  if (block.IsLoop() && BackedgeHasSnapshot(block)) {
    StoreLoopSnapshotInForwardPredecessor(block);
      case Opcode::kStructGet:
        ProcessStructGet(op_idx, op.Cast<StructGetOp>());
      case Opcode::kStructSet:
        ProcessStructSet(op_idx, op.Cast<StructSetOp>());
      case Opcode::kArrayLength:
        ProcessArrayLength(op_idx, op.Cast<ArrayLengthOp>());
      case Opcode::kWasmAllocateArray:
        ProcessWasmAllocateArray(op_idx, op.Cast<WasmAllocateArrayOp>());
      case Opcode::kStringAsWtf16:
        ProcessStringAsWtf16(op_idx, op.Cast<StringAsWtf16Op>());
      case Opcode::kStringPrepareForGetCodeUnit:
        ProcessStringPrepareForGetCodeUnit(
            op_idx, op.Cast<StringPrepareForGetCodeUnitOp>());
      case Opcode::kAnyConvertExtern:
        ProcessAnyConvertExtern(op_idx, op.Cast<AnyConvertExternOp>());
      case Opcode::kAssertNotNull:
        ProcessAssertNotNull(op_idx, op.Cast<AssertNotNullOp>());
      case Opcode::kArraySet:
      case Opcode::kAllocate:
      case Opcode::kAssumeMap:
      case Opcode::kCatchBlockBegin:
      case Opcode::kRetain:
      case Opcode::kDidntThrow:
      case Opcode::kCheckException:
      case Opcode::kAtomicRMW:
      case Opcode::kAtomicWord32Pair:
      case Opcode::kMemoryBarrier:
      case Opcode::kJSStackCheck:
      case Opcode::kWasmStackCheck:
      case Opcode::kSimd128LaneMemory:
      case Opcode::kGlobalSet:
      case Opcode::kParameter:
      case Opcode::kSetStackPointer:
      case Opcode::kWordBinop:
      case Opcode::kFrameState:
      case Opcode::kDeoptimizeIf:
      case Opcode::kComparison:
      case Opcode::kTrapIf:
      case Opcode::kDeoptimize:
      case Opcode::kReturn:
  InvalidateAllNonAliasingInputs(op);
                      uint8_t in_memory_size) {
  if (in_memory_size !=
      MemoryRepresentation::FromRegisterRepresentation(actual, true)
  return expected_reg_repr == actual;
void WasmLoadEliminationAnalyzer::ProcessStructGet(OpIndex op_idx,
                                                   const StructGetOp& get) {
  OpIndex existing = memory_.Find(get);
  if (existing.valid()) {
    uint8_t size = get.type->field(get.field_index).value_kind_size();
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.Insert(get, op_idx);
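// ProcessStructGet shows the pattern shared by the handlers that follow: look
// up a previously recorded value for the same address in memory_; if one
// exists, record it in replacements_ so the reducer can reuse it, otherwise
// record OpIndex::Invalid() and insert the current operation as the known
// value for future loads.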
void WasmLoadEliminationAnalyzer::ProcessStructSet(OpIndex op_idx,
                                                   const StructSetOp& set) {
  if (memory_.HasValueWithIncorrectMutability(set)) {
    replacements_[op_idx] = op_idx;
  memory_.Invalidate(set);
  if (non_aliasing_objects_.HasKeyFor(value)) {
    non_aliasing_objects_.Set(value, false);
void WasmLoadEliminationAnalyzer::ProcessArrayLength(
    OpIndex op_idx, const ArrayLengthOp& length) {
  static constexpr int offset = wle::kArrayLengthFieldIndex;
  OpIndex existing = memory_.FindLoadLike(length.array(), offset);
  if (existing.valid()) {
    DCHECK_EQ(length.outputs_rep().size(), 1);
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.InsertLoadLike(length.array(), offset, op_idx);
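// ProcessArrayLength treats the array length as a "load-like" slot keyed by
// the sentinel wle::kArrayLengthFieldIndex rather than a real field offset;
// the allocation handler below seeds the same slot.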
void WasmLoadEliminationAnalyzer::ProcessWasmAllocateArray(
    OpIndex op_idx, const WasmAllocateArrayOp& alloc) {
  non_aliasing_objects_.Set(op_idx, true);
  static constexpr int offset = wle::kArrayLengthFieldIndex;
  memory_.InsertLoadLike(op_idx, offset, alloc.length());
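// A fresh WasmAllocateArray result cannot alias pre-existing objects, so it is
// marked non-aliasing and its length slot is pre-populated with alloc.length(),
// letting a later array.len on the same object fold to the allocation's length
// input. The handlers below (StringAsWtf16, StringPrepareForGetCodeUnit,
// AnyConvertExtern, AssertNotNull) reuse the same load-like mechanism with
// per-opcode sentinel offsets (wle::k*Index) so that repeating the same
// operation on the same input can be replaced by the first occurrence.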
void WasmLoadEliminationAnalyzer::ProcessStringAsWtf16(
    OpIndex op_idx, const StringAsWtf16Op& op) {
  static constexpr int offset = wle::kStringAsWtf16Index;
  OpIndex existing = memory_.FindLoadLike(op.string(), offset);
  if (existing.valid()) {
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.InsertLoadLike(op.string(), offset, op_idx);
void WasmLoadEliminationAnalyzer::ProcessStringPrepareForGetCodeUnit(
    OpIndex op_idx, const StringPrepareForGetCodeUnitOp& prep) {
  static constexpr int offset = wle::kStringPrepareForGetCodeunitIndex;
  OpIndex existing = memory_.FindLoadLike(prep.string(), offset);
  if (existing.valid()) {
    DCHECK_EQ(Opcode::kStringPrepareForGetCodeUnit,
              graph_.Get(existing).opcode);
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.InsertLoadLike(prep.string(), offset, op_idx);
void WasmLoadEliminationAnalyzer::ProcessAnyConvertExtern(
    OpIndex op_idx, const AnyConvertExternOp& convert) {
  static constexpr int offset = wle::kAnyConvertExternIndex;
  OpIndex existing = memory_.FindLoadLike(convert.object(), offset);
  if (existing.valid()) {
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.InsertLoadLike(convert.object(), offset, op_idx);
void WasmLoadEliminationAnalyzer::ProcessAssertNotNull(
    OpIndex op_idx, const AssertNotNullOp& assert) {
  static constexpr int offset = wle::kAssertNotNullIndex;
  OpIndex existing = memory_.FindLoadLike(assert.object(), offset);
  if (existing.valid()) {
    replacements_[op_idx] = existing;
  replacements_[op_idx] = OpIndex::Invalid();
  memory_.InsertLoadLike(assert.object(), offset, op_idx);
void WasmLoadEliminationAnalyzer::ProcessCall(OpIndex op_idx,
  switch (*builtin_id) {
    case Builtin::kExample:
  InvalidateAllNonAliasingInputs(op);
  memory_.InvalidateMaybeAliasing();
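// Calls are handled conservatively: unless the callee is recognized as a
// builtin with known effects (the switch over *builtin_id above), the call is
// assumed to let its arguments escape and to write arbitrary memory, so all
// non-aliasing inputs are invalidated and every possibly-aliasing memory entry
// is dropped via InvalidateMaybeAliasing().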
void WasmLoadEliminationAnalyzer::InvalidateAllNonAliasingInputs(
    InvalidateIfAlias(input);
void WasmLoadEliminationAnalyzer::InvalidateIfAlias(OpIndex op_idx) {
  if (auto key = non_aliasing_objects_.TryGetKeyFor(op_idx);
      key.has_value() && non_aliasing_objects_.Get(*key)) {
    non_aliasing_objects_.Set(*key, false);
void WasmLoadEliminationAnalyzer::DcheckWordBinop(OpIndex op_idx,
  if (auto key = non_aliasing_objects_.TryGetKeyFor(left);
      key.has_value() && non_aliasing_objects_.Get(*key)) {
void WasmLoadEliminationAnalyzer::ProcessAllocate(OpIndex op_idx,
  non_aliasing_objects_.Set(op_idx, true);
void WasmLoadEliminationAnalyzer::ProcessPhi(OpIndex op_idx,
                                             const PhiOp& phi) {
  InvalidateAllNonAliasingInputs(phi);
  if (inputs.size() > 0) {
    bool same_inputs = true;
    if (memory_.ResolveBase(input) != first) {
    replacements_[op_idx] = first;
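// A Phi makes its inputs visible on other paths, so their non-aliasing status
// is invalidated. In addition, if every input resolves (via
// memory_.ResolveBase) to the same value as the first one, the Phi appears to
// be redundant and is recorded as replaceable by that first input.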
void WasmLoadEliminationAnalyzer::FinishBlock(const Block* block) {
  block_to_snapshot_mapping_[block->index()] =
      Snapshot{non_aliasing_objects_.Seal(), memory_.Seal()};
void WasmLoadEliminationAnalyzer::SealAndDiscard() {
  non_aliasing_objects_.Seal();
void WasmLoadEliminationAnalyzer::StoreLoopSnapshotInForwardPredecessor(
    const Block& loop_header) {
  auto non_aliasing_snapshot = non_aliasing_objects_.Seal();
  auto memory_snapshot = memory_.Seal();
  block_to_snapshot_mapping_
      Snapshot{non_aliasing_snapshot, memory_snapshot};
  non_aliasing_objects_.StartNewSnapshot(non_aliasing_snapshot);
  memory_.StartNewSnapshot(memory_snapshot);
bool WasmLoadEliminationAnalyzer::BackedgeHasSnapshot(
    const Block& loop_header) const {
template <bool for_loop_revisit>
bool WasmLoadEliminationAnalyzer::BeginBlock(const Block* block) {
      block_to_snapshot_mapping_[block->LastPredecessor()->index()]
  predecessor_alias_snapshots_.clear();
  predecessor_memory_snapshots_.clear();
    auto pred_snapshots = block_to_snapshot_mapping_[p->index()];
                   block->IsLoop() && block->LastPredecessor() == p);
    if (!pred_snapshots.has_value()) {
      DCHECK(!for_loop_revisit);
    predecessor_alias_snapshots_.push_back(pred_snapshots->alias_snapshot);
    predecessor_memory_snapshots_.push_back(pred_snapshots->memory_snapshot);
  constexpr int kBackedgeOffset = 0;
  constexpr int kForwardEdgeOffset = 1;
  bool loop_needs_revisit = false;
    if (for_loop_revisit && predecessors[kForwardEdgeOffset] &&
        !predecessors[kBackedgeOffset]) {
      loop_needs_revisit = true;
    return base::all_of(predecessors);
  non_aliasing_objects_.StartNewSnapshot(
      base::VectorOf(predecessor_alias_snapshots_), merge_aliases);
    if (for_loop_revisit && predecessors[kForwardEdgeOffset].valid() &&
        predecessors[kBackedgeOffset] != predecessors[kForwardEdgeOffset]) {
      loop_needs_revisit = true;
    return base::all_equal(predecessors) ? predecessors[0] : OpIndex::Invalid();
  memory_.StartNewSnapshot(base::VectorOf(predecessor_memory_snapshots_),
  if (block->IsLoop()) return loop_needs_revisit;
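// When BeginBlock merges predecessor snapshots for a loop header
// (for_loop_revisit), the merge callbacks above detect entries whose
// forward-edge value is present but missing on, or different from, the
// backedge; such entries are dropped from the merged state and
// loop_needs_revisit is set, which BeginBlock returns for loops so the caller
// can schedule another pass over the loop body.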