#ifndef V8_COMPILER_TURBOSHAFT_LATE_LOAD_ELIMINATION_REDUCER_H_
#define V8_COMPILER_TURBOSHAFT_LATE_LOAD_ELIMINATION_REDUCER_H_
  if (v8_flags.turboshaft_trace_load_elimination) \
    StdoutStream() << x << std::endl;             \
    return or_ == other.or_ && and_ == other.and_;
  return minmax.or_ == 0 && minmax.and_ == -1ull;
  MapMask hash = map.hash_value();
  return hash * 0x2545f4914f6cdd1d;
  return {a.or_ | b.or_, a.and_ & b.and_};
  return ((a.and_ & b.or_) == a.and_) || ((b.and_ & a.or_) == b.and_);
    return base == other.base && index == other.index &&
           offset == other.offset &&
           element_size_log2 == other.element_size_log2 && size == other.size;
  template <typename H>
  friend H AbslHashValue(H h, const MemoryAddress& mem) {
    static T** prev(T t) { return &(t.data().prev_same_offset); }
    static T* next(T t) { return &(t.data().next_same_offset); }
    static T** prev(T t) { return &(t.data().prev_same_base); }
    static T* next(T t) { return &(t.data().next_same_base); }
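// These traits plug the snapshot-table Keys into v8::base::DoublyThreadedList:
// the prev/next links live inside each Key's own data rather than in separate
// list nodes, so the same key can be threaded through a per-offset list and a
// per-base list at once and unlinked from both in O(1) when it is invalidated.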
    if (old_value.valid() && !new_value.valid()) {
      RemoveKeyFromBaseOffsetMaps(key);
    } else if (new_value.valid() && !old_value.valid()) {
      AddKeyInBaseOffsetMaps(key);
    Invalidate(store.base(), store.index(), store.offset);
    TRACE("> MemoryContentTable: Invalidating based on "
          << base << ", " << index << ", " << offset);
      TRACE(">> base is non-aliasing");
      for (auto it = base_keys->second.with_offsets.begin();
           it != base_keys->second.with_offsets.end();) {
        if (index.valid() || offset == key.data().mem.offset) {
          it = base_keys->second.with_offsets.RemoveAt(it);
          TRACE(">>> invalidating " << key.data().mem);
      for (auto it = base_keys->second.with_indices.begin();
           it != base_keys->second.with_indices.end();) {
        it = base_keys->second.with_indices.RemoveAt(it);
      TRACE(">> base is maybe-aliasing");
      TRACE(">> Invalidating everything because of valid index");
        TRACE(">>> Invalidating indexed memory " << key.data().mem);
        TRACE(">>> Invalidating everything maybe-aliasing at offset "
              << offset);
    TRACE(">> InvalidateMaybeAliasing");
      for (auto it = base_keys.second.with_offsets.begin();
           it != base_keys.second.with_offsets.end();) {
        it = base_keys.second.with_offsets.RemoveAt(it);
        TRACE(">>> Invalidating " << key.data().mem);
      for (auto it = base_keys.second.with_indices.begin();
           it != base_keys.second.with_indices.end();) {
        it = base_keys.second.with_indices.RemoveAt(it);
        TRACE(">>> Invalidating " << key.data().mem);
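// Invalidation is deliberately asymmetric: for a base known to be
// non-aliasing, only the keys recorded for that base (and, when the store has
// a valid index, every offset under it) are dropped, while a write through a
// maybe-aliasing pointer falls back to InvalidateMaybeAliasing, which walks
// every base not known to be non-aliasing and drops its entries.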
    int32_t offset = load.offset;
    uint8_t element_size_log2 = index.valid() ? load.element_size_log2 : 0;
    uint8_t size = load.loaded_rep.SizeInBytes();
    int32_t offset = store.offset;
    uint8_t element_size_log2 = index.valid() ? store.element_size_log2 : 0;
    uint8_t size = store.stored_rep.SizeInBytes();
    if (store.kind.is_immutable) {
    int32_t offset = load.offset;
    uint8_t element_size_log2 = index.valid() ? load.element_size_log2 : 0;
    uint8_t size = load.loaded_rep.SizeInBytes();
    if (load.kind.is_immutable) {
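// Note how {element_size_log2} is forced to 0 whenever there is no valid
// index, so plain base+offset accesses normalize to one canonical
// MemoryAddress key. Immutable loads and stores branch off to the
// InsertImmutable path; since an immutable slot can never be overwritten,
// its recorded value can presumably be kept alive across invalidations.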
    std::cout << "MemoryContentTable:\n";
      for (Key key : base_keys.second.with_offsets) {
        std::cout << "  * " << key.data().mem.base << " - "
                  << key.data().mem.index << " - " << key.data().mem.offset
                  << " - " << key.data().mem.element_size_log2 << " ==> "
                  << Get(key) << "\n";
      for (Key key : base_keys.second.with_indices) {
        std::cout << "  * " << key.data().mem.base << " - "
                  << key.data().mem.index << " - " << key.data().mem.offset
                  << " - " << key.data().mem.element_size_log2 << " ==> "
                  << Get(key) << "\n";
  void Insert(OpIndex base, OptionalOpIndex index, int32_t offset,
              uint8_t element_size_log2, uint8_t size, OpIndex value) {
    TRACE("> MemoryContentTable: will insert " << mem
          << " with value=" << value);
      TRACE(">> Reusing existing key");
      Set(existing_key->second, value);
      TRACE(">> Bailing out because too many keys");
  void InsertImmutable(OpIndex base, OptionalOpIndex index, int32_t offset,
                       uint8_t element_size_log2, uint8_t size,
                       OpIndex value) {
    TRACE("> MemoryContentTable: will insert immutable "
          << mem << " with value=" << value);
      TRACE(">> Reusing existing key");
      TRACE(">> Bailing out because too many keys");
    for (auto it = offset_keys->second.begin();
         it != offset_keys->second.end();) {
        TRACE(">>>> InvalidateAtOffset: not invalidating thanks to maps: "
      it = offset_keys->second.RemoveAt(it);
      TRACE(">>>> InvalidateAtOffset: invalidating " << key.data().mem);
      if (key.data().mem.index.valid()) {
        base_keys->second.with_indices.PushFront(key);
      } else {
        base_keys->second.with_offsets.PushFront(key);
      if (key.data().mem.index.valid()) {
        data.with_indices.PushFront(key);
      } else {
        data.with_offsets.PushFront(key);
    if (key.data().mem.index.valid()) {
      index_keys_.PushFront(key);
    } else {
      offset_keys->second.PushFront(key);
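// Indexed keys are threaded onto the single index_keys_ list (any indexed
// write may alias them), while offset-only keys are additionally grouped per
// offset in offset_keys_, which is what lets InvalidateAtOffset drop exactly
// the entries that share a field offset.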
        raw_base_assumption_(raw_base_assumption),
        replacements_(graph.op_id_count(), phase_zone, &graph),
        memory_(phase_zone, non_aliasing_objects_, object_maps_,
                replacements_),
        block_to_snapshot_mapping_(graph.block_count(), phase_zone),
  void ProcessBlock(const Block& block, bool compute_start_snapshot);

  template <bool for_loop_revisit = false>
  bool BeginBlock(const Block* block);
  void FinishBlock(const Block* block);
  void SealAndDiscard();
  void StoreLoopSnapshotInForwardPredecessor(const Block& loop_header);
  bool BackedgeHasSnapshot(const Block& loop_header) const;

  void InvalidateAllNonAliasingInputs(const Operation& op);
  void InvalidateIfAlias(OpIndex op_idx);
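// InvalidateAllNonAliasingInputs conservatively downgrades every input of
// {op}: once a tracked allocation escapes into an arbitrary operation it may
// alias stores made through other pointers, so its entries in the alias and
// memory tables can no longer be trusted.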
#if V8_ENABLE_WEBASSEMBLY
  bool is_wasm_ = data_->is_wasm();
  std::map<OpIndex, base::SmallMap<std::map<OpIndex, OpIndex>, 4>>
      int32_truncated_loads_;
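// Presumably this records, per tagged load, the Change/TaggedBitcast uses
// that truncate it to int32 (see IsInt32TruncatedLoadPattern): once every use
// of a load is such a truncation, the load can become a TaggedLoadToInt32Load
// and each truncation an Int32TruncationElimination.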
    if (is_wasm_ || v8_flags.turboshaft_load_elimination) {
      DCHECK(AllowHandleDereference::IsAllowed());
    if (is_wasm_ || v8_flags.turboshaft_load_elimination) {
      Replacement replacement = analyzer_.GetReplacement(ig_index);
      OpIndex replacement_idx = Asm().MapToNewGraph(replacement_ig_index);
      if (analyzer_.GetReplacement(replacement_ig_index)
              .IsTaggedLoadToInt32Load()) {
        DCHECK_EQ(Asm().output_graph().Get(replacement_idx).outputs_rep()[0],
                  RegisterRepresentation::Word32());
      } else {
        DCHECK(Asm()
                   .output_graph()
                   .Get(replacement_idx)
                   .outputs_rep()[0]
                   .AllowImplicitRepresentationChangeTo(
                       load.outputs_rep()[0],
                       Asm().output_graph().IsCreatedFromTurbofan()));
      }
      return replacement_idx;
      auto loaded_rep = load.loaded_rep;
      auto result_rep = load.result_rep;
      DCHECK_EQ(result_rep, RegisterRepresentation::Tagged());
      loaded_rep = MemoryRepresentation::Int32();
      result_rep = RegisterRepresentation::Word32();
      return Asm().Load(Asm().MapToNewGraph(load.base()),
                        Asm().MapToNewGraph(load.index()), load.kind,
                        loaded_rep, result_rep, load.offset,
                        load.element_size_log2);
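// Net effect of this branch, sketched: a Tagged load whose result is only
// ever consumed as an int32 truncation is re-emitted as a Word32 load of the
// same base/index/offset (loaded_rep swapped to MemoryRepresentation::Int32()),
// so the tagged value never materializes and the truncations fold away.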
    return Next::ReduceInputGraphLoad(ig_index, load);
    if (is_wasm_ || v8_flags.turboshaft_load_elimination) {
      Replacement replacement = analyzer_.GetReplacement(ig_index);
      return Asm().MapToNewGraph(replacement.replacement());
    }
    return Next::ReduceInputGraphChange(ig_index, change);
    if (is_wasm_ || v8_flags.turboshaft_load_elimination) {
      Replacement replacement = analyzer_.GetReplacement(ig_index);
      return OpIndex::Invalid();
    }
    return Next::ReduceInputGraphTaggedBitcast(ig_index, bitcast);
  const bool is_wasm_ = __ data()->is_wasm();
  RawBaseAssumption raw_base_assumption_ =
      __ data()->pipeline_kind() == TurboshaftPipelineKind::kCSA
          ? RawBaseAssumption::kMaybeInnerPointer
          : RawBaseAssumption::kNoInnerPointer;
  LateLoadEliminationAnalyzer analyzer_{__ data(), __ modifiable_input_graph(),
                                        __ phase_zone(), __ data()->broker(),
                                        raw_base_assumption_};
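// Illustrative usage (the surrounding reducer stack here is assumed, not
// prescribed): the reducer typically runs inside a copying phase, e.g.
//
//   CopyingPhase<LateLoadEliminationReducer,
//                ValueNumberingReducer>::Run(data, temp_zone);
//
// and it only rewrites anything when is_wasm_ is set or
// v8_flags.turboshaft_load_elimination is enabled.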
#endif  // V8_COMPILER_TURBOSHAFT_LATE_LOAD_ELIMINATION_REDUCER_H_