#define TRACE(...)                                      \
  do {                                                  \
    if (v8_flags.trace_wasm_typer) PrintF(__VA_ARGS__); \
  } while (false)
if (IsReachable(block) && last->destination->IsLoop() &&
    last->destination->LastPredecessor() == &block) {
  TRACE("[b%u] Reprocessing loop header b%u at backedge #%u\n",
        block.index().id(), last->destination->index().id(),
        /* ... */);
  const Block& loop_header = *last->destination;
  // ...
  TRACE("[b%u] Loop header b%u reprocessed at backedge #%u: %s\n",
        block.index().id(), last->destination->index().id(),
        /* ... */,
        needs_revisit ? "Scheduling loop body revisitation"
                      : "No revisit of loop body needed");
  // ...
  if (block.index() != loop_header.index()) {
    // ...
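// --- Illustrative sketch (not V8 code) of the fixed-point scheme above in
// isolation: the loop header's state is merged with the state flowing over
// the backedge, and the loop body is revisited only while that merge still
// changes something, which terminates on a finite lattice. All names below
// are hypothetical.
#include <algorithm>
#include <cstdio>

struct TypeBound { int level = 0; };  // stand-in for a type snapshot

// Join the backedge state into the header state; report whether it changed.
bool MergeIntoHeader(TypeBound& header, const TypeBound& backedge) {
  int joined = std::max(header.level, backedge.level);
  bool changed = joined != header.level;
  header.level = joined;
  return changed;
}

int main() {
  TypeBound header{0};
  TypeBound backedge{2};  // the first iteration widens the header state
  int revisits = 0;
  while (MergeIntoHeader(header, backedge)) {
    ++revisits;                     // corresponds to MarkLoopForRevisit()
    backedge.level = header.level;  // re-running the body reproduces the state
  }
  std::printf("stable after %d revisit(s), level %d\n", revisits, header.level);
  return 0;
}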
bool block_was_previously_reachable = IsReachable(block);
if (!block_was_previously_reachable) {
  TRACE("[b%u] Removing unreachable flag as block is re-evaluated\n",
        block.index().id());
}
// ...
if (block.HasPredecessors() == 0) {
  // ...
} else if (block.IsLoop()) {
  const Block& forward_predecessor =
      *block.LastPredecessor()->NeighboringPredecessor();
  // ...
  TRACE(
      "[b%uu] Loop unreachable as forward predecessor b%u is unreachable\n",
      block.index().id(), forward_predecessor.index().id());
  // ...
  if (back_edge_snap.has_value() && block_was_previously_reachable) {
    // ...
  } else {
    // ...
    TRACE(
        "[b%u%s] First loop header evaluation: Ignoring all backedges on "
        "phis\n",
        /* ... */);
    // ...
  }
} else if (block.IsBranchTarget()) {
  // ...
  if (branch != nullptr) {
    // ...
  }
  // ...
  TRACE("[b%uu] Block unreachable as sole predecessor b%u is unreachable\n",
        block.index().id(), predecessor.index().id());
  // ...
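// --- Illustrative sketch (not V8 code) of the reachability rules applied in
// StartNewSnapshotFor above: a loop header is unreachable if its forward
// predecessor is, and any other block is unreachable only if all of its
// predecessors are. Toy CFG with hypothetical names; blocks are assumed to be
// numbered in reverse postorder, so forward predecessors precede successors.
#include <cstddef>
#include <vector>

struct ToyBlock {
  std::vector<int> preds;  // predecessor block ids, forward edge first
  bool is_loop = false;
};

std::vector<bool> ComputeUnreachable(const std::vector<ToyBlock>& blocks) {
  std::vector<bool> unreachable(blocks.size(), false);
  for (size_t b = 1; b < blocks.size(); ++b) {  // block 0 is the entry
    const ToyBlock& block = blocks[b];
    if (block.preds.empty()) continue;  // nothing to merge, like the entry
    if (block.is_loop) {
      // A loop that cannot be entered through its forward edge cannot become
      // reachable through its own backedge.
      unreachable[b] = unreachable[block.preds.front()];
    } else {
      bool all_unreachable = true;
      for (int p : block.preds) all_unreachable &= unreachable[p];
      unreachable[b] = all_unreachable;
    }
  }
  return unreachable;
}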
switch (op.opcode) {
  // The body of each case (a dispatch to the matching Process* handler,
  // followed by a break) is elided in the original listing.
  case Opcode::kWasmTypeCast:
  case Opcode::kWasmTypeCheck:
  case Opcode::kAssertNotNull:
  case Opcode::kIsNull:
  case Opcode::kParameter:
  case Opcode::kStructGet:
  case Opcode::kStructSet:
  case Opcode::kArrayGet:
  case Opcode::kArrayLength:
  case Opcode::kGlobalGet:
  case Opcode::kWasmRefFunc:
  case Opcode::kWasmAllocateArray:
  case Opcode::kWasmAllocateStruct:
  case Opcode::kWasmTypeAnnotation:
  case Opcode::kBranch:
    // ...
void WasmGCTypeAnalyzer::ProcessAssertNotNull(
    const AssertNotNullOp& assert_not_null) {
  V<Object> object = assert_not_null.object();
  // ...
// In ProcessStructGet: the loaded value is typed as the field's unpacked
// declared type.
  // ...
  struct_get.type->field(struct_get.field_index).Unpacked(),
  // ...
// In ProcessArrayGet: the loaded element is typed as the array's unpacked
// element type.
  // ...
  array_get.array_type->element_type().Unpacked(),
  // ...
// In ProcessRefFunc: the function's signature index is looked up in the
// module.
  // ...
  module_->functions[ref_func.function_index].sig_index;
  // ...
void WasmGCTypeAnalyzer::ProcessAllocateArray(
    const WasmAllocateArrayOp& allocate_array) {
  // The allocated array is typed by the type index of its rtt operand.
  wasm::ModuleTypeIndex type_index =
      graph_.Get(allocate_array.rtt()).Cast<RttCanonOp>().type_index;
  // ...
void WasmGCTypeAnalyzer::ProcessAllocateStruct(
    const WasmAllocateStructOp& allocate_struct) {
  const Operation& rtt = graph_.Get(allocate_struct.rtt());
  // ...
  if (const RttCanonOp* canon = rtt.TryCast<RttCanonOp>()) {
    type_index = canon->type_index;
  } else if (const LoadOp* load = rtt.TryCast<LoadOp>()) {
    // The rtt is not a canonical rtt; it was loaded from a descriptor.
    DCHECK(load->kind.tagged_base && load->offset == WasmStruct::kHeaderSize);
    OpIndex descriptor = load->base();
    // ...
  // In GetTypeForPhiInput: is the input defined inside the current block,
  // before the phi itself?
  if (current_block_->begin().id() <= input.id() && input.id() < phi_id.id()) {
    // ...
for (int i = 1; i < phi.input_count; ++i) {
  // ...
  union_type = input_type;
  // ...
}
// ...
for (int i = 0; i < phi.input_count; ++i) {
  // ...
  TRACE("- phi input %d: #%u(%s) -> %s\n", i, input.id(),
        /* ... */);
  // ...
}
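// --- Illustrative sketch of the phi handling above: the phi's type is the
// union (least upper bound) of its input types, skipping uninhabited inputs;
// on the first evaluation of a loop header only the forward edge (input 0) is
// considered, mirroring is_first_loop_header_evaluation_. Toy type lattice,
// hypothetical names.
#include <cstddef>
#include <optional>
#include <vector>

enum class ToyType { kNone, kI31, kStructRef, kAnyRef };  // kNone = uninhabited

ToyType Union(ToyType a, ToyType b) {
  return a == b ? a : ToyType::kAnyRef;  // deliberately coarse join
}

std::optional<ToyType> PhiType(const std::vector<ToyType>& inputs,
                               bool first_loop_header_evaluation) {
  size_t count = first_loop_header_evaluation ? 1 : inputs.size();
  std::optional<ToyType> union_type;
  for (size_t i = 0; i < count; ++i) {
    if (inputs[i] == ToyType::kNone) continue;  // skip uninhabited inputs
    union_type = union_type ? Union(*union_type, inputs[i]) : inputs[i];
  }
  return union_type;  // nullopt: all considered inputs were uninhabited
}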
void WasmGCTypeAnalyzer::ProcessTypeAnnotation(
    const WasmTypeAnnotationOp& type_annotation) {
  // ...
void WasmGCTypeAnalyzer::ProcessBranchOnTarget(const BranchOp& branch,
                                               const Block& target) {
  // ...
  switch (condition.opcode) {
    case Opcode::kWasmTypeCheck: {
      const WasmTypeCheckOp& check = condition.Cast<WasmTypeCheckOp>();
      if (branch.if_true == &target) {
        // ...
      }
      // ...
      TRACE(
          "[b%uu] Block unreachable as #%u(%s) used in #%u(%s) is always "
          /* ... */,
          target.index().id(), branch.condition().id(),
          /* ... */);
      // ...
    }
    case Opcode::kIsNull: {
      const IsNullOp& is_null = condition.Cast<IsNullOp>();
      if (branch.if_true == &target) {
        // ...
      }
      // ...
      TRACE(
          "[b%uu] Block unreachable as #%u(%s) used in #%u(%s) is always "
          /* ... */,
          target.index().id(), branch.condition().id(),
          /* ... */);
      // ...
    }
    // ...
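// --- Illustrative sketch of ProcessBranchOnTarget for an is-null condition:
// on the if_true edge the value is known to be null; on the if_false edge it
// can be narrowed to non-nullable; and if the value was already known to be
// non-nullable, the if_true target is statically unreachable. Toy nullability
// model, hypothetical names.
#include <optional>

struct ToyRefType {
  bool nullable;
};

// Returns the refined type on the taken edge, or nullopt when that edge is
// statically unreachable.
std::optional<ToyRefType> RefineForIsNullBranch(ToyRefType known,
                                                bool taking_if_true) {
  if (taking_if_true) {
    // branch(is_null(x)) took the true edge: x is null here. If x was known
    // non-nullable, this edge can never be taken.
    if (!known.nullable) return std::nullopt;
    return known;
  }
  // False edge: x is not null, so narrow its type to non-nullable.
  return ToyRefType{false};
}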
bool all_predecessors_unreachable = true;
for (const Block* predecessor : block.PredecessorsIterable()) {
  // ...
  bool predecessor_reachable = IsReachable(*predecessor);
  reachable.push_back(predecessor_reachable);
  all_predecessors_unreachable &= !predecessor_reachable;
}
if (all_predecessors_unreachable) {
  TRACE("[b%u] Block unreachable as all predecessors are unreachable\n",
        block.index().id());
  // ...
} else if (v8_flags.trace_wasm_typer) {
  std::stringstream str;
  // ...
  for (const Block* predecessor : block.PredecessorsIterable()) {
    if (i != 0) str << ", ";
    str << 'b' << predecessor->index().id() << (reachable[i] ? "" : "u");
    // ...
  }
  TRACE("[b%u] Predecessors reachability: %s\n", block.index().id(),
        str.str().c_str());
}
// The predecessors were collected in reverse order; restore source order
// before merging.
std::reverse(snapshots.begin(), snapshots.end());
std::reverse(reachable.begin(), reachable.end());
bool types_are_equivalent = true;
types_table_.StartNewSnapshot(
    predecessors, [this, &types_are_equivalent, reachable](
                      /* ... */) {
      // Seed with the first reachable, inhabited predecessor type.
      // ...
      for (; i < reachable.size(); ++i) {
        // ...
        if (reachable[i] && !predecessors[i].is_uninhabited()) {
          first = predecessors[i];
          // ...
        }
      }
      // ...
      for (; i < reachable.size(); ++i) {
        if (!reachable[i]) continue;
        // ...
        DCHECK(!type.is_uninhabited());
        if (type.is_uninhabited()) continue;
        types_are_equivalent &= first == type;
        // ...
      }
      // ...
    });
// ...
return !types_are_equivalent;
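// --- Illustrative sketch of the merge above: seed with the first reachable,
// inhabited predecessor type, then check whether every other reachable,
// inhabited predecessor agrees. For a loop-header merge the caller treats
// "not equivalent" as "the loop body must be revisited". Toy types
// (0 = uninhabited), hypothetical names.
#include <cstddef>
#include <vector>

bool MergeNeedsRevisit(const std::vector<int>& predecessor_types,
                       const std::vector<bool>& reachable) {
  size_t i = 0;
  int first = 0;
  for (; i < reachable.size(); ++i) {
    if (reachable[i] && predecessor_types[i] != 0) {
      first = predecessor_types[i];
      ++i;
      break;
    }
  }
  bool types_are_equivalent = true;
  for (; i < reachable.size(); ++i) {
    if (!reachable[i]) continue;              // ignore unreachable inputs
    if (predecessor_types[i] == 0) continue;  // ignore uninhabited inputs
    types_are_equivalent &= (predecessor_types[i] == first);
  }
  return !types_are_equivalent;
}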
// RefineTypeKnowledge: the new information is intersected with the previously
// recorded type; if the intersection is not more precise, keep the old bound.
  if (intersection_type == previous_value) return previous_value;

  TRACE("[b%u%s] #%u(%s): Refine type for object #%u(%s) -> %s%s\n",
        /* ... */);
  // ...
  return previous_value;

// RefineTypeKnowledgeNotNull follows the same pattern for non-nullability.
  // ...
  TRACE("[b%u%s] #%u(%s): Refine type for object #%u(%s) -> %s%s\n",
        /* ... */);
  // ...
  return previous_value;
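// --- Illustrative sketch of the refinement rule: combine new knowledge with
// the previous bound by intersection and report whether anything was learned
// (the early return above keeps the old bound when nothing was). Types as bit
// sets, a deliberate simplification.
#include <cstdint>

struct Refinement {
  uint32_t type;  // each bit: one possible runtime type
  bool changed;
};

Refinement Refine(uint32_t previous, uint32_t new_info) {
  uint32_t intersection = previous & new_info;
  if (intersection == previous) return {previous, false};  // nothing learned
  return {intersection, true};  // record the more precise bound
}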
// ResolveAliases: walk through operations that merely forward their input.
// ...
switch (op->opcode) {
  case Opcode::kWasmTypeCast:
    object = op->Cast<WasmTypeCastOp>().object();
    break;
  case Opcode::kAssertNotNull:
    object = op->Cast<AssertNotNullOp>().object();
    break;
  case Opcode::kWasmTypeAnnotation:
    object = op->Cast<WasmTypeAnnotationOp>().value();
    break;
  // ...
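// --- Illustrative sketch of ResolveAliases: casts, null assertions, and type
// annotations all forward their input value, so type queries walk through
// them to the operation that actually defines the value. Toy IR, hypothetical
// names.
#include <vector>

enum class ToyOpcode { kOther, kTypeCast, kAssertNotNull, kTypeAnnotation };

struct ToyOp {
  ToyOpcode opcode;
  int input;  // index of the forwarded operand (unused for kOther)
};

int ResolveAliases(const std::vector<ToyOp>& graph, int index) {
  while (true) {
    const ToyOp& op = graph[index];
    switch (op.opcode) {
      case ToyOpcode::kTypeCast:
      case ToyOpcode::kAssertNotNull:
      case ToyOpcode::kTypeAnnotation:
        index = op.input;  // these operations alias their input
        break;
      default:
        return index;  // reached the defining operation
    }
  }
}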