v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
wasm-gc-typed-optimization-reducer.h
Go to the documentation of this file.
1// Copyright 2023 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_COMPILER_TURBOSHAFT_WASM_GC_TYPED_OPTIMIZATION_REDUCER_H_
6#define V8_COMPILER_TURBOSHAFT_WASM_GC_TYPED_OPTIMIZATION_REDUCER_H_
7
8#if !V8_ENABLE_WEBASSEMBLY
9#error This header should only be included if WebAssembly is enabled.
10#endif // !V8_ENABLE_WEBASSEMBLY
11
18
20
21// The WasmGCTypedOptimizationReducer infers type information based on the input
22// graph and reduces type checks and casts based on that information.
23//
24// This is done in two steps:
25// 1) The WasmGCTypeAnalyzer infers the types based on the input graph, e.g.:
26// func (param anyref) (result i32)
27// local.get 0
28// ref.test $MyType
29// if // local 0 is known to be (a subtype of) $MyType
30// local.get 0
31// ref.cast $MyType // the input of this cast is a subtype of $MyType
32// // it can be removed during reduction
33// struct.get $MyType 0
34// return
35// end // local 0 is still anyref
36// i32.const 0
37//
38// 2) The WasmGCTypedOptimizationReducer reduces the graph to a new graph
39// potentially removing, simplifying (e.g. replacing a cast with a null
40// check) or refining (setting the from type to a more specific type) type
41// operations.
42
44 public:
46 : data_(data), graph_(graph), phase_zone_(zone) {
47 // If we ever want to run this analyzer for Wasm wrappers, we'll need
48 // to make it handle their {CanonicalSig} signatures.
50 }
51
52 void Run();
53
54 // Returns the input type for the operation or bottom if the operation shall
55 // always trap.
57 auto iter = input_type_map_.find(op);
58 DCHECK_NE(iter, input_type_map_.end());
59 return iter->second;
60 }
61
62 private:
64 using Snapshot = TypeSnapshotTable::Snapshot;
65 using MaybeSnapshot = TypeSnapshotTable::MaybeSnapshot;
66
67 void StartNewSnapshotFor(const Block& block);
68 void ProcessOperations(const Block& block);
69 void ProcessBlock(const Block& block);
70 void ProcessBranchOnTarget(const BranchOp& branch, const Block& target);
71
72 void ProcessTypeCast(const WasmTypeCastOp& type_cast);
73 void ProcessTypeCheck(const WasmTypeCheckOp& type_check);
74 void ProcessAssertNotNull(const AssertNotNullOp& type_cast);
75 void ProcessNull(const NullOp& null);
76 void ProcessIsNull(const IsNullOp& is_null);
77 void ProcessParameter(const ParameterOp& parameter);
78 void ProcessStructGet(const StructGetOp& struct_get);
79 void ProcessStructSet(const StructSetOp& struct_set);
80 void ProcessArrayGet(const ArrayGetOp& array_get);
81 void ProcessArrayLength(const ArrayLengthOp& array_length);
82 void ProcessGlobalGet(const GlobalGetOp& global_get);
83 void ProcessRefFunc(const WasmRefFuncOp& ref_func);
84 void ProcessAllocateArray(const WasmAllocateArrayOp& allocate_array);
85 void ProcessAllocateStruct(const WasmAllocateStructOp& allocate_struct);
86 void ProcessPhi(const PhiOp& phi);
87 void ProcessTypeAnnotation(const WasmTypeAnnotationOp& type_annotation);
88
89 wasm::ValueType GetTypeForPhiInput(const PhiOp& phi, int input_index);
90
91 void CreateMergeSnapshot(const Block& block);
93 base::Vector<const bool> reachable);
94
95 // Updates the knowledge in the side table about the type of {object},
96 // returning the previous known type. Returns bottom if the refined type is
97 // uninhabited. In this case the operation shall always trap.
99 const Operation& op);
100 // Updates the knowledge in the side table to be a non-nullable type for
101 // {object}, returning the previous known type. Returns bottom if the refined
102 // type is uninhabited. In this case the operation shall always trap.
104 const Operation& op);
105
106 OpIndex ResolveAliases(OpIndex object) const;
108
109 // Returns the reachability status of a block. For any predecessor, this marks
110 // whether the *end* of the block is reachable, for the current block it marks
111 // whether the current instruction is reachable. (For successors the
112 // reachability is unknown.)
113 bool IsReachable(const Block& block) const;
114
118 const wasm::WasmModule* module_ = data_->wasm_module();
119 const wasm::FunctionSig* signature_ = data_->wasm_module_sig();
120 // Contains the snapshots for all blocks in the CFG.
122 // Maps the block id to a snapshot in the table defining the type knowledge
123 // at the end of the block.
126
127 // Tracks reachability of blocks throughout the analysis. Marking a block as
128 // unreachable means that the block in question is unreachable from the
129 // current "point of view" of the analysis, e.g. marking the current block as
130 // "unreachable" means that from "now on" all succeeding statements can treat
131 // it as unreachable, not that the beginning of the block was unreachable.
134
135 const Block* current_block_ = nullptr;
136 // For any operation that could potentially be refined, this map stores an
137 // entry mapping it to the inferred input type based on the analysis.
139 // Marker whether it is the first time visiting a loop header. In that case,
140 // loop phis can only use type information based on the forward edge of the
141 // loop. The value is false outside of loop headers.
143};
144
146
147template <class Next>
149 public:
150 TURBOSHAFT_REDUCER_BOILERPLATE(WasmGCTypedOptimization)
151
// Phase 1 of this reducer: run the WasmGCTypeAnalyzer over the input
// graph so per-operation input types are available during reduction,
// then chain to the next reducer's analysis phase.
152 void Analyze() {
153 analyzer_.Run();
154 Next::Analyze();
155 }
156
158 const WasmTypeCastOp& cast_op) {
159 LABEL_BLOCK(no_change) {
160 return Next::ReduceInputGraphWasmTypeCast(op_idx, cast_op);
161 }
162 if (ShouldSkipOptimizationStep()) goto no_change;
163
165 if (type.is_uninhabited()) {
166 // We are either already in unreachable code (then this instruction isn't
167 // even emitted) or the type analyzer inferred that this instruction will
168 // always trap. In either case we emit an unconditional trap to increase
169 // the chances of logic errors just leading to wrong behaviors but not
170 // resulting in security issues.
171 __ TrapIf(1, TrapId::kTrapIllegalCast);
172 __ Unreachable();
173 return OpIndex::Invalid();
174 }
175 if (type != wasm::ValueType()) {
176 CHECK(!type.is_uninhabited());
177 CHECK(wasm::IsSameTypeHierarchy(type.heap_type(),
178 cast_op.config.to.heap_type(), module_));
179 bool to_nullable = cast_op.config.to.is_nullable();
180 if (wasm::IsHeapSubtypeOf(type.heap_type(), cast_op.config.to.heap_type(),
181 module_, module_)) {
182 if (to_nullable || type.is_non_nullable()) {
183 // The inferred type is already as specific as the cast target, the
184 // cast is guaranteed to always succeed and can therefore be removed.
185 return __ MapToNewGraph(cast_op.object());
186 } else {
187 // The inferred heap type is already as specific as the cast target,
188 // but the source can be nullable and the target cannot be, so a null
189 // check is still required.
190 return __ AssertNotNull(__ MapToNewGraph(cast_op.object()), type,
191 TrapId::kTrapIllegalCast);
192 }
193 }
194 if (wasm::HeapTypesUnrelated(type.heap_type(),
195 cast_op.config.to.heap_type(), module_,
196 module_)) {
197 // A cast between unrelated types can only succeed if the argument is
198 // null. Otherwise, it always fails.
199 V<Word32> non_trapping_condition =
200 type.is_nullable() && to_nullable ? __ IsNull(__ MapToNewGraph(
201 cast_op.object()),
202 type)
203 : __ Word32Constant(0);
204 __ TrapIfNot(non_trapping_condition, TrapId::kTrapIllegalCast);
205 if (!to_nullable) {
206 __ Unreachable();
207 }
208 return __ MapToNewGraph(cast_op.object());
209 }
210
211 // If the cast resulted in an uninhabitable type, the analyzer should have
212 // returned a sentinel (bottom) type as {type}.
213 CHECK(!wasm::Intersection(type, cast_op.config.to, module_, module_)
215
216 // The cast cannot be replaced. Still, we can refine the source type, so
217 // that the lowering could potentially skip null or smi checks.
218 wasm::ValueType from_type =
219 wasm::Intersection(type, cast_op.config.from, module_, module_).type;
220 DCHECK(!from_type.is_uninhabited());
221 WasmTypeCheckConfig config{from_type, cast_op.config.to,
222 cast_op.config.exactness};
223 return __ WasmTypeCast(__ MapToNewGraph(cast_op.object()),
224 __ MapToNewGraph(cast_op.rtt()), config);
225 }
226 goto no_change;
227 }
228
230 V<Word32> op_idx, const WasmTypeCheckOp& type_check) {
231 LABEL_BLOCK(no_change) {
232 return Next::ReduceInputGraphWasmTypeCheck(op_idx, type_check);
233 }
234 if (ShouldSkipOptimizationStep()) goto no_change;
235
237 if (type.is_uninhabited()) {
238 __ Unreachable();
239 return OpIndex::Invalid();
240 }
241 if (type != wasm::ValueType()) {
242 CHECK(!type.is_uninhabited());
244 type.heap_type(), type_check.config.to.heap_type(), module_));
245 bool to_nullable = type_check.config.to.is_nullable();
246 if (wasm::IsHeapSubtypeOf(type.heap_type(),
247 type_check.config.to.heap_type(), module_,
248 module_)) {
249 if (to_nullable || type.is_non_nullable()) {
250 // The inferred type is guaranteed to be a subtype of the checked
251 // type.
252 return __ Word32Constant(1);
253 } else {
254 // The inferred type is guaranteed to be a subtype of the checked
255 // type if it is not null.
256 return __ Word32Equal(
257 __ IsNull(__ MapToNewGraph(type_check.object()), type), 0);
258 }
259 }
260 if (wasm::HeapTypesUnrelated(type.heap_type(),
261 type_check.config.to.heap_type(), module_,
262 module_)) {
263 if (to_nullable && type.is_nullable()) {
264 return __ IsNull(__ MapToNewGraph(type_check.object()), type);
265 } else {
266 return __ Word32Constant(0);
267 }
268 }
269
270 // If there isn't a type that matches our known input type and the
271 // type_check.config.to type, the type check always fails.
272 wasm::ValueType true_type =
273 wasm::Intersection(type, type_check.config.to, module_, module_).type;
274 if (true_type.is_uninhabited()) {
275 return __ Word32Constant(0);
276 }
277
278 // The check cannot be replaced. Still, we can refine the source type, so
279 // that the lowering could potentially skip null or smi checks.
280 wasm::ValueType from_type =
281 wasm::Intersection(type, type_check.config.from, module_, module_)
282 .type;
283 DCHECK(!from_type.is_uninhabited());
284 WasmTypeCheckConfig config{from_type, type_check.config.to,
285 type_check.config.exactness};
286 return __ WasmTypeCheck(__ MapToNewGraph(type_check.object()),
287 __ MapToNewGraph(type_check.rtt()), config);
288 }
289 goto no_change;
290 }
291
293 V<Object> op_idx, const AssertNotNullOp& assert_not_null) {
294 LABEL_BLOCK(no_change) {
295 return Next::ReduceInputGraphAssertNotNull(op_idx, assert_not_null);
296 }
297 if (ShouldSkipOptimizationStep()) goto no_change;
298
300 if (type.is_uninhabited()) {
301 // We are either already in unreachable code (then this instruction isn't
302 // even emitted) or the type analyzer inferred that this instruction will
303 // always trap. In either case we emit an unconditional trap to increase
304 // the chances of logic errors just leading to wrong behaviors but not
305 // resulting in security issues.
306 __ TrapIf(1, assert_not_null.trap_id);
307 __ Unreachable();
308 return OpIndex::Invalid();
309 }
310 if (type.is_non_nullable()) {
311 return __ MapToNewGraph(assert_not_null.object());
312 }
313 goto no_change;
314 }
315
317 const IsNullOp& is_null) {
318 LABEL_BLOCK(no_change) {
319 return Next::ReduceInputGraphIsNull(op_idx, is_null);
320 }
321 if (ShouldSkipOptimizationStep()) goto no_change;
322
324 if (type.is_uninhabited()) {
325 __ Unreachable();
326 return OpIndex::Invalid();
327 }
328 if (type.is_non_nullable()) {
329 return __ Word32Constant(0);
330 }
331 if (type != wasm::ValueType() &&
333 return __ Word32Constant(1);
334 }
335 goto no_change;
336 }
337
339 V<Object> op_idx, const WasmTypeAnnotationOp& type_annotation) {
340 // Remove type annotation operations as they are not needed any more.
341 return __ MapToNewGraph(type_annotation.value());
342 }
343
345 const StructGetOp& struct_get) {
346 LABEL_BLOCK(no_change) {
347 return Next::ReduceInputGraphStructGet(op_idx, struct_get);
348 }
349 if (ShouldSkipOptimizationStep()) goto no_change;
350
352 if (type.is_uninhabited()) {
353 // We are either already in unreachable code (then this instruction isn't
354 // even emitted) or the type analyzer inferred that this instruction will
355 // always trap. In either case we emit an unconditional trap to increase
356 // the chances of logic errors just leading to wrong behaviors but not
357 // resulting in security issues.
358 __ TrapIf(1, TrapId::kTrapNullDereference);
359 __ Unreachable();
360 return OpIndex::Invalid();
361 }
362 // Remove the null check if it is known to be not null.
363 if (struct_get.null_check == kWithNullCheck && type.is_non_nullable()) {
364 return __ StructGet(__ MapToNewGraph(struct_get.object()),
365 struct_get.type, struct_get.type_index,
366 struct_get.field_index, struct_get.is_signed,
368 }
369 goto no_change;
370 }
371
373 const StructSetOp& struct_set) {
374 LABEL_BLOCK(no_change) {
375 return Next::ReduceInputGraphStructSet(op_idx, struct_set);
376 }
377 if (ShouldSkipOptimizationStep()) goto no_change;
378
380 if (type.is_uninhabited()) {
381 // We are either already in unreachable code (then this instruction isn't
382 // even emitted) or the type analyzer inferred that this instruction will
383 // always trap. In either case we emit an unconditional trap to increase
384 // the chances of logic errors just leading to wrong behaviors but not
385 // resulting in security issues.
386 __ TrapIf(1, TrapId::kTrapNullDereference);
387 __ Unreachable();
388 return OpIndex::Invalid();
389 }
390 // Remove the null check if it is known to be not null.
391 if (struct_set.null_check == kWithNullCheck && type.is_non_nullable()) {
392 __ StructSet(__ MapToNewGraph(struct_set.object()),
393 __ MapToNewGraph(struct_set.value()), struct_set.type,
394 struct_set.type_index, struct_set.field_index,
396 return OpIndex::Invalid();
397 }
398 goto no_change;
399 }
400
402 const ArrayLengthOp& array_length) {
403 LABEL_BLOCK(no_change) {
404 return Next::ReduceInputGraphArrayLength(op_idx, array_length);
405 }
406 if (ShouldSkipOptimizationStep()) goto no_change;
407
409 // Remove the null check if it is known to be not null.
410 if (array_length.null_check == kWithNullCheck && type.is_non_nullable()) {
411 return __ ArrayLength(__ MapToNewGraph(array_length.array()),
413 }
414 goto no_change;
415 }
416
417 // TODO(14108): This isn't a type optimization and doesn't fit well into this
418 // reducer.
420 LABEL_BLOCK(no_change) { return Next::ReduceAnyConvertExtern(object); }
421 if (ShouldSkipOptimizationStep()) goto no_change;
422
423 if (object.valid()) {
424 const ExternConvertAnyOp* externalize =
425 __ output_graph().Get(object).template TryCast<ExternConvertAnyOp>();
426 if (externalize != nullptr) {
427 // Directly return the object as
428 // any.convert_extern(extern.convert_any(x)) == x.
429 return externalize->object();
430 }
431 }
432 goto no_change;
433 }
434
435 private:
436 Graph& graph_ = __ modifiable_input_graph();
437 const wasm::WasmModule* module_ = __ data() -> wasm_module();
439};
440
442
443} // namespace v8::internal::compiler::turboshaft
444
445#endif // V8_COMPILER_TURBOSHAFT_WASM_GC_TYPED_OPTIMIZATION_REDUCER_H_
#define REDUCE(operation)
#define REDUCE_INPUT_GRAPH(operation)
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
static constexpr OpIndex Invalid()
Definition index.h:88
void ProcessTypeAnnotation(const WasmTypeAnnotationOp &type_annotation)
void ProcessAllocateArray(const WasmAllocateArrayOp &allocate_array)
wasm::ValueType RefineTypeKnowledgeNotNull(OpIndex object, const Operation &op)
wasm::ValueType GetTypeForPhiInput(const PhiOp &phi, int input_index)
void ProcessAllocateStruct(const WasmAllocateStructOp &allocate_struct)
wasm::ValueType RefineTypeKnowledge(OpIndex object, wasm::ValueType new_type, const Operation &op)
void ProcessBranchOnTarget(const BranchOp &branch, const Block &target)
V< Word32 > REDUCE_INPUT_GRAPH WasmTypeCheck(V< Word32 > op_idx, const WasmTypeCheckOp &type_check)
V< Object > REDUCE_INPUT_GRAPH WasmTypeAnnotation(V< Object > op_idx, const WasmTypeAnnotationOp &type_annotation)
V< Any > REDUCE_INPUT_GRAPH StructGet(V< Any > op_idx, const StructGetOp &struct_get)
V< Object > REDUCE_INPUT_GRAPH AssertNotNull(V< Object > op_idx, const AssertNotNullOp &assert_not_null)
V< None > REDUCE_INPUT_GRAPH StructSet(V< None > op_idx, const StructSetOp &struct_set)
V< Word32 > REDUCE_INPUT_GRAPH ArrayLength(V< Word32 > op_idx, const ArrayLengthOp &array_length)
V< Word32 > REDUCE_INPUT_GRAPH IsNull(V< Word32 > op_idx, const IsNullOp &is_null)
V< Object > REDUCE_INPUT_GRAPH WasmTypeCast(V< Object > op_idx, const WasmTypeCastOp &cast_op)
constexpr Exactness exactness() const
Definition value-type.h:399
constexpr bool is_uninhabited() const
Definition value-type.h:455
#define TURBOSHAFT_REDUCER_BOILERPLATE(Name)
Definition assembler.h:823
#define LABEL_BLOCK(label)
Definition assembler.h:910
V8_EXPORT_PRIVATE bool ShouldSkipOptimizationStep()
Definition utils.h:84
ValueType ToNullSentinel(TypeInModule type)
bool IsSameTypeHierarchy(HeapType type1, HeapType type2, const WasmModule *module)
V8_INLINE bool IsHeapSubtypeOf(HeapType subtype, HeapType supertype, const WasmModule *sub_module, const WasmModule *super_module)
TypeInModule Intersection(ValueType type1, ValueType type2, const WasmModule *module1, const WasmModule *module2)
V8_INLINE bool HeapTypesUnrelated(HeapType heap1, HeapType heap2, const WasmModule *module1, const WasmModule *module2)
bool TryCast(Tagged< From > value, Tagged< To > *out)
Definition casting.h:77
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK(condition)
Definition logging.h:482
wasm::ValueType type