v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
type-inference-reducer.h
Go to the documentation of this file.
1// Copyright 2022 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_COMPILER_TURBOSHAFT_TYPE_INFERENCE_REDUCER_H_
6#define V8_COMPILER_TURBOSHAFT_TYPE_INFERENCE_REDUCER_H_
7
8#include <limits>
9#include <optional>
10
11#include "src/base/logging.h"
12#include "src/base/vector.h"
25
26namespace v8::internal::compiler::turboshaft {
27
29
30template <typename Op>
31V8_INLINE bool CanBeTyped(const Op& operation) {
32 return operation.outputs_rep().size() > 0;
33}
34
35struct TypeInferenceReducerArgs
36 : base::ContextualClass<TypeInferenceReducerArgs> {
37 enum class InputGraphTyping {
38 kNone, // Do not compute types for the input graph.
39 kPrecise, // Run a complete fixpoint analysis on the input graph.
40 };
41 enum class OutputGraphTyping {
42 kNone, // Do not compute types for the output graph.
43 kPreserveFromInputGraph, // Reuse types of the input graph where
44 // possible.
45 kRefineFromInputGraph, // Reuse types of the input graph and compute types
46 // for new nodes and more precise types where
47 // possible.
48 };
49 InputGraphTyping input_graph_typing;
50 OutputGraphTyping output_graph_typing;
51
52 TypeInferenceReducerArgs(InputGraphTyping input_graph_typing,
53 OutputGraphTyping output_graph_typing)
54 : input_graph_typing(input_graph_typing),
55 output_graph_typing(output_graph_typing) {}
56};
57
58// TypeInferenceReducer is the central component to infer types for Turboshaft
59// graphs. It comes with different options for how the input and output graph
60// should be typed:
61//
62// - InputGraphTyping::kNone: No types are computed for the input graph.
63// - InputGraphTyping::kPrecise: We run a full fixpoint analysis on the input
64// graph to infer the most precise types possible (see TypeInferenceAnalysis).
65//
66// - OutputGraphTyping::kNone: No types will be set for the output graph.
67// - OutputGraphTyping::kPreserveFromInputGraph: Types from the input graph will
68// be preserved for the output graph. Where this is not possible (e.g. new
69// operations introduced during lowering), the output operation will be untyped.
70// - OutputGraphTyping::kRefineFromInputGraph: Types from the input graph will
71// be used where they provide additional precision (e.g. loop phis). For new
72// operations, the reducer reruns the typer to make sure that the output graph
73// is fully typed.
74//
75// NOTE: The TypeInferenceReducer has to be the last reducer in the stack!
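//
// Usage sketch (illustrative only, not part of this header; assumes the
// ContextualVariable Scope pattern from src/base/contextual.h): a phase would
// typically put the desired options in scope before running a reducer stack
// that ends with TypeInferenceReducer, along the lines of
//
//   TypeInferenceReducerArgs::Scope typing_args{
//       TypeInferenceReducerArgs::InputGraphTyping::kPrecise,
//       TypeInferenceReducerArgs::OutputGraphTyping::kRefineFromInputGraph};
//
// With these options, the reducer runs the fixpoint analysis on the input
// graph and re-runs the typer for operations that are new or refinable in the
// output graph.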
76template <class Next>
77class TypeInferenceReducer
78 : public UniformReducerAdapter<TypeInferenceReducer, Next> {
81
82 public:
83 TURBOSHAFT_REDUCER_BOILERPLATE(TypeInference)
84
85 using Args = TypeInferenceReducerArgs;
87
88 TypeInferenceReducer() {
89 // It is not reasonable to try to reuse input graph types if there are none.
90 DCHECK_IMPLIES(args_.input_graph_typing == Args::InputGraphTyping::kNone,
91 args_.output_graph_typing !=
92 Args::OutputGraphTyping::kPreserveFromInputGraph);
93 }
94
95 void Analyze() {
96 if (args_.input_graph_typing == Args::InputGraphTyping::kPrecise) {
97#ifdef DEBUG
98 GrowingBlockSidetable<std::vector<std::pair<OpIndex, Type>>>
99 block_refinements(Asm().input_graph().block_count(), {},
100 Asm().phase_zone());
101 input_graph_types_ = analyzer_.Run(&block_refinements);
102 Tracing::Get().PrintPerBlockData(
103 "Type Refinements", Asm().input_graph(),
104 [&](std::ostream& stream, const turboshaft::Graph& graph,
105 turboshaft::BlockIndex index) -> bool {
106 const std::vector<std::pair<turboshaft::OpIndex, turboshaft::Type>>&
107 refinements = block_refinements[index];
108 if (refinements.empty()) return false;
109 stream << "\n";
110 for (const auto& [op, type] : refinements) {
111 stream << op << " : " << type << "\n";
112 }
113 return true;
114 });
115#else
116 input_graph_types_ = analyzer_.Run();
117#endif // DEBUG
118 Tracing::Get().PrintPerOperationData(
119 "Types", Asm().input_graph(),
120 [&](std::ostream& stream, const turboshaft::Graph& graph,
121 turboshaft::OpIndex index) -> bool {
122 turboshaft::Type type = input_graph_types_[index];
123 if (!type.IsInvalid() && !type.IsNone()) {
124 type.PrintTo(stream);
125 return true;
126 }
127 return false;
128 });
129 }
130 Next::Analyze();
131 }
132
133 Type GetInputGraphType(OpIndex ig_index) {
134 return input_graph_types_[ig_index];
135 }
136
137 Type GetOutputGraphType(OpIndex og_index) { return GetType(og_index); }
138
139 template <Opcode opcode, typename Continuation, typename... Ts>
140 OpIndex ReduceOperation(Ts... args) {
141 OpIndex index = Continuation{this}.Reduce(args...);
142 if (!NeedsTyping(index)) return index;
143
144 const Operation& op = Asm().output_graph().Get(index);
145 if (CanBeTyped(op)) {
146 Type type = Typer::TypeForRepresentation(
147 Asm().output_graph().Get(index).outputs_rep(), Asm().graph_zone());
148 SetType(index, type, true);
149 }
150 return index;
151 }
152
153 template <typename Op, typename Continuation>
154 OpIndex ReduceInputGraphOperation(OpIndex ig_index, const Op& operation) {
155 OpIndex og_index = Continuation{this}.ReduceInputGraph(ig_index, operation);
156 if (!og_index.valid()) return og_index;
157 if (args_.output_graph_typing == Args::OutputGraphTyping::kNone) {
158 return og_index;
159 }
160 if (!CanBeTyped(operation)) return og_index;
161
162 Type ig_type = GetInputGraphType(ig_index);
163 DCHECK_IMPLIES(args_.input_graph_typing != Args::InputGraphTyping::kNone,
164 !ig_type.IsInvalid());
165 if (!ig_type.IsInvalid()) {
166 Type og_type = GetType(og_index);
167 // If the type we have from the input graph is more precise, we keep it.
168 if (og_type.IsInvalid() ||
169 (ig_type.IsSubtypeOf(og_type) && !og_type.IsSubtypeOf(ig_type))) {
170 RefineTypeFromInputGraph(og_index, og_type, ig_type);
171 }
172 }
173 return og_index;
174 }
175
176 void Bind(Block* new_block) {
177 Next::Bind(new_block);
178
179 // Seal the current block first.
180 if (table_.IsSealed()) {
181 DCHECK_NULL(current_block_);
182 } else {
183 // If we bind a new block while the previous one is still unsealed, we
184 // finalize it.
185 DCHECK_NOT_NULL(current_block_);
186 DCHECK(current_block_->index().valid());
187 block_to_snapshot_mapping_[current_block_->index()] = table_.Seal();
188 current_block_ = nullptr;
189 }
190
191 // Collect the snapshots of all predecessors.
192 {
193 predecessors_.clear();
194 for (const Block* pred : new_block->PredecessorsIterable()) {
195 std::optional<table_t::Snapshot> pred_snapshot =
196 block_to_snapshot_mapping_[pred->index()];
197 DCHECK(pred_snapshot.has_value());
198 predecessors_.push_back(pred_snapshot.value());
199 }
200 std::reverse(predecessors_.begin(), predecessors_.end());
201 }
202
203 // Start a new snapshot for this block by merging information from
204 // predecessors.
205 {
206 auto MergeTypes = [&](table_t::Key,
207 base::Vector<const Type> predecessors) -> Type {
208 DCHECK_GT(predecessors.size(), 0);
209 Type result_type = predecessors[0];
210 for (size_t i = 1; i < predecessors.size(); ++i) {
211 result_type = Type::LeastUpperBound(result_type, predecessors[i],
212 Asm().graph_zone());
213 }
214 return result_type;
215 };
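 // Illustrative example (not from this file): if one predecessor knows a
 // value as Word32[0, 10] and another as Word32[20, 30], MergeTypes produces
 // their least upper bound, i.e. a Word32 type covering both ranges (the
 // exact shape depends on Type::LeastUpperBound).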
216
217 table_.StartNewSnapshot(base::VectorOf(predecessors_), MergeTypes);
218 }
219
220 // Check if the predecessor is a branch that allows us to refine a few
221 // types.
224 if (new_block->PredecessorCount() == 1) {
225 Block* predecessor = new_block->LastPredecessor();
226 const Operation& terminator =
227 predecessor->LastOperation(Asm().output_graph());
228 if (const BranchOp* branch = terminator.TryCast<BranchOp>()) {
229 DCHECK(branch->if_true == new_block || branch->if_false == new_block);
230 RefineTypesAfterBranch(branch, new_block,
231 branch->if_true == new_block);
232 }
233 }
234 }
235 current_block_ = new_block;
236 }
237
238 void RefineTypesAfterBranch(const BranchOp* branch, Block* new_block,
239 bool then_branch) {
240 const std::string branch_str = branch->ToString().substr(0, 40);
241 USE(branch_str);
242 TURBOSHAFT_TRACE_TYPING_OK("Br %3d:%-40s\n",
243 Asm().output_graph().Index(*branch).id(),
244 branch_str.c_str());
245
246 Typer::BranchRefinements refinements(
247 [this](OpIndex index) { return GetType(index); },
248 [&](OpIndex index, const Type& refined_type) {
249 RefineOperationType(new_block, index, refined_type,
250 then_branch ? 'T' : 'F');
251 });
252
253 // Inspect branch condition.
254 const Operation& condition = Asm().output_graph().Get(branch->condition());
255 refinements.RefineTypes(condition, then_branch, Asm().graph_zone());
256 }
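// Illustrative example (not from this file): for a branch on a comparison
// like "x < 10" where x is currently typed Word32[0, 100], the branch
// refinements can narrow x to roughly Word32[0, 9] in the true-successor and
// Word32[10, 100] in the false-successor; the refined types are recorded via
// the callback passed to Typer::BranchRefinements above.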
257
258 void RefineOperationType(Block* new_block, OpIndex op, const Type& type,
259 char case_for_tracing) {
260 DCHECK(op.valid());
261 DCHECK(!type.IsInvalid());
262
264 " %c: %3d:%-40s ~~> %s\n", case_for_tracing, op.id(),
265 Asm().output_graph().Get(op).ToString().substr(0, 40).c_str(),
266 type.ToString().c_str());
267
268 auto key_opt = op_to_key_mapping_[op];
269 // We might not have a key for this value, because we are running in a mode
270 // where we don't type all operations.
271 if (key_opt.has_value()) {
272 table_.Set(*key_opt, type);
273
274#ifdef DEBUG
275 std::vector<std::pair<OpIndex, Type>>& refinement =
276 Asm().output_graph().block_type_refinement()[new_block->index()];
277 refinement.push_back(std::make_pair(op, type));
278#endif
279
280 // TODO(nicohartmann@): One could push the refined type deeper into the
281 // operations.
282 }
283 }
284
285 OpIndex REDUCE(PendingLoopPhi)(OpIndex first, RegisterRepresentation rep) {
286 OpIndex index = Next::ReducePendingLoopPhi(first, rep);
287 if (!NeedsTyping(index)) return index;
288
289 // There is not much we can do for pending loop phis, because we don't know
290 // the type of the backedge yet, so we have to assume maximal type. If we
291 // run with a typed input graph, we can refine this type using the input
292 // graph's type (see ReduceInputGraphOperation).
293 SetType(index, Type::Any());
294 return index;
295 }
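 // Illustrative example (not from this file): for a loop counter phi whose
 // forward input is the constant 0, the backedge value does not exist yet at
 // this point, so the phi is conservatively typed here; with a precisely
 // typed input graph, ReduceInputGraphOperation can later narrow it (e.g. to
 // a Word32 range).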
296
297 OpIndex REDUCE(Phi)(base::Vector<const OpIndex> inputs,
298 RegisterRepresentation rep) {
299 OpIndex index = Next::ReducePhi(inputs, rep);
300 if (!NeedsTyping(index)) return index;
301
302 Type type = Type::None();
303 for (const OpIndex input : inputs) {
304 type = Type::LeastUpperBound(type, GetType(input), Asm().graph_zone());
305 }
306 SetType(index, type);
307 return index;
308 }
309
310 OpIndex REDUCE(Constant)(ConstantOp::Kind kind, ConstantOp::Storage value) {
311 OpIndex index = Next::ReduceConstant(kind, value);
312 if (!NeedsTyping(index)) return index;
313
314 Type type = Typer::TypeConstant(kind, value);
315 SetType(index, type);
316 return index;
317 }
318
319 V<Word32> REDUCE(Comparison)(V<Any> left, V<Any> right,
320 ComparisonOp::Kind kind,
321 RegisterRepresentation rep) {
322 OpIndex index = Next::ReduceComparison(left, right, kind, rep);
323 if (!NeedsTyping(index)) return index;
324
325 Type type = Typer::TypeComparison(GetType(left), GetType(right), rep, kind,
326 Asm().graph_zone());
327 SetType(index, type);
328 return index;
329 }
330
331 V<Any> REDUCE(Projection)(V<Any> input, uint16_t idx,
332 RegisterRepresentation rep) {
333 V<Any> index = Next::ReduceProjection(input, idx, rep);
334 if (!NeedsTyping(index)) return index;
335
336 Type type = Typer::TypeProjection(GetType(input), idx);
337 SetType(index, type);
338 return index;
339 }
340
341 V<Word> REDUCE(WordBinop)(V<Word> left, V<Word> right, WordBinopOp::Kind kind,
342 WordRepresentation rep) {
343 V<Word> index = Next::ReduceWordBinop(left, right, kind, rep);
344 if (!NeedsTyping(index)) return index;
345
346 Type type = Typer::TypeWordBinop(GetType(left), GetType(right), kind, rep,
347 Asm().graph_zone());
348 SetType(index, type);
349 return index;
350 }
351
352 OpIndex REDUCE(OverflowCheckedBinop)(V<Word> left, V<Word> right,
353 OverflowCheckedBinopOp::Kind kind,
354 WordRepresentation rep) {
355 OpIndex index = Next::ReduceOverflowCheckedBinop(left, right, kind, rep);
356 if (!NeedsTyping(index)) return index;
357
357
358 Type type = Typer::TypeOverflowCheckedBinop(GetType(left), GetType(right),
359 kind, rep, Asm().graph_zone());
360 SetType(index, type);
361 return index;
362 }
363
364 V<Float> REDUCE(FloatBinop)(V<Float> left, V<Float> right,
365 FloatBinopOp::Kind kind,
366 FloatRepresentation rep) {
367 V<Float> index = Next::ReduceFloatBinop(left, right, kind, rep);
368 if (!NeedsTyping(index)) return index;
369
370 Type type = Typer::TypeFloatBinop(GetType(left), GetType(right), kind, rep,
371 Asm().graph_zone());
372 SetType(index, type);
373 return index;
374 }
375
376 OpIndex REDUCE(CheckTurboshaftTypeOf)(OpIndex input,
377 RegisterRepresentation rep, Type type,
378 bool successful) {
379 Type input_type = GetType(input);
380 if (input_type.IsSubtypeOf(type)) {
381 OpIndex index = Next::ReduceCheckTurboshaftTypeOf(input, rep, type, true);
383 "CTOF %3d:%-40s\n P: %3d:%-40s ~~> %s\n", index.id(),
384 Asm().output_graph().Get(index).ToString().substr(0, 40).c_str(),
385 input.id(),
386 Asm().output_graph().Get(input).ToString().substr(0, 40).c_str(),
387 input_type.ToString().c_str());
388 return index;
389 }
390 if (successful) {
391 FATAL(
392 "Checking type %s of operation %d:%s failed after it passed in a "
393 "previous phase",
394 type.ToString().c_str(), input.id(),
395 Asm().output_graph().Get(input).ToString().c_str());
396 }
397 OpIndex index =
398 Next::ReduceCheckTurboshaftTypeOf(input, rep, type, successful);
400 "CTOF %3d:%-40s\n F: %3d:%-40s ~~> %s\n", index.id(),
401 Asm().output_graph().Get(index).ToString().substr(0, 40).c_str(),
402 input.id(),
403 Asm().output_graph().Get(input).ToString().substr(0, 40).c_str(),
404 input_type.ToString().c_str());
405 return index;
406 }
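// Note (summary of the check above): if the statically computed type of
// `input` is a subtype of the asserted `type`, the check is reduced as
// successful; a check that was reported successful in an earlier phase but
// fails here aborts, since that indicates a later reduction produced a less
// precise or wrong type.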
407
408 void RemoveLast(OpIndex index_of_last_operation) {
409 if (op_to_key_mapping_[index_of_last_operation]) {
410 op_to_key_mapping_[index_of_last_operation] = std::nullopt;
412 "REM %3d:%-40s %-40s\n", index_of_last_operation.id(),
413 Asm()
414 .output_graph()
415 .Get(index_of_last_operation)
416 .ToString()
417 .substr(0, 40)
418 .c_str(),
419 GetType(index_of_last_operation).ToString().substr(0, 40).c_str());
420 output_graph_types_[index_of_last_operation] = Type::Invalid();
421 }
422 Next::RemoveLast(index_of_last_operation);
423 }
424
425 private:
426 void RefineTypeFromInputGraph(OpIndex index, const Type& og_type,
427 const Type& ig_type) {
428 // Refinement should happen when we just lowered the corresponding
429 // operation, so we should be at the point where the operation is defined
430 // (e.g. not in a refinement after a branch). So the current block must
431 // contain the operation.
432 DCHECK(!ig_type.IsInvalid());
433
435 "Refi %3d:%-40s\n I: %-40s ~~> %-40s\n", index.id(),
436 Asm().output_graph().Get(index).ToString().substr(0, 40).c_str(),
437 (og_type.IsInvalid() ? "invalid" : og_type.ToString().c_str()),
438 ig_type.ToString().c_str());
439
440 RefineOperationType(Asm().current_block(), index, ig_type, 'I');
441 }
442
443 Type GetTypeOrInvalid(OpIndex index) {
444 if (auto key = op_to_key_mapping_[index]) return table_.Get(*key);
445 return Type::Invalid();
446 }
447
448 Type GetTupleType(const TupleOp& tuple) {
449 base::SmallVector<Type, 4> tuple_types;
450 for (OpIndex input : tuple.inputs()) {
451 tuple_types.push_back(GetType(input));
452 }
453 return TupleType::Tuple(base::VectorOf(tuple_types), Asm().graph_zone());
454 }
455
456 Type GetType(OpIndex index) {
457 Type type = GetTypeOrInvalid(index);
458 if (type.IsInvalid()) {
459 const Operation& op = Asm().output_graph().Get(index);
460 if (op.Is<TupleOp>()) {
461 return GetTupleType(op.Cast<TupleOp>());
462 } else {
463 return Typer::TypeForRepresentation(op.outputs_rep(),
464 Asm().graph_zone());
465 }
466 }
467 return type;
468 }
469
470 void SetType(OpIndex index, const Type& result_type,
471 bool is_fallback_for_unsupported_operation = false) {
472 DCHECK(!result_type.IsInvalid());
473
474 if (auto key_opt = op_to_key_mapping_[index]) {
475 table_.Set(*key_opt, result_type);
476 DCHECK(result_type.IsSubtypeOf(output_graph_types_[index]));
477 output_graph_types_[index] = result_type;
478 DCHECK(!output_graph_types_[index].IsInvalid());
479 } else {
480 auto key = table_.NewKey(Type::None());
481 op_to_key_mapping_[index] = key;
482 table_.Set(key, result_type);
483 output_graph_types_[index] = result_type;
484 }
485
486 if (!is_fallback_for_unsupported_operation) {
488 "Type %3d:%-40s ==> %s\n", index.id(),
489 Asm().output_graph().Get(index).ToString().substr(0, 40).c_str(),
490 result_type.ToString().c_str());
491 } else {
492 // TODO(nicohartmann@): Remove the fallback case once all operations are
493 // supported.
495 "TODO %3d:%-40s ==> %s\n", index.id(),
496 Asm().output_graph().Get(index).ToString().substr(0, 40).c_str(),
497 result_type.ToString().c_str());
498 }
499 }
500
501// Verification is more difficult, now that the output graph uses types from the
502// input graph. It is generally not possible to verify that the output graph's
503// type is a subtype of the input graph's type, because the typer might not
504// support a precise typing of the operations after the lowering.
505// TODO(nicohartmann@): Evaluate new strategies for verification.
506#if 0
507#ifdef DEBUG
508 void Verify(OpIndex input_index, OpIndex output_index) {
509 DCHECK(input_index.valid());
510 DCHECK(output_index.valid());
511
512 const auto& input_type = Asm().input_graph().operation_types()[input_index];
513 const auto& output_type = types_[output_index];
514
515 if (input_type.IsInvalid()) return;
516 DCHECK(!output_type.IsInvalid());
517
518 const bool is_okay = output_type.IsSubtypeOf(input_type);
519
521 "\033[%s %3d:%-40s %-40s\n %3d:%-40s %-40s\033[0m\n",
522 is_okay ? "32mOK " : "31mFAIL", input_index.id(),
523 Asm().input_graph().Get(input_index).ToString().substr(0, 40).c_str(),
524 input_type.ToString().substr(0, 40).c_str(), output_index.id(),
525 Asm().output_graph().Get(output_index).ToString().substr(0, 40).c_str(),
526 output_type.ToString().substr(0, 40).c_str());
527
528 if (V8_UNLIKELY(!is_okay)) {
529 FATAL(
530 "\033[%s %3d:%-40s %-40s\n %3d:%-40s %-40s\033[0m\n",
531 is_okay ? "32mOK " : "31mFAIL", input_index.id(),
532 Asm().input_graph().Get(input_index).ToString().substr(0, 40).c_str(),
533 input_type.ToString().substr(0, 40).c_str(), output_index.id(),
534 Asm()
535 .output_graph()
536 .Get(output_index)
537 .ToString()
538 .substr(0, 40)
539 .c_str(),
540 output_type.ToString().substr(0, 40).c_str());
541 }
542 }
543#endif
544#endif
545
546 bool NeedsTyping(OpIndex index) const {
547 return index.valid() && args_.output_graph_typing ==
548 Args::OutputGraphTyping::kRefineFromInputGraph;
549 }
550
552 GrowingOpIndexSidetable<Type> input_graph_types_{Asm().phase_zone(),
553 &Asm().input_graph()};
554 GrowingOpIndexSidetable<Type>& output_graph_types_{
555 Asm().output_graph().operation_types()};
556 table_t table_{Asm().phase_zone()};
557 const Block* current_block_ = nullptr;
558 GrowingOpIndexSidetable<std::optional<table_t::Key>> op_to_key_mapping_{
559 Asm().phase_zone(), &Asm().output_graph()};
560 GrowingBlockSidetable<std::optional<table_t::Snapshot>>
561 block_to_snapshot_mapping_{Asm().input_graph().block_count(),
562 std::nullopt, Asm().phase_zone()};
563 // {predecessors_} is used during merging, but we use an instance variable for
564 // it, in order to save memory and not reallocate it for each merge.
565 ZoneVector<table_t::Snapshot> predecessors_{Asm().phase_zone()};
566 TypeInferenceAnalysis analyzer_{Asm().modifiable_input_graph(),
567 Asm().phase_zone()};
568};
569
571
572} // namespace v8::internal::compiler::turboshaft
573
574#endif // V8_COMPILER_TURBOSHAFT_TYPE_INFERENCE_REDUCER_H_