v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
structural-optimization-reducer.h
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_TURBOSHAFT_STRUCTURAL_OPTIMIZATION_REDUCER_H_
#define V8_COMPILER_TURBOSHAFT_STRUCTURAL_OPTIMIZATION_REDUCER_H_

#include <cstdio>

#include "src/compiler/turboshaft/assembler.h"
#include "src/compiler/turboshaft/index.h"
#include "src/compiler/turboshaft/operations.h"
#include "src/zone/zone.h"

// The StructuralOptimizationReducer is suitable for transformations that
// don't reduce individual operations but rather change the structure of
// the graph.
//
// We currently support a reduction which transforms if-else cascades
// that check whether a given value is equal to a 32-bit constant from a given
// set into a switch with cases corresponding to the constants in the set.
//
// So, for example, code like:
//   [only pure ops 1]
//   if (x == 3) {
//     B1;
//   } else {
//     [only pure ops 2]
//     if (x == 5) {
//       B2;
//     } else {
//       B3;
//     }
//   }
//
// will be transformed to:
//   [only pure ops 1]
//   [only pure ops 2]
//   switch (x) {
//     case 3:
//       B1;
//     case 5:
//       B2;
//     default:
//       B3;
//   }
//
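// (Note that each case of the emitted Switch jumps to its own block; unlike
// in C, there is no fallthrough from one case into the next.)
//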
// Or represented graphically:
//
//   [only pure ops 1]                          [only pure ops 1]
//        x == 3                                [only pure ops 2]
//        Branch                                    Switch(x)
//        |    |                            ---------  |  ---------
//      T |    | F                          | case 3   | case 5   | default
//        v    v                  becomes   v          v          v
//       B1   [only pure ops 2]   ======>   B1         B2         B3
//                 x == 5
//                 Branch
//                 |    |
//               T |    | F
//                 v    v
//                B2    B3
//

// TODO(mslekova): Introduce a flag and move to a common graph place.
// #define TRACE_REDUCTIONS
#ifdef TRACE_REDUCTIONS
#define TRACE(str, ...) \
  { PrintF(str, ##__VA_ARGS__); }
#else  // TRACE_REDUCTIONS
#define TRACE(str, ...)

#endif  // TRACE_REDUCTIONS

namespace v8::internal::compiler::turboshaft {

template <class Next>
class StructuralOptimizationReducer : public Next {
 public:
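  // Standard Turboshaft reducer boilerplate; among other things this provides
  // Asm(), which is used below to read the input graph and to emit operations
  // into the output graph.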
  TURBOSHAFT_REDUCER_BOILERPLATE(StructuralOptimization)

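  // Tries to recognize an if-else cascade starting at {branch} that compares
  // the same variable against distinct 32-bit constants and, if successful,
  // replaces the whole cascade with a single Switch; otherwise defers to the
  // next reducer in the stack.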
  OpIndex ReduceInputGraphBranch(OpIndex input_index, const BranchOp& branch) {
    LABEL_BLOCK(no_change) {
      return Next::ReduceInputGraphBranch(input_index, branch);
    }
    if (ShouldSkipOptimizationStep()) goto no_change;

    TRACE("[structural] Calling ReduceInputGraphBranch for index: %u\n",
          static_cast<unsigned int>(input_index.id()));

    base::SmallVector<SwitchOp::Case, 16> cases;
    base::SmallVector<const Block*, 16> false_blocks;

    Block* current_if_true;
    Block* current_if_false;
    const BranchOp* current_branch = &branch;
    BranchHint current_branch_hint;
    BranchHint next_hint = BranchHint::kNone;

    OpIndex switch_var = OpIndex::Invalid();
    uint32_t value;
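    // Walk down the if-else cascade, turning one branch into a switch case
    // per iteration, until we reach a branch that can't become part of the
    // switch.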
    while (true) {
      // If we encounter a condition that is not equality, we can't turn it
      // into a switch case.
      const Operation& cond =
          Asm().input_graph().Get(current_branch->condition());

      if (!cond.template Is<ComparisonOp>()) {
        // 'if (x == 0)' may have been optimized to 'if (x)', so we should
        // take this into consideration.

        // The "false" destination will be inlined before the switch is
        // emitted, so it should only contain pure operations.
        if (!ContainsOnlyPureOps(current_branch->if_true,
                                 Asm().input_graph())) {
          TRACE("\t [break] End of only-pure-ops cascade reached.\n");
          break;
        }

        OpIndex current_var = current_branch->condition();
        if (!switch_var.valid()) {
          switch_var = current_var;
        } else if (switch_var != current_var) {
          TRACE("\t [bailout] Not all branches compare the same variable.\n");
          break;
        }
        value = 0;
        // The true/false destinations of 'if (x)' are reversed compared to
        // 'if (x == 0)', so swap them and invert the hint.
        current_if_true = current_branch->if_false;
        current_if_false = current_branch->if_true;
        const BranchHint hint = current_branch->hint;
        current_branch_hint = hint == BranchHint::kNone   ? BranchHint::kNone
                              : hint == BranchHint::kTrue ? BranchHint::kFalse
                                                          : BranchHint::kTrue;
      } else {
        const ComparisonOp* equal =
            cond.template TryCast<Opmask::kWord32Equal>();
        if (!equal) {
          TRACE(
              "\t [bailout] Branch with different condition than Word32 "
              "Equal.\n");
          break;
        }
        // MachineOptimizationReducer should normalize equality to put
        // constants on the right.
        const Operation& right_op = Asm().input_graph().Get(equal->right());
        if (!right_op.Is<Opmask::kWord32Constant>()) {
          TRACE(
              "\t [bailout] No Word32 constant on the right side of Equal.\n");
          break;
        }

        // The "false" destination will be inlined before the switch is
        // emitted, so it should only contain pure operations.
        if (!ContainsOnlyPureOps(current_branch->if_false,
                                 Asm().input_graph())) {
          TRACE("\t [break] End of only-pure-ops cascade reached.\n");
          break;
        }
        const ConstantOp& const_op = right_op.Cast<ConstantOp>();
        value = const_op.word32();

        // If the comparison is against a different variable, we can't
        // introduce a switch.
        OpIndex current_var = equal->left();
        if (!switch_var.valid()) {
          switch_var = current_var;
        } else if (switch_var != current_var) {
          TRACE("\t [bailout] Not all branches compare the same variable.\n");
          break;
        }

        current_if_true = current_branch->if_true;
        current_if_false = current_branch->if_false;
        current_branch_hint = current_branch->hint;
      }

      DCHECK(current_if_true && current_if_false);

      // We can't just use `current_branch->hint` for every case. Consider:
      //
      //   if (a) { }
      //   else if (b) { }
      //   else if (likely(c)) { }
      //   else if (d) { }
      //   else { }
      //
      // The fact that `c` is Likely doesn't tell us anything about the
      // likeliness of `a` and `b` compared to `c`, which means that `c`
      // shouldn't have the Likely hint in the switch. However, since `c` is
      // likely here, it means that `d` and "default" are both unlikely, even
      // in the switch.
      //
      // So, for the 1st case, we use `current_branch->hint`.
      // Then, when we encounter a Likely hint, we mark all of the subsequent
      // cases as Unlikely, but don't mark the current one as Likely. This is
      // done with the `next_hint` variable, which is initially kNone, but
      // becomes kFalse when we encounter a Likely branch.
      // We never set `next_hint` to kTrue, as it would only apply to
      // subsequent cases and not to already-emitted cases. The only case that
      // could thus have a kTrue annotation is the 1st one.
      DCHECK_NE(next_hint, BranchHint::kTrue);
      BranchHint hint = next_hint;
      if (cases.size() == 0) {
        // The 1st case gets its original hint.
        hint = current_branch_hint;
      } else if (current_branch_hint == BranchHint::kFalse) {
        // For other cases, if the branch has a kFalse hint, we do use it,
        // regardless of `next_hint`.
        hint = BranchHint::kNone;
      }
      if (current_branch_hint == BranchHint::kTrue) {
        // This branch is likely true, which means that all subsequent cases
        // are unlikely.
        next_hint = BranchHint::kFalse;
      }

      // The current_if_true block becomes the corresponding switch case block.
      cases.emplace_back(value, Asm().MapToNewGraph(current_if_true), hint);

      // All pure ops from the if_false block should be executed before
      // the switch, except the last Branch operation (which we drop).
      false_blocks.push_back(current_if_false);

      // If we encounter an if_false block that doesn't end with a Branch,
      // this means we've reached the end of the cascade.
      const Operation& maybe_branch =
          current_if_false->LastOperation(Asm().input_graph());
      if (!maybe_branch.Is<BranchOp>()) {
        TRACE("\t [break] Reached end of the if-else cascade.\n");
        break;
      }

      // Iterate to the next if_false block in the cascade.
      current_branch = &maybe_branch.template Cast<BranchOp>();
    }

    // Probably better to keep short if-else cascades as they are.
    if (cases.size() <= 2) {
      TRACE("\t [bailout] Cascade with less than 2 levels of nesting.\n");
      goto no_change;
    }
    CHECK_EQ(cases.size(), false_blocks.size());

    // Sorting the cases because it will help figure out if there is a
    // duplicate case (in which case we bail out, since this is not well
    // handled by the code generator).
    // Note that this isn't wasted work: there is a good chance that the
    // instruction selector will emit a binary search for this switch, which
    // will require the cases to be sorted.
    std::stable_sort(
        cases.begin(), cases.end(),
        [](SwitchOp::Case a, SwitchOp::Case b) { return a.value < b.value; });
    auto it = std::adjacent_find(
        cases.begin(), cases.end(),
        [](SwitchOp::Case a, SwitchOp::Case b) { return a.value == b.value; });
    if (it != cases.end()) {
      TRACE("\t [bailout] Multiple cases with the value %d.\n", (*it).value);
      goto no_change;
    }

    TRACE("[reduce] Successfully emit a Switch with %zu cases.\n",
          cases.size());
    return EmitSwitch(switch_var, cases, false_blocks, current_if_false,
                      next_hint);
  }

 private:
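  // Returns true if every operation in {block}, except its final (terminator)
  // operation, can be hoisted above a branch.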
  static bool ContainsOnlyPureOps(const Block* block, const Graph& graph) {
    for (const auto& op : base::IterateWithoutLast(graph.operations(*block))) {
      // We are moving the block content to before the switch, effectively
      // moving it before the previously existing branches.
      if (!op.Effects().hoistable_before_a_branch()) {
        return false;
      }
    }
    return true;
  }

  // Visits and emits {input_block} right now (i.e., in the current block),
  // stopping before the block's last operation.
  void InlineAllOperationsWithoutLast(const Block* input_block) {
    auto all_ops = Asm().input_graph().OperationIndices(*input_block);

    for (OpIndex op : base::IterateWithoutLast(all_ops)) {
      Asm().InlineOp(op, input_block);
    }
  }
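  // Inlines the pure contents of all but the last collected false block ahead
  // of the switch, then emits the Switch itself, with {current_if_false} as
  // the default destination.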
  V<None> EmitSwitch(OpIndex switch_var,
                     base::SmallVector<SwitchOp::Case, 16>& cases,
                     base::SmallVector<const Block*, 16>& false_blocks,
                     Block* current_if_false, BranchHint next_hint) {
    // We're skipping the last false block, as it becomes the default block.
    for (size_t i = 0; i < false_blocks.size() - 1; ++i) {
      const Block* block = false_blocks[i];
      InlineAllOperationsWithoutLast(block);
    }

    // The last current_if_true block that ends the cascade becomes the default
    // case.
    Block* default_block = current_if_false;
    Asm().Switch(
        Asm().MapToNewGraph(switch_var),
        Asm().output_graph().graph_zone()->CloneVector(base::VectorOf(cases)),
        Asm().MapToNewGraph(default_block), next_hint);
    return OpIndex::Invalid();
  }
};

}  // namespace v8::internal::compiler::turboshaft

#undef TRACE

#endif  // V8_COMPILER_TURBOSHAFT_STRUCTURAL_OPTIMIZATION_REDUCER_H_
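How a reducer like this is combined with others is easiest to see with a small, self-contained sketch of the mixin pattern used above: each reducer derives from its `Next` template parameter and either handles an operation itself or forwards it to `Next`. The sketch below is illustrative only and does not use V8's actual types; `BaseEmitter` and `ConstantConditionReducer` are made-up names, and in V8 the stacking is performed by the Turboshaft phase machinery rather than written out by hand.

#include <cstdio>

// Bottom of the stack: just "emits" the operation.
struct BaseEmitter {
  int ReduceBranch(int condition) {
    std::printf("emit branch on condition %d\n", condition);
    return condition;
  }
};

// A reducer mixin: handles the cases it understands and forwards the rest.
template <class Next>
struct ConstantConditionReducer : public Next {
  int ReduceBranch(int condition) {
    if (condition == 0 || condition == 1) {
      std::printf("constant condition %d, no branch needed\n", condition);
      return condition;
    }
    // Not handled here: defer to the next reducer in the stack.
    return Next::ReduceBranch(condition);
  }
};

int main() {
  // The stack is built by nesting the mixins.
  ConstantConditionReducer<BaseEmitter> stack;
  stack.ReduceBranch(1);   // handled by ConstantConditionReducer
  stack.ReduceBranch(42);  // forwarded to BaseEmitter
  return 0;
}

The `LABEL_BLOCK(no_change) { return Next::ReduceInputGraphBranch(...); }` pattern in the real reducer plays the same role as the `return Next::ReduceBranch(condition);` line in this sketch: when the cascade can't be turned into a switch, the branch is simply passed down the stack unchanged.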