#ifndef V8_COMPILER_TURBOSHAFT_BRANCH_ELIMINATION_REDUCER_H_
#define V8_COMPILER_TURBOSHAFT_BRANCH_ELIMINATION_REDUCER_H_
    // Bind(): excerpt. A branch target records its branch condition's value.
    Next::Bind(new_block);
    // ...
    if (new_block->IsBranchTarget()) {
      DCHECK_EQ(new_block->PredecessorCount(), 1);
      const Operation& op =
          new_block->LastPredecessor()->LastOperation(__ output_graph());
      if (const BranchOp* branch = op.TryCast<BranchOp>()) {
        bool condition_value = branch->if_true == new_block;
        known_conditions_.InsertNewKey(branch->condition(), condition_value);
      }
    }
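    // Illustrative sketch (not from the original file): after Bind() processes
    // a block reached as the if_true target of `Branch(c, ...)`, the map holds
    // {c -> true} for everything dominated by that block; an if_false target
    // records {c -> false} instead.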
    // REDUCE(Branch): excerpt.
    LABEL_BLOCK(no_change) {
      return Next::ReduceBranch(cond, if_true, if_false, hint);
    }
    // ...
    // If both targets are empty blocks that just Goto the same phi-less merge
    // block, the Branch can be replaced by a single Goto.
    if (const Block* if_true_origin = __ OriginForBlockStart(if_true)) {
      if (const Block* if_false_origin = __ OriginForBlockStart(if_false)) {
        const Operation& first_op_true =
            if_true_origin->FirstOperation(__ input_graph());
        const Operation& first_op_false =
            if_false_origin->FirstOperation(__ input_graph());
        const GotoOp* true_goto = first_op_true.template TryCast<GotoOp>();
        const GotoOp* false_goto = first_op_false.template TryCast<GotoOp>();
        if (true_goto && false_goto &&
            true_goto->destination == false_goto->destination) {
          Block* merge_block = true_goto->destination;
          if (!merge_block->HasPhis(__ input_graph())) {
            __ Goto(__ MapToNewGraph(merge_block));
            return {};
          }
        }
      }
    }

    // If the value of {cond} is already known on this path, replace the Branch
    // with a Goto to the corresponding target.
    if (auto cond_value = known_conditions_.Get(cond)) {
      __ Goto(*cond_value ? if_true : if_false);
      return {};
    }
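    // Illustrative sketch (not from the original file; block names are made
    // up): with a CFG in which the same condition {c} is branched on twice,
    // the known-condition case above rewrites the dominated Branch into a
    // Goto:
    //
    //     B0: Branch(c, B1, B2)          B0: Branch(c, B1, B2)
    //     B1: ...                  ==>   B1: ...
    //         Branch(c, B3, B4)              Goto(B3)   // c is known true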
    // REDUCE(Select): excerpt of the no-change fallback.
    return Next::ReduceSelect(cond, vtrue, vfalse, rep, hint, implem);
    // REDUCE(Goto): excerpt.
    if (!destination_origin || !destination_origin->IsMerge()) {
      goto no_change;
    }
    // ...
    static constexpr int kMaxOpCountForCloning = 13;
    // If the destination ends with a Branch on a condition whose value is
    // already known (or becomes known once the block is cloned), clone it and
    // end the copy with a Goto instead of its Branch.
    if (const BranchOp* branch =
            destination_origin->LastOperation(__ input_graph())
                .template TryCast<BranchOp>()) {
      V<Word32> condition =
          __ template MapToNewGraph<true>(branch->condition());
      if (condition.valid()) {
        auto condition_value = known_conditions_.Get(condition);
        if (!condition_value.has_value()) goto no_change;
        __ CloneBlockAndGoto(destination_origin);
        return {};
      } else if (destination_origin->Contains(branch->condition())) {
        if (__ input_graph().Get(branch->condition()).template Is<PhiOp>()) {
          __ CloneBlockAndGoto(destination_origin);
          return {};
        } else if (CanBeConstantFolded(branch->condition(),
                                       destination_origin)) {
          __ CloneBlockAndGoto(destination_origin);
          return {};
        }
      }
    }
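    // Illustrative sketch (not from the original file; block names are made
    // up): the cloning above targets merge blocks whose final Branch tests a
    // condition that is known on each incoming path:
    //
    //     B1: Goto(B3)   // c == 1            B1: Goto(B4)
    //     B2: Goto(B3)   // c == 0      ==>   B2: Goto(B5)
    //     B3: Branch(c, B4, B5)               (B3 cloned into B1/B2, folded)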
    // The current block has a single predecessor and is still empty (nothing
    // has been emitted into the output graph for it yet).
    if (Asm().current_block()->PredecessorCount() == 1 &&
        Asm().current_block()->begin() ==
            __ output_graph().next_operation_index()) {
      // ...
      Asm().CloneAndInlineBlock(destination_origin);
      // ...
    }
    // REDUCE(DeoptimizeIf): excerpt.
    LABEL_BLOCK(no_change) {
      return Next::ReduceDeoptimizeIf(condition, frame_state, negated,
                                      parameters);
    }
    // ...
    if (!condition_value.has_value()) {
      // ...
    }
    if ((*condition_value && !negated) || (!*condition_value && negated)) {
      return Next::ReduceDeoptimize(frame_state, parameters);
    }
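    // Illustrative sketch (not from the original file): with a known
    // condition,
    //   DeoptimizeIf(c /* known 1 */, fs, negated=false)  ==>  Deoptimize(fs)
    //   DeoptimizeIf(c /* known 0 */, fs, negated=false)  ==>  (removed)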
#if V8_ENABLE_WEBASSEMBLY
  V<None> REDUCE(TrapIf)(V<Word32> condition, OptionalV<FrameState> frame_state,
                         bool negated, const TrapId trap_id) {
    LABEL_BLOCK(no_change) {
      return Next::ReduceTrapIf(condition, frame_state, negated, trap_id);
    }
    // ...
    if (!condition_value.has_value()) {
      // ...
    }
    // ...
    V<Word32> static_condition = __ Word32Constant(*condition_value);
    if (negated) {
      __ TrapIfNot(static_condition, frame_state, trap_id);
    } else {
      __ TrapIf(static_condition, frame_state, trap_id);
    }
    return {};
  }
#endif  // V8_ENABLE_WEBASSEMBLY
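    // Illustrative sketch (not from the original file): unlike DeoptimizeIf,
    // the trap is not dropped here; its condition is replaced by a constant so
    // that later phases see a statically known trap condition, e.g.
    //   TrapIf(c /* known 1 */, fs, id)  ==>  TrapIf(Word32Constant(1), fs, id)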
    // Walk up the dominator tree.
    target = target->GetDominator();
    // ...
    target = target->GetDominator();
    // ReplayMissingPredecessors(): excerpt. Replay the missing dominators
    // (oldest first), re-recording the branch condition that each branch
    // target implies.
    for (auto it = missing_blocks.rbegin(); it != missing_blocks.rend(); ++it) {
      Block* block = *it;
      // ...
      if (block->IsBranchTarget()) {
        const Operation& op =
            block->LastPredecessor()->LastOperation(__ output_graph());
        if (const BranchOp* branch = op.TryCast<BranchOp>()) {
          DCHECK(branch->if_true->index() == block->index() ||
                 branch->if_false->index() == block->index());
          // {if_true} might not have been bound yet (invalid index); in that
          // case the value is inferred from {if_false} instead.
          bool condition_value =
              branch->if_true->index().valid()
                  ? branch->if_true->index() == block->index()
                  : branch->if_false->index() != block->index();
          known_conditions_.InsertNewKey(branch->condition(), condition_value);
        }
      }
    }
  // CanBeConstantFolded(): whether the operation at {idx} could be constant-
  // folded once the condition's block is cloned into the current predecessor.
  bool CanBeConstantFolded(OpIndex idx, const Block* cond_input_block,
                           bool has_phi = false, int depth = 0) {
    // Only explore the input graph up to a small fixed depth.
    static constexpr int kMaxDepth = 4;
    if (depth > kMaxDepth) return false;
    const Operation& op = __ input_graph().Get(idx);
    if (!cond_input_block->Contains(idx)) {
      // ...
    }
    // ...
    if (op.Is<PhiOp>()) {
      // For a Phi, follow the input that flows in from the current block.
      int pred_index = cond_input_block->GetPredecessorIndex(
          __ current_block()->OriginForBlockEnd());
      return CanBeConstantFolded(op.input(pred_index),
                                 cond_input_block, /*has_phi=*/true, depth);
    }
    // ...
  }
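  // Illustrative sketch (not from the original file): the shape that makes
  // CanBeConstantFolded() succeed is a condition built only from constants and
  // from Phis whose input coming from the current block is constant, e.g.
  //   x = Phi(0, 1); ...; Branch(Word32Equal(x, 0), ...)
  // Once the block is cloned into one predecessor, the Phi disappears and the
  // comparison folds to a constant.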