#define TRACE(...)                                    \
  do {                                                \
    if (v8_flags.trace_turbo_jt) PrintF(__VA_ARGS__); \
  } while (false)

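// State for the forwarding DFS: `result` maps each block's RPO number to its
// forwarding target (or a visitation sentinel), `stack` holds the blocks
// whose targets are still being resolved, and `forwarded` records whether any
// block was forwarded at all.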
struct JumpThreadingState {
  bool forwarded;
  ZoneVector<RpoNumber>& result;
  ZoneStack<RpoNumber>& stack;

  void Clear(size_t count) { result.assign(count, unvisited()); }
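  // Push a block onto the stack if it has not been visited yet.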
  void PushIfUnvisited(RpoNumber num) {
    if (result[num.ToInt()] == unvisited()) {
      stack.push(num);
      result[num.ToInt()] = onstack();
    }
  }
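  // Resolve the target of the block on top of the stack: recurse into an
  // unvisited successor, break a cycle, or adopt the successor's target.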
  void Forward(RpoNumber to) {
    RpoNumber from = stack.top();
    RpoNumber to_to = result[to.ToInt()];
    bool pop = true;
    if (to == from) {
      TRACE("  xx %d\n", from.ToInt());
      result[from.ToInt()] = from;
    } else if (to_to == unvisited()) {
      TRACE("  fw %d -> %d (recurse)\n", from.ToInt(), to.ToInt());
      stack.push(to);
      result[to.ToInt()] = onstack();
      pop = false;  // recurse.
    } else if (to_to == onstack()) {
      TRACE("  fw %d -> %d (cycle)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to;  // break the cycle.
      forwarded = true;
    } else {
      TRACE("  fw %d -> %d (forward)\n", from.ToInt(), to.ToInt());
      result[from.ToInt()] = to_to;  // forward the block.
      forwarded = true;
    }
    if (pop) stack.pop();
  }

  RpoNumber unvisited() { return RpoNumber::FromInt(-1); }
  RpoNumber onstack() { return RpoNumber::FromInt(-2); }
};

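// Record of jumps that carry non-redundant gap moves, keyed by target block:
// two jumps to the same target with identical moves can be merged.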
struct GapJumpRecord {
  explicit GapJumpRecord(Zone* zone) : zone_(zone), gap_jump_records_(zone) {}

  struct Record {
    RpoNumber block;
    Instruction* instr;
  };

  struct RpoNumberHash {
    std::size_t operator()(const RpoNumber& key) const {
      return std::hash<int>()(key.ToInt());
    }
  };
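  // Checks whether a jump to `target_block` carrying the same gap moves as
  // `instr` has been recorded before. If so, `*forward_to` is set to the
  // block containing the earlier, identical jump; otherwise `instr` itself
  // is recorded for future queries.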
  bool CanForwardGapJump(Instruction* instr, RpoNumber instr_block,
                         RpoNumber target_block, RpoNumber* forward_to) {
    DCHECK_EQ(instr->arch_opcode(), kArchJmp);
    bool can_forward = false;
    auto search = gap_jump_records_.find(target_block);
    if (search != gap_jump_records_.end()) {
      for (Record& record : search->second) {
        Instruction* record_instr = record.instr;
        DCHECK_EQ(record_instr->arch_opcode(), kArchJmp);
        bool is_same_instr = true;
        for (int i = Instruction::FIRST_GAP_POSITION;
             i <= Instruction::LAST_GAP_POSITION; i++) {
          Instruction::GapPosition pos =
              static_cast<Instruction::GapPosition>(i);
          ParallelMove* record_move = record_instr->GetParallelMove(pos);
          ParallelMove* instr_move = instr->GetParallelMove(pos);
          if (record_move == nullptr && instr_move == nullptr) continue;
          if (((record_move == nullptr) != (instr_move == nullptr)) ||
              !record_move->Equals(*instr_move)) {
            is_same_instr = false;
            break;
          }
        }
        if (is_same_instr) {
          // Found an instruction identical to the recorded one.
          *forward_to = record.block;
          can_forward = true;
          break;
        }
      }
      if (!can_forward) {
        // No identical instruction has been recorded for this target block,
        // so create a new record with the given instruction.
        search->second.push_back({instr_block, instr});
      }
    } else {
      // This is the first explored gap jump to the target block.
      auto ins =
          gap_jump_records_.insert({target_block, ZoneVector<Record>(zone_)});
      if (ins.second) {
        ins.first->second.reserve(4);
        ins.first->second.push_back({instr_block, instr});
      }
    }
    return can_forward;
  }

  Zone* zone_;
  ZoneUnorderedMap<RpoNumber, ZoneVector<Record>, RpoNumberHash>
      gap_jump_records_;
};

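// Compute the forwarding target for each block: a block that contains nothing
// but a jump (or a shareable return) is forwarded to its destination, and
// chains of such blocks are collapsed.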
bool JumpThreading::ComputeForwarding(Zone* local_zone,
                                      ZoneVector<RpoNumber>* result,
                                      InstructionSequence* code,
                                      bool frame_at_start) {
  ZoneStack<RpoNumber> stack(local_zone);
  JumpThreadingState state = {false, *result, stack};
  state.Clear(code->InstructionBlockCount());
  RpoNumber empty_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_deconstruct_frame_return_size;
  RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid();
  int32_t empty_no_deconstruct_frame_return_size;
  GapJumpRecord record(local_zone);

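  // Iterate over the blocks forward, pushing the blocks onto the stack.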
  for (auto const instruction_block : code->instruction_blocks()) {
    RpoNumber current = instruction_block->rpo_number();
    state.PushIfUnvisited(current);
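    // Process the stack, which implements DFS through empty blocks.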
    while (!state.stack.empty()) {
      InstructionBlock* block = code->InstructionBlockAt(state.stack.top());
      TRACE("jt [%d] B%d\n", static_cast<int>(stack.size()),
            block->rpo_number().ToInt());
      RpoNumber fw = block->rpo_number();
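      // Process the instructions in a block up to a non-empty instruction.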
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        Instruction* instr = code->InstructionAt(i);
        if (!instr->AreMovesRedundant()) {
          TRACE("  parallel move");
          // Can't skip instructions with non-redundant moves, except when we
          // can forward to a block with identical gap moves.
          if (instr->arch_opcode() == kArchJmp) {
            TRACE(" jmp");
            RpoNumber forward_to;
            if ((frame_at_start || !(block->must_deconstruct_frame() ||
                                     block->must_construct_frame())) &&
                record.CanForwardGapJump(instr, block->rpo_number(),
                                          code->InputRpo(instr, 0),
                                          &forward_to)) {
              DCHECK(forward_to.IsValid());
              fw = forward_to;
              TRACE("\n  merge B%d into B%d", block->rpo_number().ToInt(),
                    forward_to.ToInt());
            }
          }
          TRACE("\n");
        } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
          // Can't skip instructions with flags continuations.
          TRACE("  flags\n");
        } else if (instr->IsNop()) {
          // Skip nops.
          TRACE("  nop\n");
          continue;
        } else if (instr->arch_opcode() == kArchJmp) {
          // Try to forward the destination of the jump.
          TRACE("  jmp\n");
          // A block that must construct or deconstruct the frame cannot be
          // forwarded, unless the frame is built at function start.
          if (frame_at_start || !(block->must_deconstruct_frame() ||
                                  block->must_construct_frame())) {
            fw = code->InputRpo(instr, 0);
          }
        } else if (instr->IsRet()) {
          TRACE("  ret\n");
          CHECK_IMPLIES(block->must_construct_frame(),
                        block->must_deconstruct_frame());
          // Only handle returns with immediate/constant operands, since they
          // must always be the same for all returns in a function. Dynamic
          // return values might use different registers at different return
          // sites and therefore cannot be shared.
          if (instr->InputAt(0)->IsImmediate()) {
            int32_t return_size =
                ImmediateOperand::cast(instr->InputAt(0))->inline_int32_value();
            // Returns can be shared only among blocks that agree on the
            // |must_deconstruct_frame| attribute and the return size.
            if (block->must_deconstruct_frame()) {
              if (empty_deconstruct_frame_return_block ==
                  RpoNumber::Invalid()) {
                empty_deconstruct_frame_return_block = block->rpo_number();
                empty_deconstruct_frame_return_size = return_size;
              } else if (empty_deconstruct_frame_return_size == return_size) {
                fw = empty_deconstruct_frame_return_block;
                block->clear_must_deconstruct_frame();
              }
            } else {
              if (empty_no_deconstruct_frame_return_block ==
                  RpoNumber::Invalid()) {
                empty_no_deconstruct_frame_return_block = block->rpo_number();
                empty_no_deconstruct_frame_return_size = return_size;
              } else if (empty_no_deconstruct_frame_return_size ==
                         return_size) {
                fw = empty_no_deconstruct_frame_return_block;
              }
            }
          }
        } else {
          // Can't skip other instructions.
          TRACE("  other\n");
        }
        break;
      }
      state.Forward(fw);
    }
  }

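  // Dump the computed forwarding, for debugging.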
  if (v8_flags.trace_turbo_jt) {
    for (int i = 0; i < static_cast<int>(result->size()); i++) {
      TRACE("B%d ", i);
      int to = (*result)[i].ToInt();
      if (i != to) {
        TRACE("-> B%d\n", to);
      } else {
        TRACE("\n");
      }
    }
  }

  return state.forwarded;
}

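// Apply the forwarding computed above: overwrite the jumps and returns of
// skipped blocks with nops, propagate handler and switch-target annotations,
// and patch RPO immediates to the forwarded targets.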
void JumpThreading::ApplyForwarding(Zone* local_zone,
                                    ZoneVector<RpoNumber> const& result,
                                    InstructionSequence* code) {
  if (!v8_flags.turbo_jt) return;

  ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone);

  // Skip empty blocks when the previous block doesn't fall through.
  bool prev_fallthru = true;
  for (auto const block : code->ao_blocks()) {
    RpoNumber block_rpo = block->rpo_number();
    int block_num = block_rpo.ToInt();
    RpoNumber result_rpo = result[block_num];
    skip[block_num] = !prev_fallthru && result_rpo != block_rpo;

    if (result_rpo != block_rpo) {
      // We need the handler and switch target information to be propagated,
      // so that branch targets are annotated as necessary for control flow
      // integrity checks (when enabled).
      if (code->InstructionBlockAt(block_rpo)->IsHandler()) {
        code->InstructionBlockAt(result_rpo)->MarkHandler();
      }
      if (code->InstructionBlockAt(block_rpo)->IsSwitchTarget()) {
        code->InstructionBlockAt(result_rpo)->set_switch_target(true);
      }
    }

    bool fallthru = true;
    for (int instr_idx = block->code_start(); instr_idx < block->code_end();
         ++instr_idx) {
      Instruction* instr = code->InstructionAt(instr_idx);
      if (FlagsModeField::decode(instr->opcode()) == kFlags_branch) {
        fallthru = false;  // Branches don't fall through to the next block.
      } else if (instr->arch_opcode() == kArchJmp ||
                 instr->arch_opcode() == kArchRet) {
        if (skip[block_num]) {
          // Overwrite a redundant jump or return with a nop.
          TRACE("jt-fw nop @%d\n", instr_idx);
          instr->OverwriteWithNop();
          // Eliminate all the ParallelMoves.
          for (int i = Instruction::FIRST_GAP_POSITION;
               i <= Instruction::LAST_GAP_POSITION; i++) {
            Instruction::GapPosition pos =
                static_cast<Instruction::GapPosition>(i);
            ParallelMove* instr_move = instr->GetParallelMove(pos);
            if (instr_move != nullptr) {
              instr_move->Eliminate();
            }
          }
          // A skipped block no longer acts as a handler or a switch target.
          code->InstructionBlockAt(block_rpo)->UnmarkHandler();
          code->InstructionBlockAt(block_rpo)->set_switch_target(false);
          code->InstructionBlockAt(block_rpo)->set_omitted_by_jump_threading();
        }
        fallthru = false;  // Jumps don't fall through to the next block.
      }
    }
    prev_fallthru = fallthru;
  }

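  // Patch RPO immediate operands so they refer to the forwarded blocks.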
  auto& rpo_immediates = code->rpo_immediates();
  for (size_t i = 0; i < rpo_immediates.size(); i++) {
    RpoNumber rpo = rpo_immediates[i];
    if (rpo.IsValid()) {
      RpoNumber fw = result[rpo.ToInt()];
      if (fw != rpo) rpo_immediates[i] = fw;
    }
  }
}