v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
bytecode-analysis.cc
Go to the documentation of this file.
1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <utility>
8
14#include "src/utils/ostreams.h"
15
16namespace v8 {
17namespace internal {
18namespace compiler {
19
21using interpreter::BytecodeOperands;
22using interpreter::Bytecodes;
25using interpreter::Register;
26
28 int register_count, Zone* zone)
29 : parameter_count_(parameter_count),
30 bit_vector_(
31 zone->New<BitVector>(parameter_count + register_count, zone)) {}
32
34 if (r.is_parameter()) {
35 bit_vector_->Add(r.ToParameterIndex());
36 } else {
38 }
39}
40
42 if (r.is_parameter()) {
43 for (uint32_t i = 0; i < count; i++) {
45 bit_vector_->Add(r.ToParameterIndex() + i);
46 }
47 } else {
48 for (uint32_t i = 0; i < count; i++) {
50 bit_vector_->Add(parameter_count_ + r.index() + i);
51 }
52 }
53}
54
55
57 bit_vector_->Union(*other.bit_vector_);
58}
59
61 DCHECK_GE(index, 0);
62 DCHECK_LT(index, parameter_count());
63 return bit_vector_->Contains(index);
64}
65
67 DCHECK_GE(index, 0);
68 DCHECK_LT(index, local_count());
69 return bit_vector_->Contains(parameter_count_ + index);
70}
71
// A ResumeJumpTarget records, for one generator suspend point (|suspend_id|),
// the offset the resume dispatch jumps to next (|target_offset|) and the
// offset of the eventual resume point (|final_target_offset|); the two differ
// when the resume is routed through a loop header (see AtLoopHeader, which
// retargets to the header while keeping the original target as final).
ResumeJumpTarget::ResumeJumpTarget(int suspend_id, int target_offset,
                                   int final_target_offset)
    : suspend_id_(suspend_id),
      target_offset_(target_offset),
      final_target_offset_(final_target_offset) {}
77
81
83 const ResumeJumpTarget& next) {
84 return ResumeJumpTarget(next.suspend_id(), loop_header_offset,
85 next.target_offset());
86}
87
88namespace {
89
90template <Bytecode bytecode, OperandType operand_type, size_t i>
91void UpdateInLivenessForOutOperand(
92 BytecodeLivenessState* in_liveness,
93 const interpreter::BytecodeArrayIterator& iterator) {
94 if constexpr (operand_type == OperandType::kRegOut ||
95 operand_type == OperandType::kRegInOut) {
96 Register r = iterator.GetRegisterOperand(i);
97 if (!r.is_parameter()) {
98 in_liveness->MarkRegisterDead(r.index());
99 }
100 } else if constexpr (operand_type == OperandType::kRegOutList) {
101 Register r = iterator.GetRegisterOperand(i);
102 uint32_t reg_count = iterator.GetRegisterCountOperand(i + 1);
103 if (!r.is_parameter()) {
104 for (uint32_t j = 0; j < reg_count; ++j) {
105 DCHECK(!Register(r.index() + j).is_parameter());
106 in_liveness->MarkRegisterDead(r.index() + j);
107 }
108 }
109 } else if constexpr (operand_type == OperandType::kRegOutPair) {
110 Register r = iterator.GetRegisterOperand(i);
111 if (!r.is_parameter()) {
112 DCHECK(!Register(r.index() + 1).is_parameter());
113 in_liveness->MarkRegisterDead(r.index());
114 in_liveness->MarkRegisterDead(r.index() + 1);
115 }
116 } else if constexpr (operand_type == OperandType::kRegOutTriple) {
117 Register r = iterator.GetRegisterOperand(i);
118 if (!r.is_parameter()) {
119 DCHECK(!Register(r.index() + 1).is_parameter());
120 DCHECK(!Register(r.index() + 2).is_parameter());
121 in_liveness->MarkRegisterDead(r.index());
122 in_liveness->MarkRegisterDead(r.index() + 1);
123 in_liveness->MarkRegisterDead(r.index() + 2);
124 }
125 } else {
127 }
128}
129
130template <Bytecode bytecode, OperandType operand_type, size_t i>
131void UpdateInLivenessForInOperand(
132 BytecodeLivenessState* in_liveness,
133 const interpreter::BytecodeArrayIterator& iterator) {
134 if constexpr (operand_type == OperandType::kReg ||
135 operand_type == OperandType::kRegInOut) {
136 Register r = iterator.GetRegisterOperand(i);
137 if (!r.is_parameter()) {
138 in_liveness->MarkRegisterLive(r.index());
139 }
140 } else if constexpr (operand_type == OperandType::kRegPair) {
141 Register r = iterator.GetRegisterOperand(i);
142 if (!r.is_parameter()) {
143 DCHECK(!Register(r.index() + 1).is_parameter());
144 in_liveness->MarkRegisterLive(r.index());
145 in_liveness->MarkRegisterLive(r.index() + 1);
146 }
147 } else if constexpr (operand_type == OperandType::kRegList) {
148 Register r = iterator.GetRegisterOperand(i);
149 uint32_t reg_count = iterator.GetRegisterCountOperand(i + 1);
150 if (!r.is_parameter()) {
151 for (uint32_t j = 0; j < reg_count; ++j) {
152 DCHECK(!interpreter::Register(r.index() + j).is_parameter());
153 in_liveness->MarkRegisterLive(r.index() + j);
154 }
155 }
156 } else {
158 }
159}
160
161template <Bytecode bytecode, ImplicitRegisterUse implicit_register_use,
162 OperandType... operand_types, size_t... operand_index>
163void UpdateInLiveness(BytecodeLivenessState* in_liveness,
164 const interpreter::BytecodeArrayIterator& iterator,
165 std::index_sequence<operand_index...>) {
166 // Special case Suspend and Resume to just pass through liveness.
167 if constexpr (bytecode == Bytecode::kSuspendGenerator) {
168 // The generator object has to be live.
169 in_liveness->MarkRegisterLive(iterator.GetRegisterOperand(0).index());
170 // Suspend additionally reads and returns the accumulator
172 in_liveness->MarkAccumulatorLive();
173 return;
174 } else if constexpr (bytecode == Bytecode::kResumeGenerator) {
175 // The generator object has to be live.
176 in_liveness->MarkRegisterLive(iterator.GetRegisterOperand(0).index());
177 return;
178 }
179
180 // Otherwise, walk all accumulator and register writes and reads.
181 if constexpr (BytecodeOperands::WritesAccumulator(implicit_register_use)) {
182 in_liveness->MarkAccumulatorDead();
183 }
185 !in_liveness->AccumulatorIsLive());
186 (UpdateInLivenessForOutOperand<bytecode, operand_types, operand_index>(
187 in_liveness, iterator),
188 ...);
189
191 implicit_register_use)) {
192 in_liveness->MarkRegisterDead(Register::FromShortStar(bytecode).index());
193 }
194
195 if constexpr (BytecodeOperands::ReadsAccumulator(implicit_register_use)) {
196 in_liveness->MarkAccumulatorLive();
197 }
198 (UpdateInLivenessForInOperand<bytecode, operand_types, operand_index>(
199 in_liveness, iterator),
200 ...);
201}
202
203template <Bytecode bytecode, ImplicitRegisterUse implicit_register_use,
204 OperandType... operand_types>
205void UpdateInLiveness(BytecodeLivenessState* in_liveness,
206 const interpreter::BytecodeArrayIterator& iterator) {
207 UpdateInLiveness<bytecode, implicit_register_use, operand_types...>(
208 in_liveness, iterator,
209 std::make_index_sequence<sizeof...(operand_types)>());
210}
211
#ifdef DEBUG
// Debug-only runtime dispatcher: maps a dynamic |bytecode| value onto the
// statically specialized UpdateInLiveness<Bytecode::k...> template above via
// an exhaustive switch generated from BYTECODE_LIST.
void UpdateInLiveness(Bytecode bytecode, BytecodeLivenessState* in_liveness,
                      const interpreter::BytecodeArrayIterator& iterator) {
  switch (bytecode) {
#define BYTECODE_UPDATE_IN_LIVENESS(Name, ...)                           \
  case Bytecode::k##Name:                                                \
    return UpdateInLiveness<Bytecode::k##Name, __VA_ARGS__>(in_liveness, \
                                                            iterator);
    BYTECODE_LIST(BYTECODE_UPDATE_IN_LIVENESS, BYTECODE_UPDATE_IN_LIVENESS)
#undef BYTECODE_UPDATE_IN_LIVENESS
  }
}
#endif  // DEBUG
225
226template <bool IsFirstUpdate = false>
227void EnsureOutLivenessIsNotAlias(
228 BytecodeLiveness& liveness,
229 BytecodeLivenessState* next_bytecode_in_liveness, Zone* zone) {
230 if (!IsFirstUpdate) {
231 // We should have copied the next bytecode's in liveness already in the
232 // first pass, so on subsequent passes this should already not be an alias.
233 DCHECK_NE(liveness.out, next_bytecode_in_liveness);
234 return;
235 }
236 if (liveness.out == next_bytecode_in_liveness) {
237 // If the out-liveness is aliasing the next bytecode's in-liveness,
238 // reallocate it and copy the data to the newly allocated state.
239 liveness.out =
240 zone->New<BytecodeLivenessState>(*next_bytecode_in_liveness, zone);
241 }
242}
243
244template <bool IsFirstUpdate, Bytecode bytecode>
245void UpdateOutLiveness(BytecodeLiveness& liveness,
246 BytecodeLivenessState* next_bytecode_in_liveness,
247 const interpreter::BytecodeArrayIterator& iterator,
248 DirectHandle<BytecodeArray> bytecode_array,
249 const BytecodeLivenessMap& liveness_map, Zone* zone) {
250 // On subsequent updates, only update out-liveness manually if it isn't
251 // already aliasing the next bytecode's in-liveness.
252 if (!IsFirstUpdate && liveness.out == next_bytecode_in_liveness) return;
253
254 // Special case Suspend and Resume to just pass through liveness.
255 if (bytecode == Bytecode::kSuspendGenerator ||
256 bytecode == Bytecode::kResumeGenerator) {
257 DCHECK_NOT_NULL(next_bytecode_in_liveness);
258 if (IsFirstUpdate) {
259 liveness.out = next_bytecode_in_liveness;
260 } else {
261 liveness.out->Union(*next_bytecode_in_liveness);
262 }
263 return;
264 }
265
266 // Special case SwitchOnGeneratorState to ignore resume liveness, since that's
267 // a pass through. Instead, just consider the fallthrough live, plus the
268 // generator register itself for the resumes.
269 if (bytecode == Bytecode::kSwitchOnGeneratorState) {
270 DCHECK_NOT_NULL(next_bytecode_in_liveness);
271 if (IsFirstUpdate) {
272 // The generator register won't be live in the fallthrough, so copy the
273 // liveness and make it live here.
274 int generator_reg_index = iterator.GetRegisterOperand(0).index();
275 DCHECK(!next_bytecode_in_liveness->RegisterIsLive(generator_reg_index));
276 liveness.out =
277 zone->New<BytecodeLivenessState>(*next_bytecode_in_liveness, zone);
278 liveness.out->MarkRegisterLive(generator_reg_index);
279 } else {
280 liveness.out->Union(*next_bytecode_in_liveness);
281 }
282 return;
283 }
284
285 // Update from next bytecode (unless there isn't one or this is an
286 // unconditional jump).
287 if (next_bytecode_in_liveness != nullptr &&
289 !Bytecodes::Returns(bytecode) &&
291 if (IsFirstUpdate) {
292 // On first update, we can assume that this out-liveness is the same as
293 // the next liveness, and can directly alias it -- we'll allocate a new
294 // one using EnsureOutLivenessIsNotAlias if it needs to be mutated.
295 DCHECK_NULL(liveness.out);
296 liveness.out = next_bytecode_in_liveness;
297 } else {
298 liveness.out->Union(*next_bytecode_in_liveness);
299 }
300 } else if (IsFirstUpdate) {
301 // Otherwise, on the first allocation we need to make sure that there is an
302 // allocated out liveness.
303 DCHECK_NULL(liveness.out);
304 liveness.out = zone->New<BytecodeLivenessState>(
305 bytecode_array->register_count(), zone);
306 }
307
308 DCHECK_NOT_NULL(liveness.out);
309
310 // Update from jump target (if any). Skip loops, we update these manually in
311 // the liveness iterations.
312 if (Bytecodes::IsForwardJump(bytecode)) {
313 int target_offset = iterator.GetJumpTargetOffset();
314 EnsureOutLivenessIsNotAlias<IsFirstUpdate>(liveness,
315 next_bytecode_in_liveness, zone);
316 liveness.out->Union(*liveness_map.GetInLiveness(target_offset));
317 } else if (Bytecodes::IsSwitch(bytecode)) {
318 EnsureOutLivenessIsNotAlias<IsFirstUpdate>(liveness,
319 next_bytecode_in_liveness, zone);
320 for (interpreter::JumpTableTargetOffset entry :
321 iterator.GetJumpTableTargetOffsets()) {
322 liveness.out->Union(*liveness_map.GetInLiveness(entry.target_offset));
323 }
324 }
325
326 // Update from exception handler (if any).
328 // TODO(leszeks): We should look up this range only once per entry.
329 HandlerTable table(*bytecode_array);
330 int handler_index =
331 table.LookupHandlerIndexForRange(iterator.current_offset());
332
333 if (handler_index != HandlerTable::kNoHandlerFound) {
334 EnsureOutLivenessIsNotAlias<IsFirstUpdate>(
335 liveness, next_bytecode_in_liveness, zone);
336 bool was_accumulator_live = liveness.out->AccumulatorIsLive();
337 liveness.out->Union(
338 *liveness_map.GetInLiveness(table.GetRangeHandler(handler_index)));
339 liveness.out->MarkRegisterLive(table.GetRangeData(handler_index));
340 if (!was_accumulator_live) {
341 // The accumulator is reset to the exception on entry into a handler,
342 // and so shouldn't be considered live coming out of this bytecode just
343 // because it's live coming into the handler. So, kill the accumulator
344 // if the handler is the only thing that made it live.
345 liveness.out->MarkAccumulatorDead();
346
347 // TODO(leszeks): Ideally the accumulator wouldn't be considered live at
348 // the start of the handler, but looking up if the current bytecode is
349 // the start of a handler is not free, so we should only do it if we
350 // decide it's necessary.
351 }
352 }
353 }
354}
355
356template <bool IsFirstUpdate = false>
357void UpdateOutLiveness(Bytecode bytecode, BytecodeLiveness& liveness,
358 BytecodeLivenessState* next_bytecode_in_liveness,
359 const interpreter::BytecodeArrayIterator& iterator,
360 DirectHandle<BytecodeArray> bytecode_array,
361 const BytecodeLivenessMap& liveness_map, Zone* zone) {
362 switch (bytecode) {
363#define BYTECODE_UPDATE_OUT_LIVENESS(Name, ...) \
364 case Bytecode::k##Name: \
365 return UpdateOutLiveness<IsFirstUpdate, Bytecode::k##Name>( \
366 liveness, next_bytecode_in_liveness, iterator, bytecode_array, \
367 liveness_map, zone);
369#undef BYTECODE_UPDATE_OUT_LIVENESS
370 }
371}
372
373template <bool IsFirstUpdate, Bytecode bytecode,
374 ImplicitRegisterUse implicit_register_use,
375 OperandType... operand_types>
376void UpdateLiveness(BytecodeLiveness& liveness,
377 BytecodeLivenessState** next_bytecode_in_liveness,
378 const interpreter::BytecodeArrayIterator& iterator,
379 DirectHandle<BytecodeArray> bytecode_array,
380 const BytecodeLivenessMap& liveness_map, Zone* zone) {
381 UpdateOutLiveness<IsFirstUpdate, bytecode>(
382 liveness, *next_bytecode_in_liveness, iterator, bytecode_array,
383 liveness_map, zone);
384 if (IsFirstUpdate) {
385 // On the first update, allocate the in-liveness as a copy of the
386 // out-liveness.
387 DCHECK_NULL(liveness.in);
388 liveness.in = zone->New<BytecodeLivenessState>(*liveness.out, zone);
389 } else {
390 // On subsequent updates, copy liveness from the out vector.
391 // TODO(leszeks): If this copy doesn't change liveness, we could
392 // opportunistically terminate early.
393 liveness.in->CopyFrom(*liveness.out);
394 }
395 UpdateInLiveness<bytecode, implicit_register_use, operand_types...>(
396 liveness.in, iterator);
397
398 *next_bytecode_in_liveness = liveness.in;
399}
400
401template <bool IsFirstUpdate = false>
402void UpdateLiveness(Bytecode bytecode, BytecodeLiveness& liveness,
403 BytecodeLivenessState** next_bytecode_in_liveness,
404 const interpreter::BytecodeArrayIterator& iterator,
405 DirectHandle<BytecodeArray> bytecode_array,
406 const BytecodeLivenessMap& liveness_map, Zone* zone) {
407 switch (bytecode) {
408#define BYTECODE_UPDATE_LIVENESS(Name, ...) \
409 case Bytecode::k##Name: \
410 return UpdateLiveness<IsFirstUpdate, Bytecode::k##Name, __VA_ARGS__>( \
411 liveness, next_bytecode_in_liveness, iterator, bytecode_array, \
412 liveness_map, zone);
414#undef BYTECODE_UPDATE_LIVENESS
415 }
416}
417
418void UpdateAssignments(Bytecode bytecode, BytecodeLoopAssignments* assignments,
419 const interpreter::BytecodeArrayIterator& iterator) {
420 int num_operands = Bytecodes::NumberOfOperands(bytecode);
421 const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
422
423 for (int i = 0; i < num_operands; ++i) {
424 switch (operand_types[i]) {
425 case OperandType::kRegInOut:
426 case OperandType::kRegOut: {
427 assignments->Add(iterator.GetRegisterOperand(i));
428 break;
429 }
430 case OperandType::kRegOutList: {
431 interpreter::Register r = iterator.GetRegisterOperand(i++);
432 uint32_t reg_count = iterator.GetRegisterCountOperand(i);
433 assignments->AddList(r, reg_count);
434 break;
435 }
436 case OperandType::kRegOutPair: {
437 assignments->AddList(iterator.GetRegisterOperand(i), 2);
438 break;
439 }
440 case OperandType::kRegOutTriple: {
441 assignments->AddList(iterator.GetRegisterOperand(i), 3);
442 break;
443 }
444 default:
446 break;
447 }
448 }
449
450 if (Bytecodes::WritesImplicitRegister(bytecode)) {
451 assignments->Add(interpreter::Register::FromShortStar(bytecode));
452 }
453}
454
455} // namespace
456
458 public:
459 std::ostream& PrintLivenessTo(std::ostream& os) const;
460
469
470 void Analyze();
471
472 private:
473 template <Bytecode BC>
474 inline void AnalyzeBCInLoop(int current_offset, LoopInfo* current_loop_info) {
475 }
476
477 void PushLoop(int loop_header, int loop_end) {
478 DCHECK_LT(loop_header, loop_end);
479 DCHECK_LT(loop_stack_.top().header_offset, loop_header);
480 DCHECK_EQ(res_.end_to_header_.find(loop_end), res_.end_to_header_.end());
481 DCHECK_EQ(res_.header_to_info_.find(loop_header),
482 res_.header_to_info_.end());
483
484 int parent_offset = loop_stack_.top().header_offset;
485
486 res_.end_to_header_.insert({loop_end, loop_header});
487 auto it = res_.header_to_info_.insert(
488 {loop_header, LoopInfo(parent_offset, loop_header, loop_end,
490 bytecode_array()->register_count(), zone())});
491 // Get the loop info pointer from the output of insert.
492 LoopInfo* loop_info = &it.first->second;
493
494 if (loop_stack_.top().loop_info) {
495 loop_stack_.top().loop_info->mark_not_innermost();
496 }
497 loop_stack_.push({loop_header, loop_info});
498 }
499
500#if DEBUG
501 bool ResumeJumpTargetsAreValid();
502 bool ResumeJumpTargetLeavesResolveSuspendIds(
503 int parent_offset,
505 std::map<int, int>* unresolved_suspend_ids);
506
507 bool LivenessIsValid();
508#endif
509
510 bool analyze_liveness() const { return res_.analyze_liveness_; }
511 Zone* zone() const { return zone_; }
514
519
526};
527
528template <>
530 Bytecode::kSuspendGenerator>(int current_offset,
531 LoopInfo* current_loop_info) {
532 int suspend_id = iterator_.GetUnsignedImmediateOperand(3);
533 int resume_offset = current_offset + iterator_.current_bytecode_size();
534 current_loop_info->AddResumeTarget(
535 ResumeJumpTarget::Leaf(suspend_id, resume_offset));
536}
537
538template <>
540 Bytecode::kResumeGenerator>(int current_offset,
541 LoopInfo* current_loop_info) {
542 current_loop_info->mark_resumable();
543}
544
548
549 loop_stack_.push({-1, nullptr});
550
551 BytecodeLivenessState* next_bytecode_in_liveness = nullptr;
552 int osr_loop_end_offset_ = res_.osr_bailout_id_.ToInt();
553 DCHECK_EQ(osr_loop_end_offset_ < 0, res_.osr_bailout_id_.IsNone());
554
555 if (analyze_liveness()) {
557 }
558
561 int current_offset = iterator_.current_offset();
562
563 if (bytecode == Bytecode::kJumpLoop) {
564 // Every byte up to and including the last byte within the backwards jump
565 // instruction is considered part of the loop, set loop end accordingly.
566 int loop_end = current_offset + iterator_.current_bytecode_size();
567 int loop_header = iterator_.GetJumpTargetOffset();
568 PushLoop(loop_header, loop_end);
569
570 if (current_offset == osr_loop_end_offset_) {
571 res_.osr_entry_point_ = loop_header;
572 } else if (current_offset < osr_loop_end_offset_) {
573 // Assert that we've found the osr_entry_point if we've gone past the
574 // osr_loop_end_offset. Note, we are iterating the bytecode in reverse,
575 // so the less-than in the above condition is correct.
577 }
578
579 // Save the index so that we can do another pass later.
580 if (analyze_liveness()) {
582 }
583 }
584
585 // We have to pop from loop_stack_ if:
586 // 1) We entered the body of the loop
587 // 2) If we have a JumpLoop that jumps to itself (i.e an empty loop)
588 bool in_loop = loop_stack_.size() > 1 &&
589 (bytecode != Bytecode::kJumpLoop ||
590 iterator_.GetJumpTargetOffset() == current_offset);
591
592 if (in_loop) {
593 LoopStackEntry& current_loop = loop_stack_.top();
594 LoopInfo* current_loop_info = current_loop.loop_info;
595
596 // TODO(leszeks): Ideally, we'd only set values that were assigned in
597 // the loop *and* are live when the loop exits. However, this requires
598 // tracking the out-liveness of *all* loop exits, which is not
599 // information we currently have.
600 UpdateAssignments(bytecode, &current_loop_info->assignments(), iterator_);
601
602 switch (bytecode) {
603#define CASE(BC, ...) \
604 case Bytecode::k##BC: \
605 AnalyzeBCInLoop<Bytecode::k##BC>(current_offset, current_loop_info); \
606 break;
608#undef CASE
609 }
610
611 // If we've reached the header of the loop, pop it off the stack.
612 if (current_offset == current_loop.header_offset) {
613 loop_stack_.pop();
614 if (loop_stack_.size() > 1) {
615 // If there is still an outer loop, propagate inner loop assignments.
616 LoopInfo* parent_loop_info = loop_stack_.top().loop_info;
617
618 if (current_loop_info->resumable()) {
619 parent_loop_info->mark_resumable();
620 }
621
622 parent_loop_info->assignments().Union(
623 current_loop_info->assignments());
624
625 // Also, propagate resume targets. Instead of jumping to the target
626 // itself, the outer loop will jump to this loop header for any
627 // targets that are inside the current loop, so that this loop stays
628 // reducible. Hence, a nested loop of the form:
629 //
630 // switch (#1 -> suspend1, #2 -> suspend2)
631 // loop {
632 // suspend1: suspend #1
633 // loop {
634 // suspend2: suspend #2
635 // }
636 // }
637 //
638 // becomes:
639 //
640 // switch (#1 -> loop1, #2 -> loop1)
641 // loop1: loop {
642 // switch (#1 -> suspend1, #2 -> loop2)
643 // suspend1: suspend #1
644 // loop2: loop {
645 // switch (#2 -> suspend2)
646 // suspend2: suspend #2
647 // }
648 // }
649 for (const auto& target : current_loop_info->resume_jump_targets()) {
650 parent_loop_info->AddResumeTarget(
651 ResumeJumpTarget::AtLoopHeader(current_offset, target));
652 }
653
654 } else {
655 // Otherwise, just propagate inner loop suspends to top-level.
656 for (const auto& target : current_loop_info->resume_jump_targets()) {
657 res_.resume_jump_targets_.push_back(
658 ResumeJumpTarget::AtLoopHeader(current_offset, target));
659 }
660 }
661 }
662 } else if (bytecode == Bytecode::kSuspendGenerator) {
663 // If we're not in a loop, we still need to look for suspends.
664 // TODO(leszeks): It would be nice to de-duplicate this with the in-loop
665 // case
666 int suspend_id = iterator_.GetUnsignedImmediateOperand(3);
667 int resume_offset = current_offset + iterator_.current_bytecode_size();
668 res_.resume_jump_targets_.push_back(
669 ResumeJumpTarget::Leaf(suspend_id, resume_offset));
670 }
671
672 if (analyze_liveness()) {
673 BytecodeLiveness& liveness =
674 liveness_map().InsertNewLiveness(current_offset);
675 UpdateLiveness<true>(bytecode, liveness, &next_bytecode_in_liveness,
677 }
678 }
679
680 DCHECK_EQ(loop_stack_.size(), 1u);
681 DCHECK_EQ(loop_stack_.top().header_offset, -1);
682
683 DCHECK(ResumeJumpTargetsAreValid());
684
685 if (!analyze_liveness()) return;
686
687 // At this point, every bytecode has a valid in and out liveness, except for
688 // propagating liveness across back edges (i.e. JumpLoop). Subsequent liveness
689 // analysis iterations can only add additional liveness bits that are pulled
690 // across these back edges.
691 //
692 // Furthermore, a loop header's in-liveness can only change based on any
693 // bytecodes *after* the loop end -- it cannot change as a result of the
694 // JumpLoop liveness being updated, as the only liveness bits than can be
695 // added to the loop body are those of the loop header.
696 //
697 // So, if we know that the liveness of bytecodes after a loop header won't
698 // change (e.g. because there are no loops in them, or we have already ensured
699 // those loops are valid), we can safely update the loop end and pass over the
700 // loop body, and then never have to pass over that loop end again, because we
701 // have shown that its target, the loop header, can't change from the entries
702 // after the loop, and can't change from any loop body pass.
703 //
704 // This means that in a pass, we can iterate backwards over the bytecode
705 // array, process any loops that we encounter, and on subsequent passes we can
706 // skip processing those loops (though we still have to process inner loops).
707 //
708 // Equivalently, we can queue up loop ends from back to front, and pass over
709 // the loops in that order, as this preserves both the bottom-to-top and
710 // outer-to-inner requirements.
711
712 for (int loop_end_index : loop_end_index_queue_) {
713 iterator_.GoToIndex(loop_end_index);
714
715 DCHECK_EQ(iterator_.current_bytecode(), Bytecode::kJumpLoop);
716
717 int header_offset = iterator_.GetJumpTargetOffset();
718 int end_offset = iterator_.current_offset();
719
720 BytecodeLiveness& header_liveness =
721 liveness_map().GetLiveness(header_offset);
722 BytecodeLiveness& end_liveness = liveness_map().GetLiveness(end_offset);
723
724 if (!end_liveness.out->UnionIsChanged(*header_liveness.in)) {
725 // Only update the loop body if the loop end liveness changed.
726 continue;
727 }
728 end_liveness.in->CopyFrom(*end_liveness.out);
729 next_bytecode_in_liveness = end_liveness.in;
730
731 // Advance into the loop body.
732 --iterator_;
733 for (; iterator_.current_offset() > header_offset; --iterator_) {
735 int current_offset = iterator_.current_offset();
736 BytecodeLiveness& liveness = liveness_map().GetLiveness(current_offset);
737 UpdateLiveness(bytecode, liveness, &next_bytecode_in_liveness, iterator_,
739 }
740 // Now we are at the loop header. Since the in-liveness of the header can't
741 // change, we need only to update the out-liveness.
742 UpdateOutLiveness(iterator_.current_bytecode(), header_liveness,
743 next_bytecode_in_liveness, iterator_, bytecode_array(),
744 liveness_map(), zone());
745 }
746
748 if (v8_flags.trace_environment_liveness) {
749 StdoutStream of;
750 PrintLivenessTo(of);
751 }
752
753 DCHECK(LivenessIsValid());
754}
755
757 return header_to_info_.find(offset) != header_to_info_.end();
758}
759
761 auto loop_end_to_header = end_to_header_.upper_bound(offset);
762 // If there is no next end => offset is not in a loop.
763 if (loop_end_to_header == end_to_header_.end()) {
764 return -1;
765 }
766 // If the header precedes the offset, this is the loop
767 //
768 // .> header <--loop_end_to_header
769 // |
770 // | <--offset
771 // |
772 // `- end
773 if (loop_end_to_header->second <= offset) {
774 return loop_end_to_header->second;
775 }
776 // Otherwise there is a (potentially nested) loop after this offset.
777 //
778 // <--offset
779 //
780 // .> header
781 // |
782 // | .> header <--loop_end_to_header
783 // | |
784 // | `- end
785 // |
786 // `- end
787 // We just return the parent of the next loop (might be -1).
788 DCHECK(header_to_info_.upper_bound(offset) != header_to_info_.end());
789
790 return header_to_info_.upper_bound(offset)->second.parent_offset();
791}
792
794 DCHECK(GetLoopInfoFor(header_offset).innermost());
795 auto loop_end_to_header = end_to_header_.upper_bound(header_offset + 1);
796 DCHECK_EQ(loop_end_to_header->second, header_offset);
797 return loop_end_to_header->first;
798}
799
800const LoopInfo& BytecodeAnalysis::GetLoopInfoFor(int header_offset) const {
801 DCHECK(IsLoopHeader(header_offset));
802
803 return header_to_info_.find(header_offset)->second;
804}
805
806const LoopInfo* BytecodeAnalysis::TryGetLoopInfoFor(int header_offset) const {
807 auto it = header_to_info_.find(header_offset);
808 if (it == header_to_info_.end()) return nullptr;
809 return &it->second;
810}
811
813 int offset) const {
814 if (!analyze_liveness_) return nullptr;
815
817}
818
820 int offset) const {
821 if (!analyze_liveness_) return nullptr;
822
824}
825
827 std::ostream& os) const {
828 interpreter::BytecodeArrayIterator iterator(bytecode_array_);
829
830 for (; !iterator.done(); iterator.Advance()) {
831 int current_offset = iterator.current_offset();
832
833 const BytecodeLivenessState* in_liveness =
834 res_.GetInLivenessFor(current_offset);
835 const BytecodeLivenessState* out_liveness =
836 res_.GetOutLivenessFor(current_offset);
837
838 os << ToString(*in_liveness) << " -> " << ToString(*out_liveness) << " | "
839 << current_offset << ": ";
840 iterator.PrintTo(os) << std::endl;
841 }
842
843 return os;
844}
845
846#if DEBUG
// Debug-only consistency check for generator resume dispatch: verifies that
// every suspend id listed in the SwitchOnGeneratorState jump table is resolved
// by exactly one leaf resume target (at top-level or inside some loop), and
// that if there is no such switch, no resume targets were recorded at all.
// Returns true if everything is consistent; prints diagnostics to stderr
// otherwise.
bool BytecodeAnalysis::BytecodeAnalysisImpl::ResumeJumpTargetsAreValid() {
  bool valid = true;

  // Find the generator switch.
  interpreter::BytecodeArrayRandomIterator iterator(bytecode_array_, zone());
  for (iterator.GoToStart(); iterator.IsValid(); ++iterator) {
    if (iterator.current_bytecode() == Bytecode::kSwitchOnGeneratorState) {
      break;
    }
  }

  // If the iterator is invalid, we've reached the end without finding the
  // generator switch. So, ensure there are no jump targets and exit.
  if (!iterator.IsValid()) {
    // Check top-level.
    if (!res_.resume_jump_targets().empty()) {
      PrintF(stderr,
             "Found %zu top-level resume targets but no resume switch\n",
             res_.resume_jump_targets().size());
      valid = false;
    }
    // Check loops.
    for (const std::pair<const int, LoopInfo>& loop_info :
         res_.header_to_info_) {
      if (!loop_info.second.resume_jump_targets().empty()) {
        PrintF(stderr,
               "Found %zu resume targets at loop at offset %d, but no resume "
               "switch\n",
               loop_info.second.resume_jump_targets().size(), loop_info.first);
        valid = false;
      }
    }

    return valid;
  }

  // Otherwise, we've found the resume switch. Check that the top level jumps
  // only to leaves and loop headers, then check that each loop header handles
  // all the unresolved jumps, also jumping only to leaves and inner loop
  // headers.

  // First collect all required suspend ids.
  // Maps suspend id -> expected final resume offset, taken from the switch's
  // jump table; entries are erased as leaf targets resolve them.
  std::map<int, int> unresolved_suspend_ids;
  for (interpreter::JumpTableTargetOffset offset :
       iterator.GetJumpTableTargetOffsets()) {
    int suspend_id = offset.case_value;
    int resume_offset = offset.target_offset;

    unresolved_suspend_ids[suspend_id] = resume_offset;
  }

  // Check top-level (parent offset -1 denotes "no enclosing loop").
  if (!ResumeJumpTargetLeavesResolveSuspendIds(-1, res_.resume_jump_targets(),
                                               &unresolved_suspend_ids)) {
    valid = false;
  }
  // Check loops.
  for (const std::pair<const int, LoopInfo>& loop_info : res_.header_to_info_) {
    if (!ResumeJumpTargetLeavesResolveSuspendIds(
            loop_info.first, loop_info.second.resume_jump_targets(),
            &unresolved_suspend_ids)) {
      valid = false;
    }
  }

  // Check that everything is resolved.
  if (!unresolved_suspend_ids.empty()) {
    PrintF(stderr,
           "Found suspend ids that are not resolved by a final leaf resume "
           "jump:\n");

    for (const std::pair<const int, int>& target : unresolved_suspend_ids) {
      PrintF(stderr, " %d -> %d\n", target.first, target.second);
    }
    valid = false;
  }

  return valid;
}
926
// Debug-only helper for ResumeJumpTargetsAreValid. Validates the resume
// targets recorded for a single scope (|parent_offset| == -1 for the function
// top-level, otherwise a loop header offset): leaf targets must jump to the
// expected ResumeGenerator bytecode and consume their suspend id from
// |unresolved_suspend_ids|; non-leaf targets must point at a loop header whose
// direct parent is this scope. Returns true if all targets are valid; prints
// diagnostics to stderr otherwise.
bool BytecodeAnalysis::BytecodeAnalysisImpl::
    ResumeJumpTargetLeavesResolveSuspendIds(
        int parent_offset,
        const ZoneVector<ResumeJumpTarget>& resume_jump_targets,
        std::map<int, int>* unresolved_suspend_ids) {
  bool valid = true;
  for (const ResumeJumpTarget& target : resume_jump_targets) {
    std::map<int, int>::iterator it =
        unresolved_suspend_ids->find(target.suspend_id());
    if (it == unresolved_suspend_ids->end()) {
      PrintF(
          stderr,
          "No unresolved suspend found for resume target with suspend id %d\n",
          target.suspend_id());
      valid = false;
      continue;
    }
    int expected_target = it->second;

    if (target.is_leaf()) {
      // Leaves should have the expected target as their target.
      if (target.target_offset() != expected_target) {
        PrintF(
            stderr,
            "Expected leaf resume target for id %d to have target offset %d, "
            "but had %d\n",
            target.suspend_id(), expected_target, target.target_offset());
        valid = false;
      } else {
        // Make sure we're resuming to a Resume bytecode
        interpreter::BytecodeArrayIterator iterator(bytecode_array_,
                                                    target.target_offset());
        if (iterator.current_bytecode() != Bytecode::kResumeGenerator) {
          PrintF(stderr,
                 "Expected resume target for id %d, offset %d, to be "
                 "ResumeGenerator, but found %s\n",
                 target.suspend_id(), target.target_offset(),
                 Bytecodes::ToString(iterator.current_bytecode()));

          valid = false;
        }
      }
      // We've resolved this suspend id, so erase it to make sure we don't
      // resolve it twice.
      unresolved_suspend_ids->erase(it);
    } else {
      // Non-leaves should have a direct inner loop header as their target.
      if (!res_.IsLoopHeader(target.target_offset())) {
        PrintF(stderr,
               "Expected non-leaf resume target for id %d to have a loop "
               "header at target offset %d\n",
               target.suspend_id(), target.target_offset());
        valid = false;
      } else {
        LoopInfo loop_info = res_.GetLoopInfoFor(target.target_offset());
        if (loop_info.parent_offset() != parent_offset) {
          PrintF(stderr,
                 "Expected non-leaf resume target for id %d to have a direct "
                 "inner loop at target offset %d\n",
                 target.suspend_id(), target.target_offset());
          valid = false;
        }
        // If the target loop is a valid inner loop, we'll check its validity
        // when we analyze its resume targets.
      }
    }
  }
  return valid;
}
996
// Debug-only validation of the computed liveness. Checks two properties:
// (1) the liveness analysis has reached a fixed point — one extra backwards
//     pass must not change any in/out liveness; and
// (2) the accumulator is dead at the target of any jump that leaves a loop
//     or follows a loop back-edge.
// On failure, dumps the full bytecode annotated with liveness (and loop
// indentation marks) to stderr, underlining the offending entry, and
// returns false.
bool BytecodeAnalysis::BytecodeAnalysisImpl::LivenessIsValid() {
  interpreter::BytecodeArrayRandomIterator iterator(bytecode_array_, zone());

  // Scratch state holding the previously-computed liveness of the bytecode
  // currently being re-checked.
  BytecodeLivenessState previous_liveness(bytecode_array_->register_count(),
                                          zone());

  // Offset of the first bytecode whose liveness is not stable (-1 if all
  // stable). which_invalid is 0 when the in-liveness is wrong, 1 when the
  // out-liveness is wrong; invalid_liveness keeps the rejected value for the
  // dump below.
  int invalid_offset = -1;
  int which_invalid = -1;
  BytecodeLivenessState invalid_liveness(bytecode_array_->register_count(),
                                         zone());

  BytecodeLivenessState* next_bytecode_in_liveness = nullptr;

  // Ensure that there are no liveness changes if we iterate one more time.
  for (iterator.GoToEnd(); iterator.IsValid(); --iterator) {
    Bytecode bytecode = iterator.current_bytecode();

    int current_offset = iterator.current_offset();

    BytecodeLiveness& liveness = liveness_map().GetLiveness(current_offset);

    previous_liveness.CopyFrom(*liveness.out);

    UpdateOutLiveness(bytecode, liveness, next_bytecode_in_liveness, iterator,
                      bytecode_array_, liveness_map(), zone());
    // UpdateOutLiveness skips kJumpLoop, so we update it manually.
    if (bytecode == Bytecode::kJumpLoop) {
      int target_offset = iterator.GetJumpTargetOffset();
      liveness.out->Union(*liveness_map().GetInLiveness(target_offset));
    }

    if (!liveness.out->Equals(previous_liveness)) {
      // Out-liveness changed on the extra pass: not a fixed point.
      invalid_liveness.CopyFrom(*liveness.out);
      // Reset the invalid liveness.
      liveness.out->CopyFrom(previous_liveness);
      invalid_offset = current_offset;
      which_invalid = 1;
      break;
    }

    previous_liveness.CopyFrom(*liveness.in);

    liveness.in->CopyFrom(*liveness.out);
    UpdateInLiveness(bytecode, liveness.in, iterator);

    if (!liveness.in->Equals(previous_liveness)) {
      // In-liveness changed on the extra pass: not a fixed point.
      invalid_liveness.CopyFrom(*liveness.in);
      // Reset the invalid liveness.
      liveness.in->CopyFrom(previous_liveness);
      invalid_offset = current_offset;
      which_invalid = 0;
      break;
    }

    next_bytecode_in_liveness = liveness.in;
  }

  // Ensure that the accumulator is not live when jumping out of a loop, or on
  // the back-edge of a loop. (Skipped if the fixed-point check above already
  // failed.)
  for (iterator.GoToStart(); iterator.IsValid() && invalid_offset == -1;
       ++iterator) {
    Bytecode bytecode = iterator.current_bytecode();
    int current_offset = iterator.current_offset();
    int loop_header = res_.GetLoopOffsetFor(current_offset);

    // We only care if we're inside a loop.
    if (loop_header == -1) continue;

    // We only care about jumps.
    if (!Bytecodes::IsJump(bytecode)) continue;

    int jump_target = iterator.GetJumpTargetOffset();

    // If this is a forward jump to somewhere else in the same loop, ignore it.
    if (Bytecodes::IsForwardJump(bytecode) &&
        res_.GetLoopOffsetFor(jump_target) == loop_header) {
      continue;
    }

    // The accumulator must be dead at the start of the target of the jump.
    if (liveness_map().GetLiveness(jump_target).in->AccumulatorIsLive()) {
      invalid_offset = jump_target;
      which_invalid = 0;
      break;
    }
  }

  if (invalid_offset != -1) {
    OFStream of(stderr);
    of << "Invalid liveness:" << std::endl;

    // Dump the bytecode, annotated with the liveness and marking loops.

    int loop_indent = 0;

    interpreter::BytecodeArrayIterator forward_iterator(bytecode_array_);
    for (; !forward_iterator.done(); forward_iterator.Advance()) {
      int current_offset = forward_iterator.current_offset();
      const BytecodeLivenessState* in_liveness =
          res_.GetInLivenessFor(current_offset);
      const BytecodeLivenessState* out_liveness =
          res_.GetOutLivenessFor(current_offset);

      std::string in_liveness_str = ToString(*in_liveness);
      std::string out_liveness_str = ToString(*out_liveness);

      of << in_liveness_str << " | " << out_liveness_str << " : "
         << current_offset << " : ";

      // Draw loop back edges by indenting everything between loop headers and
      // jump loop instructions.
      if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
        loop_indent--;
      }
      for (int i = 0; i < loop_indent; ++i) {
        of << "| ";
      }
      if (forward_iterator.current_bytecode() == Bytecode::kJumpLoop) {
        of << "`-";
      } else if (res_.IsLoopHeader(current_offset)) {
        of << ".>";
        loop_indent++;
      }
      forward_iterator.PrintTo(of);
      if (Bytecodes::IsJump(forward_iterator.current_bytecode())) {
        of << " (@" << forward_iterator.GetJumpTargetOffset() << ")";
      }
      of << std::endl;

      if (current_offset == invalid_offset) {
        // Underline the invalid liveness (in-liveness column when
        // which_invalid == 0, out-liveness column otherwise).
        char in_underline = which_invalid == 0 ? '^' : ' ';
        char out_underline = which_invalid == 0 ? ' ' : '^';
        of << std::string(in_liveness_str.size(), in_underline) << " "
           << std::string(out_liveness_str.size(), out_underline);

        // Make sure to draw the loop indentation marks on this additional line.
        of << " : " << current_offset << " : ";
        for (int i = 0; i < loop_indent; ++i) {
          of << "| ";
        }

        of << std::endl;

        // Print the invalid liveness.
        if (which_invalid == 0) {
          of << ToString(invalid_liveness) << " "
             << std::string(out_liveness_str.size(), ' ');
        } else {
          of << std::string(in_liveness_str.size(), ' ') << " "
             << ToString(invalid_liveness);
        }

        // Make sure to draw the loop indentation marks on this additional line.
        of << " : " << current_offset << " : ";
        for (int i = 0; i < loop_indent; ++i) {
          of << "| ";
        }

        of << std::endl;
      }
    }
  }

  return invalid_offset == -1;
}
1163#endif
1164
1167 bool analyze_liveness)
1169 analyze_liveness_(analyze_liveness),
1171 end_to_header_(zone),
1172 header_to_info_(zone),
1173 osr_entry_point_(-1) {
1174 BytecodeAnalysisImpl analysis(*this, bytecode_array, zone);
1175 analysis.Analyze();
1178}
1179
1180} // namespace compiler
1181} // namespace internal
1182} // namespace v8
friend Zone
Definition asm-types.cc:195
int16_t parameter_count
Definition builtins.cc:67
interpreter::Bytecode bytecode
Definition builtins.cc:43
#define BYTECODE_UPDATE_OUT_LIVENESS(Name,...)
#define BYTECODE_UPDATE_LIVENESS(Name,...)
#define BYTECODE_LIST(V, V_TSA)
Definition bytecodes.h:479
bool Contains(int i) const
Definition bit-vector.h:180
void Union(const BitVector &other)
Definition bit-vector.h:202
constexpr bool IsNone() const
Definition utils.h:684
constexpr int ToInt() const
Definition utils.h:673
static const int kNoHandlerFound
void push_back(const T &value)
void AnalyzeBCInLoop(int current_offset, LoopInfo *current_loop_info)
BytecodeAnalysisImpl(BytecodeAnalysis &res, Handle< BytecodeArray > bytecode_array, Zone *zone)
const BytecodeLivenessState * GetInLivenessFor(int offset) const
const ZoneVector< ResumeJumpTarget > & resume_jump_targets() const
const BytecodeLivenessState * GetOutLivenessFor(int offset) const
std::optional< BytecodeLivenessMap > liveness_map_
const LoopInfo & GetLoopInfoFor(int header_offset) const
const LoopInfo * TryGetLoopInfoFor(int header_offset) const
BytecodeAnalysis(Handle< BytecodeArray > bytecode_array, Zone *zone, BytecodeOffset osr_bailout_id, bool analyze_liveness)
int GetLoopEndOffsetForInnermost(int header_offset) const
ZoneVector< ResumeJumpTarget > resume_jump_targets_
BytecodeLivenessState * GetInLiveness(int offset)
BytecodeLivenessState * GetOutLiveness(int offset)
void CopyFrom(const BytecodeLivenessState &other)
bool UnionIsChanged(const BytecodeLivenessState &other)
void Union(const BytecodeLoopAssignments &other)
BytecodeLoopAssignments(int parameter_count, int register_count, Zone *zone)
void AddList(interpreter::Register r, uint32_t count)
ResumeJumpTarget(int suspend_id, int target_offset, int final_target_offset)
static ResumeJumpTarget Leaf(int suspend_id, int target_offset)
static ResumeJumpTarget AtLoopHeader(int loop_header_offset, const ResumeJumpTarget &next)
uint32_t GetUnsignedImmediateOperand(int operand_index) const
static constexpr bool WritesImplicitRegister(ImplicitRegisterUse implicit_register_use)
static constexpr bool ReadsAccumulator(ImplicitRegisterUse implicit_register_use)
static constexpr bool ClobbersAccumulator(ImplicitRegisterUse implicit_register_use)
static constexpr bool WritesAccumulator(ImplicitRegisterUse implicit_register_use)
static constexpr bool UnconditionallyThrows(Bytecode bytecode)
Definition bytecodes.h:879
static constexpr bool Returns(Bytecode bytecode)
Definition bytecodes.h:872
static const OperandType * GetOperandTypes(Bytecode bytecode)
Definition bytecodes.h:903
static constexpr bool IsWithoutExternalSideEffects(Bytecode bytecode)
Definition bytecodes.h:826
static constexpr bool IsForwardJump(Bytecode bytecode)
Definition bytecodes.h:805
static constexpr bool IsSwitch(Bytecode bytecode)
Definition bytecodes.h:819
static bool IsRegisterOutputOperandType(OperandType operand_type)
Definition bytecodes.cc:241
static bool ReadsAccumulator(Bytecode bytecode)
Definition bytecodes.h:687
static bool WritesImplicitRegister(Bytecode bytecode)
Definition bytecodes.h:711
static bool IsRegisterInputOperandType(OperandType operand_type)
Definition bytecodes.cc:222
static int NumberOfOperands(Bytecode bytecode)
Definition bytecodes.h:886
static constexpr bool IsJump(Bytecode bytecode)
Definition bytecodes.h:798
static const char * ToString(Bytecode bytecode)
Definition bytecodes.cc:123
static constexpr bool IsUnconditionalJump(Bytecode bytecode)
Definition bytecodes.h:770
static constexpr Register FromShortStar(Bytecode bytecode)
uint32_t count
int32_t offset
int r
Definition mul-fft.cc:298
std::string ToString(const BytecodeLivenessState &liveness)
void PrintF(const char *format,...)
Definition utils.cc:39
V8_EXPORT_PRIVATE FlagValues v8_flags
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_NULL(val)
Definition logging.h:491
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
void AddResumeTarget(const ResumeJumpTarget &target)
const ZoneVector< ResumeJumpTarget > & resume_jump_targets() const
BytecodeLoopAssignments & assignments()