v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
frames.cc
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <cstdint>
8#include <memory>
9#include <optional>
10#include <sstream>
11
13#include "src/api/api-natives.h"
14#include "src/base/bits.h"
21#include "src/common/globals.h"
27#include "src/ic/ic-stats.h"
30#include "src/objects/code.h"
32#include "src/objects/slots.h"
33#include "src/objects/smi.h"
35#include "src/roots/roots.h"
39
40#if V8_ENABLE_WEBASSEMBLY
42#include "src/wasm/stacks.h"
47#if V8_ENABLE_DRUMBRAKE
49#endif // V8_ENABLE_DRUMBRAKE
50#endif // V8_ENABLE_WEBASSEMBLY
51
52namespace v8 {
53namespace internal {
54
56 nullptr;
57
58namespace {
59
60Address AddressOf(const StackHandler* handler) {
61 Address raw = handler->address();
62#ifdef V8_USE_ADDRESS_SANITIZER
63 // ASan puts C++-allocated StackHandler markers onto its fake stack.
64 // We work around that by storing the real stack address in the "padding"
65 // field. StackHandlers allocated from generated code have 0 as padding.
66 Address padding =
68 if (padding != 0) return padding;
69#endif
70 return raw;
71}
72
73} // namespace
74
75// Iterator that supports traversing the stack handlers of a
76// particular frame. Needs to know the top of the handler chain.
78 public:
80 : limit_(frame->fp()), handler_(handler) {
81#if V8_ENABLE_WEBASSEMBLY
82#if !V8_ENABLE_DRUMBRAKE && !USE_SIMULATOR
83 // Make sure the handler has already been unwound to this frame. With stack
84 // switching this is not equivalent to the inequality below, because the
85 // frame and the handler could be in different stacks.
86 DCHECK_IMPLIES(frame->isolate()->wasm_stacks().empty(),
87 frame->InFastCCall() || frame->sp() <= AddressOf(handler));
88#endif // !V8_ENABLE_DRUMBRAKE || !USE_SIMULATOR
89
90 // For CWasmEntry frames, the handler was registered by the last C++
91 // frame (Execution::CallWasm), so even though its address is already
92 // beyond the limit, we know we always want to unwind one handler.
93 if (frame->is_c_wasm_entry()) {
95#if V8_ENABLE_DRUMBRAKE
96 // Do the same for GenericWasmToJsInterpreterWrapper frames.
97 } else if (v8_flags.wasm_jitless && frame->is_wasm_to_js()) {
99#ifdef USE_SIMULATOR
100 // If we are running in the simulator, the handler_ address here will
101 // refer to the 'actual' stack, not to the 'simulated' stack, so we need
102 // to fix 'limit_' to make sure that the StackHandlerIterator won't skip
103 // any handler.
104 limit_ = 0;
105#endif // USE_SIMULATOR
106#endif // V8_ENABLE_DRUMBRAKE
107 }
108#else
109 // Make sure the handler has already been unwound to this frame.
110 DCHECK_LE(frame->sp(), AddressOf(handler));
111#endif // V8_ENABLE_WEBASSEMBLY
112 }
113
114 StackHandler* handler() const { return handler_; }
115
116 bool done() { return handler_ == nullptr || AddressOf(handler_) > limit_; }
117 void Advance() {
118 DCHECK(!done());
120 }
121
122 private:
123#if V8_ENABLE_DRUMBRAKE && USE_SIMULATOR
125#else
127#endif // V8_ENABLE_DRUMBRAKE && USE_SIMULATOR
128
130};
131
132// -------------------------------------------------------------------------
133
135 : isolate_(isolate), frame_(nullptr), handler_(nullptr) {}
136
138 : StackFrameIterator(isolate, isolate->thread_local_top()) {}
139
144
145#if V8_ENABLE_WEBASSEMBLY
147 FirstStackOnly)
148 : StackFrameIteratorBase(isolate) {
149 first_stack_only_ = true;
150 Reset(t);
151}
152
154 wasm::StackMemory* stack)
155 : StackFrameIteratorBase(isolate) {
156 first_stack_only_ = true;
157 Reset(isolate->thread_local_top(), stack);
158}
159#endif
160
162 DCHECK(!done());
163 // Compute the state of the calling frame before restoring
164 // callee-saved registers and unwinding handlers. This allows the
165 // frame code that computes the caller state to access the top
166 // handler and the value of any callee-saved register if needed.
169#if V8_ENABLE_WEBASSEMBLY
170 if (frame_->type() == StackFrame::STACK_SWITCH &&
171 Memory<Address>(frame_->fp() +
172 StackSwitchFrameConstants::kCallerFPOffset) ==
173 kNullAddress &&
174 !first_stack_only_) {
175 // Handle stack switches here.
176 // Note: both the "callee" frame (outermost frame of the child stack) and
177 // the "caller" frame (top frame of the parent stack) have frame type
178 // STACK_SWITCH. We use the caller FP to distinguish them: the callee frame
179 // does not have a caller fp.
180 wasm_stack_ = wasm_stack()->jmpbuf()->parent;
181 CHECK_NOT_NULL(wasm_stack_);
182 CHECK_EQ(wasm_stack_->jmpbuf()->state, wasm::JumpBuffer::Inactive);
183 StackSwitchFrame::GetStateForJumpBuffer(wasm_stack_->jmpbuf(), &state);
184 SetNewFrame(StackFrame::STACK_SWITCH, &state);
185 return;
186 }
187#endif
188 type = frame_->GetCallerState(&state);
189
190 // {StackHandlerIterator} assumes that frame pointers strictly go from lower
191 // to higher addresses as we iterate the stack. This breaks with
192 // stack-switching, so only unwind the stack handlers for frames that are
193 // known to use them.
194 if (frame_->type() == StackFrame::ENTRY ||
195 frame_->type() == StackFrame::CONSTRUCT_ENTRY
196#if V8_ENABLE_WEBASSEMBLY
197 || frame_->type() == StackFrame::C_WASM_ENTRY
198#endif
199 ) {
201 while (!it.done()) it.Advance();
202 handler_ = it.handler();
203 }
204
205 // Advance to the calling frame.
206 SetNewFrame(type, &state);
207 // When we're done iterating over the stack frames, the handler
208 // chain must have been completely unwound. Except if we are only iterating
209 // the first stack of the chain for wasm stack-switching.
210#if V8_ENABLE_WEBASSEMBLY
211 DCHECK_IMPLIES(done() && !first_stack_only_, handler_ == nullptr);
212#else
213 DCHECK_IMPLIES(done(), handler_ == nullptr);
214#endif
215}
216
220 SetNewFrame(type, &state);
221 return frame();
222}
223
224namespace {
225StackFrame::Type GetStateForFastCCallCallerFP(Isolate* isolate, Address fp,
226 Address pc, Address pc_address,
227 StackFrame::State* state) {
228 // 'Fast C calls' are a special type of C call where we call directly from
229 // JS to C without an exit frame in between. The CEntryStub is responsible
230 // for setting Isolate::c_entry_fp, meaning that it won't be set for fast C
231 // calls. To keep the stack iterable, we store the FP and PC of the caller
232 // of the fast C call on the isolate. This is guaranteed to be the topmost
233 // JS frame, because fast C calls cannot call back into JS. We start
234 // iterating the stack from this topmost JS frame.
236 state->fp = fp;
237 state->sp = kNullAddress;
238 state->pc_address = reinterpret_cast<Address*>(pc_address);
239 state->callee_pc = kNullAddress;
240 state->constant_pool_address = nullptr;
241#if V8_ENABLE_WEBASSEMBLY
242 if (wasm::WasmCode* code =
243 wasm::GetWasmCodeManager()->LookupCode(isolate, pc)) {
244 if (code->kind() == wasm::WasmCode::kWasmToJsWrapper) {
245 return StackFrame::WASM_TO_JS;
246 }
248 return StackFrame::WASM;
249 }
250#endif // V8_ENABLE_WEBASSEMBLY
251 return StackFrame::TURBOFAN_JS;
252}
253} // namespace
254
258
259 const Address fast_c_call_caller_fp =
260 isolate_->isolate_data()->fast_c_call_caller_fp();
261 if (fast_c_call_caller_fp != kNullAddress) {
262 const Address caller_pc = isolate_->isolate_data()->fast_c_call_caller_pc();
263 const Address caller_pc_address =
264 isolate_->isolate_data()->fast_c_call_caller_pc_address();
265 type = GetStateForFastCCallCallerFP(isolate_, fast_c_call_caller_fp,
266 caller_pc, caller_pc_address, &state);
267 } else {
269 }
270#if V8_ENABLE_WEBASSEMBLY
271 wasm_stack_ = isolate_->isolate_data()->active_stack();
272#endif
274 SetNewFrame(type, &state);
275}
276
277#if V8_ENABLE_WEBASSEMBLY
279 if (stack->jmpbuf()->state == wasm::JumpBuffer::Retired) {
280 return;
281 }
282 StackFrame::State state;
283 StackSwitchFrame::GetStateForJumpBuffer(stack->jmpbuf(), &state);
285 wasm_stack_ = stack;
286 SetNewFrame(StackFrame::STACK_SWITCH, &state);
287}
288#endif
289
296
298 switch (type) {
299#define FRAME_TYPE_CASE(type, class) \
300 case StackFrame::type: \
301 frame_ = new (&class##_) class(this); \
302 return;
304#undef FRAME_TYPE_CASE
305
307 // We don't expect to see NUMBER_OF_TYPES or MANUAL, but stay robust against
308 // them rather than being UNREACHABLE in case stack frame iteration gets
309 // wonky.
310 case StackFrame::NUMBER_OF_TYPES:
312 break;
313 }
314 frame_ = nullptr;
315}
316
317// -------------------------------------------------------------------------
318
323
324// -------------------------------------------------------------------------
325
327 // The frame contains the actual argument count (intptr) that should not
328 // be visited.
329 FullObjectSlot argc(
330 &Memory<Address>(fp() + ConstructFrameConstants::kLengthOffset));
331 const int last_object_offset = ConstructFrameConstants::kLastObjectOffset;
332 FullObjectSlot base(&Memory<Address>(sp()));
333 FullObjectSlot limit(&Memory<Address>(fp() + last_object_offset) + 1);
334 v->VisitRootPointers(Root::kStackRoots, nullptr, base, argc);
335 v->VisitRootPointers(Root::kStackRoots, nullptr, argc + 1, limit);
337}
338
339// -------------------------------------------------------------------------
340
342 do {
344 } while (!iterator_.done() && !iterator_.frame()->is_javascript());
345}
346
347// -------------------------------------------------------------------------
348
350 : iterator_(isolate) {
351 if (!done() && !IsValidFrame(iterator_.frame())) Advance();
352}
353
359
361 do {
363 } while (!done() && !IsValidFrame(iterator_.frame()));
364}
365
367 DCHECK(!done());
368 if (!iterator_.frame()->is_optimized_js()) return 1;
369 std::vector<Tagged<SharedFunctionInfo>> infos;
370 TurbofanJSFrame::cast(iterator_.frame())->GetFunctions(&infos);
371 return static_cast<int>(infos.size());
372}
373
375 DCHECK(!done());
376 // Like FrameSummary::GetTop, but additionally observes
377 // DebuggableStackFrameIterator filtering semantics.
378 FrameSummaries summaries = frame()->Summarize();
379 if (is_javascript()) {
380 for (int i = summaries.size() - 1; i >= 0; i--) {
381 const FrameSummary& summary = summaries.frames[i];
382 if (summary.is_subject_to_debugging()) {
383 return summary;
384 }
385 }
386 UNREACHABLE();
387 }
388#if V8_ENABLE_WEBASSEMBLY
389 if (is_wasm()) return summaries.frames.back();
390#endif // V8_ENABLE_WEBASSEMBLY
391 UNREACHABLE();
392}
393
394// static
396 if (frame->is_javascript()) {
397 Tagged<JSFunction> function =
398 static_cast<JavaScriptFrame*>(frame)->function();
399 return function->shared()->IsSubjectToDebugging();
400 }
401#if V8_ENABLE_WEBASSEMBLY
402 if (frame->is_wasm()) return true;
403#endif // V8_ENABLE_WEBASSEMBLY
404 return false;
405}
406
407// -------------------------------------------------------------------------
408
409namespace {
410
411std::optional<bool> IsInterpreterFramePc(Isolate* isolate, Address pc,
412 StackFrame::State* state) {
414 if (builtin != Builtin::kNoBuiltinId &&
415 (builtin == Builtin::kInterpreterEntryTrampoline ||
416 builtin == Builtin::kInterpreterEnterAtBytecode ||
417 builtin == Builtin::kInterpreterEnterAtNextBytecode)) {
418 return true;
419 } else if (isolate->interpreted_frames_native_stack()) {
420 intptr_t marker = Memory<intptr_t>(
425 Tagged<Object> maybe_function = Tagged<Object>(
426 Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
427 // There's no need to run a full ContainsSlow if we know the frame can't be
428 // an InterpretedFrame, so we do these fast checks first
429 if (StackFrame::IsTypeMarker(marker) || IsSmi(maybe_function)) {
430 return false;
431 } else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) {
432 return false;
433 }
435 return {};
436 }
437 Tagged<Code> interpreter_entry_trampoline =
438 isolate->heap()->FindCodeForInnerPointer(pc);
439 return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
440 } else {
441 return false;
442 }
443}
444
445} // namespace
446
448 Isolate* isolate, Address pc, Address fp) const {
450 if (pc < d.InstructionStartOfBytecodeHandlers() ||
451 pc >= d.InstructionEndOfBytecodeHandlers()) {
452 return false;
453 }
454
455 Address frame_type_address =
457 if (!IsValidStackAddress(frame_type_address)) {
458 return false;
459 }
460
461 // Check if top stack frame is a bytecode handler stub frame.
463 intptr_t marker = Memory<intptr_t>(frame_type_address);
464 if (StackFrame::IsTypeMarker(marker) &&
465 StackFrame::MarkerToType(marker) == StackFrame::STUB) {
466 // Bytecode handler built a frame.
467 return false;
468 }
469 return true;
470}
471
473 Isolate* isolate, Address pc, Address fp, Address sp, Address lr,
474 Address js_entry_sp)
475 : StackFrameIteratorBase(isolate),
476 low_bound_(sp),
477 high_bound_(js_entry_sp),
478 top_frame_type_(StackFrame::NO_FRAME_TYPE),
479 external_callback_scope_(isolate->external_callback_scope()),
480 top_link_register_(lr)
481#if V8_ENABLE_WEBASSEMBLY
482 ,
483 wasm_stacks_(isolate->wasm_stacks())
484#endif
485{
486 if (!isolate->isolate_data()->stack_is_iterable()) {
487 // The stack is not iterable in a short time interval during deoptimization.
488 // See also: ExternalReference::stack_is_iterable_address.
489 DCHECK(done());
490 return;
491 }
492
493 // For Advance below, we need frame_ to be set; and that only happens if the
494 // type is not NO_FRAME_TYPE.
495 // TODO(jgruber): Clean this up.
496 static constexpr StackFrame::Type kTypeForAdvance = StackFrame::TURBOFAN_JS;
497
498 StackFrame::State state;
499 state.is_profiler_entry_frame = true;
500 StackFrame::Type type;
501 ThreadLocalTop* const top = isolate->thread_local_top();
502 bool advance_frame = true;
503 const Address fast_c_fp = isolate->isolate_data()->fast_c_call_caller_fp();
504 if (fast_c_fp != kNullAddress) {
505 // 'Fast C calls' are a special type of C call where we call directly from
506 // JS to C without an exit frame in between. The CEntryStub is responsible
507 // for setting Isolate::c_entry_fp, meaning that it won't be set for fast C
508 // calls. To keep the stack iterable, we store the FP and PC of the caller
509 // of the fast C call on the isolate. This is guaranteed to be the topmost
510 // JS frame, because fast C calls cannot call back into JS. We start
511 // iterating the stack from this topmost JS frame.
512 DCHECK_NE(kNullAddress, isolate->isolate_data()->fast_c_call_caller_pc());
513 state.fp = fast_c_fp;
514 state.sp = sp;
515 state.pc_address = reinterpret_cast<Address*>(
516 isolate->isolate_data()->fast_c_call_caller_pc_address());
517
518 // ComputeStackFrameType will read both kContextOffset and
519 // kFunctionOffset, we check only that kFunctionOffset is within the stack
520 // bounds and do a compile time check that kContextOffset slot is pushed on
521 // the stack before kFunctionOffset.
524 if (IsValidStackAddress(state.fp +
526 type = ComputeStackFrameType(&state);
527 if (IsValidFrameType(type)) {
529 advance_frame = false;
530 }
531 } else {
532 // Cannot determine the actual type; the frame will be skipped below.
533 type = kTypeForAdvance;
534 }
535 } else if (IsValidTop(top)) {
538 } else if (IsValidStackAddress(fp)) {
540 state.fp = fp;
541 state.sp = sp;
542 state.pc_address =
545
546 bool can_lookup_frame_type =
547 // Ensure frame structure is not broken, otherwise it doesn't make
548 // sense to try to detect a frame type.
549 (sp < fp) &&
550 // Ensure there is a context/frame type value in the frame.
552
553 // If the current PC is in a bytecode handler, the top stack frame isn't
554 // the bytecode handler's frame and the top of stack or link register is a
555 // return address into the interpreter entry trampoline, then we are likely
556 // in a bytecode handler with elided frame. In that case, set the PC
557 // properly and make sure we do not drop the frame.
558 bool is_no_frame_bytecode_handler = false;
559 if (can_lookup_frame_type && IsNoFrameBytecodeHandlerPc(isolate, pc, fp)) {
560 Address* top_location = nullptr;
561 if (top_link_register_) {
562 top_location = &top_link_register_;
563 } else if (IsValidStackAddress(sp)) {
565 top_location = reinterpret_cast<Address*>(sp);
566 }
567
568 std::optional<bool> is_interpreter_frame_pc =
569 IsInterpreterFramePc(isolate, *top_location, &state);
570 // Since we're in a signal handler, the pc lookup might not be possible
571 // since the required locks are taken by the same thread.
572 if (!is_interpreter_frame_pc.has_value()) {
573 can_lookup_frame_type = false;
574 } else if (is_interpreter_frame_pc.value()) {
575 state.pc_address = top_location;
576 is_no_frame_bytecode_handler = true;
577 advance_frame = false;
578 }
579 }
580
581 // ComputeStackFrameType will read both kContextOffset and
582 // kFunctionOffset, we check only that kFunctionOffset is within the stack
583 // bounds and do a compile time check that kContextOffset slot is pushed on
584 // the stack before kFunctionOffset.
588 if (!can_lookup_frame_type) {
590 } else if (IsValidStackAddress(function_slot)) {
591 if (is_no_frame_bytecode_handler) {
592 type = StackFrame::INTERPRETED;
593 } else {
594 type = ComputeStackFrameType(&state);
595 }
597 } else {
598 // Cannot determine the actual type; the frame will be skipped below.
599 type = kTypeForAdvance;
600 }
601 } else {
602 // Not iterable.
603 DCHECK(done());
604 return;
605 }
606
607 SetNewFrame(type, &state);
608 if (advance_frame && !done()) {
609 Advance();
610 }
611}
612
614 Address c_entry_fp = Isolate::c_entry_fp(top);
615 if (!IsValidExitFrame(c_entry_fp)) return false;
616 // There should be at least one JS_ENTRY stack handler.
617 Address handler = Isolate::handler(top);
618 return handler != kNullAddress;
619}
620
622 DCHECK(!done());
623 StackFrame* last_frame = frame_;
624 Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
625
626 // Before advancing to the next stack frame, perform pointer validity tests.
627 if (!IsValidState(last_frame->state_) ||
628 !HasValidExitIfEntryFrame(last_frame)) {
629 frame_ = nullptr;
630 return;
631 }
632
633 // Advance to the previous frame, and perform pointer validity tests there
634 // too.
636 last_frame->ComputeCallerState(&state);
637 if (!IsValidState(state)) {
638 frame_ = nullptr;
639 return;
640 }
641
643 SetNewFrame(type, &state);
644 if (!frame_) return;
645
646 // Check that we have actually moved to the previous frame in the stack.
647 if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
648 frame_ = nullptr;
649 }
650}
651
653 const StackFrame::State& state) const {
654 return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp);
655}
656
658 const StackFrame* frame) const {
659 if (!frame->is_entry() && !frame->is_construct_entry()) return true;
660
661 // See EntryFrame::GetCallerState. It computes the caller FP address
662 // and calls ExitFrame::GetStateForFramePointer on it. We need to be
663 // sure that caller FP address is valid.
664 Address next_exit_frame_fp_address =
666 // Profiling tick might be triggered in the middle of JSEntry builtin
667 // before the next_exit_frame_fp value is initialized. IsValidExitFrame()
668 // is able to deal with such a case, so just suppress the MSan warning.
669 MSAN_MEMORY_IS_INITIALIZED(next_exit_frame_fp_address, kSystemPointerSize);
670 Address next_exit_frame_fp = Memory<Address>(next_exit_frame_fp_address);
671 return IsValidExitFrame(next_exit_frame_fp);
672}
673
675 if (!IsAligned(fp, kSystemPointerSize)) return false;
676 if (!IsValidStackAddress(fp)) return false;
678 if (!IsValidStackAddress(sp)) return false;
680 ExitFrame::FillState(fp, sp, &state);
681 MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
682 return *state.pc_address != kNullAddress;
683}
684
686 while (true) {
688 if (done()) break;
689 ExternalCallbackScope* last_callback_scope = nullptr;
690 while (external_callback_scope_ != nullptr &&
692 frame_->fp()) {
693 // As long as the setup of a frame is not atomic, we may happen to be
694 // in an interval where an ExternalCallbackScope is already created,
695 // but the frame is not yet entered. So we are actually observing
696 // the previous frame.
697 // Skip all the ExternalCallbackScope's that are below the current fp.
698 last_callback_scope = external_callback_scope_;
700 }
701 if (frame_->is_javascript()) break;
702#if V8_ENABLE_WEBASSEMBLY
703 if (frame_->is_wasm() || frame_->is_wasm_to_js() ||
704 frame_->is_js_to_wasm()) {
705 break;
706 }
707#endif // V8_ENABLE_WEBASSEMBLY
708 if (frame_->is_exit() || frame_->is_builtin_exit() ||
710 // Some of the EXIT frames may have ExternalCallbackScope allocated on
711 // top of them. In that case the scope corresponds to the first EXIT
712 // frame beneath it. There may be other EXIT frames on top of the
713 // ExternalCallbackScope, just skip them as we cannot collect any useful
714 // information about them.
715 if (last_callback_scope) {
717 last_callback_scope->callback_entrypoint_address();
718 }
719 break;
720 }
721 }
722}
723
729
733
734// -------------------------------------------------------------------------
735
736namespace {
737
738std::optional<Tagged<GcSafeCode>> GetContainingCode(Isolate* isolate,
739 Address pc) {
740 return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
741}
742
743} // namespace
744
748
749std::pair<Tagged<GcSafeCode>, int> StackFrame::GcSafeLookupCodeAndOffset()
750 const {
752 std::optional<Tagged<GcSafeCode>> result = GetContainingCode(isolate(), pc);
753 return {result.value(),
754 result.value()->GetOffsetFromInstructionStart(isolate(), pc)};
755}
756
758 DCHECK_NE(isolate()->heap()->gc_state(), Heap::MARK_COMPACT);
759 return GcSafeLookupCode()->UnsafeCastToCode();
760}
761
762std::pair<Tagged<Code>, int> StackFrame::LookupCodeAndOffset() const {
763 DCHECK_NE(isolate()->heap()->gc_state(), Heap::MARK_COMPACT);
764 auto gc_safe_pair = GcSafeLookupCodeAndOffset();
765 return {gc_safe_pair.first->UnsafeCastToCode(), gc_safe_pair.second};
766}
767
768void StackFrame::IteratePc(RootVisitor* v, Address* constant_pool_address,
769 Tagged<GcSafeCode> holder) const {
770 const Address old_pc = maybe_unauthenticated_pc();
771 DCHECK_GE(old_pc, holder->InstructionStart(isolate(), old_pc));
772 DCHECK_LT(old_pc, holder->InstructionEnd(isolate(), old_pc));
773
774 // Keep the old pc offset before visiting the code since we need it to
775 // calculate the new pc after a potential InstructionStream move.
776 const uintptr_t pc_offset_from_start = old_pc - holder->instruction_start();
777
778 // Visit.
779 Tagged<GcSafeCode> visited_holder = holder;
780 PtrComprCageBase code_cage_base{isolate()->code_cage_base()};
781 const Tagged<Object> old_istream =
782 holder->raw_instruction_stream(code_cage_base);
783 Tagged<Object> visited_istream = old_istream;
784 v->VisitRunningCode(FullObjectSlot{&visited_holder},
785 FullObjectSlot{&visited_istream});
786 if (visited_istream == old_istream) {
787 // Note this covers two important cases:
788 // 1. the associated InstructionStream object did not move, and
789 // 2. `holder` is an embedded builtin and has no InstructionStream.
790 return;
791 }
792
793 DCHECK(visited_holder->has_instruction_stream());
794 // We can only relocate the InstructionStream object when we are able to patch
795 // the return address. We only know the location of the return address if the
796 // stack pointer is known. This means we cannot relocate InstructionStreams
797 // for fast c calls.
799 // Currently we turn off code space compaction fully when performing a GC in a
800 // fast C call.
802
804 GCSafeCast<InstructionStream>(visited_istream, isolate()->heap());
805 const Address new_pc = istream->instruction_start() + pc_offset_from_start;
806 // TODO(v8:10026): avoid replacing a signed pointer.
809 *constant_pool_address = istream->constant_pool();
810 }
811}
812
818
819namespace {
820
821StackFrame::Type ComputeBuiltinFrameType(Tagged<GcSafeCode> code) {
822 if (code->is_interpreter_trampoline_builtin() ||
823 code->is_baseline_trampoline_builtin()) {
824 // Frames for baseline entry trampolines on the stack are still interpreted
825 // frames.
826 return StackFrame::INTERPRETED;
827 } else if (code->is_baseline_leave_frame_builtin()) {
828 return StackFrame::BASELINE;
829 } else if (code->is_turbofanned()) {
830 // TODO(bmeurer): We treat frames for BUILTIN Code objects as
831 // OptimizedJSFrame for now (all the builtins with JavaScript linkage are
832 // actually generated with TurboFan currently, so this is sound).
833 return StackFrame::TURBOFAN_JS;
834 }
835 return StackFrame::BUILTIN;
836}
837
838StackFrame::Type SafeStackFrameType(StackFrame::Type candidate) {
839 DCHECK_LE(static_cast<uintptr_t>(candidate), StackFrame::NUMBER_OF_TYPES);
840 switch (candidate) {
841 case StackFrame::API_ACCESSOR_EXIT:
842 case StackFrame::API_CALLBACK_EXIT:
843 case StackFrame::BUILTIN_CONTINUATION:
844 case StackFrame::BUILTIN_EXIT:
845 case StackFrame::CONSTRUCT:
846 case StackFrame::FAST_CONSTRUCT:
847 case StackFrame::CONSTRUCT_ENTRY:
848 case StackFrame::ENTRY:
849 case StackFrame::EXIT:
850 case StackFrame::INTERNAL:
851 case StackFrame::IRREGEXP:
852 case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION:
853 case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
854 case StackFrame::STUB:
855 return candidate;
856
857#if V8_ENABLE_WEBASSEMBLY
858 case StackFrame::JS_TO_WASM:
859 case StackFrame::STACK_SWITCH:
860 case StackFrame::WASM:
861 case StackFrame::WASM_DEBUG_BREAK:
862 case StackFrame::WASM_EXIT:
863 case StackFrame::WASM_LIFTOFF_SETUP:
864 case StackFrame::WASM_TO_JS:
865 case StackFrame::WASM_SEGMENT_START:
866#if V8_ENABLE_DRUMBRAKE
867 case StackFrame::C_WASM_ENTRY:
868 case StackFrame::WASM_INTERPRETER_ENTRY:
869#endif // V8_ENABLE_DRUMBRAKE
870 return candidate;
871#endif // V8_ENABLE_WEBASSEMBLY
872
873 // Any other marker value is likely to be a bogus stack frame when being
874 // called from the profiler (in particular, JavaScript frames, including
875 // interpreted frames, should never have a StackFrame::Type marker).
876 // Consider these frames "native".
877 // TODO(jgruber): For the StackFrameIterator, I'm not sure this fallback
878 // makes sense. Shouldn't we know how to handle all frames we encounter
879 // there?
880 case StackFrame::BASELINE:
881 case StackFrame::BUILTIN:
882 case StackFrame::INTERPRETED:
883 case StackFrame::MAGLEV:
885 case StackFrame::NATIVE:
887 case StackFrame::NUMBER_OF_TYPES:
888 case StackFrame::TURBOFAN_JS:
889 case StackFrame::TURBOFAN_STUB_WITH_CONTEXT:
890#if V8_ENABLE_WEBASSEMBLY
891#if !V8_ENABLE_DRUMBRAKE
892 case StackFrame::C_WASM_ENTRY:
893#endif // !V8_ENABLE_DRUMBRAKE
894 case StackFrame::WASM_TO_JS_FUNCTION:
895#endif // V8_ENABLE_WEBASSEMBLY
896 return StackFrame::NATIVE;
897 }
898 UNREACHABLE();
899}
900
901} // namespace
902
904 StackFrame::State* state) const {
905#if V8_ENABLE_WEBASSEMBLY
906 if (state->fp == kNullAddress && first_stack_only_) {
907 DCHECK(!isolate_->wasm_stacks().empty()); // I.e., JSPI active
909 }
910#endif
911
912 const Address pc = StackFrame::ReadPC(state->pc_address);
913
914#if V8_ENABLE_WEBASSEMBLY
915 // If the {pc} does not point into WebAssembly code we can rely on the
916 // returned {wasm_code} to be null and fall back to {GetContainingCode}.
917 if (wasm::WasmCode* wasm_code =
918 wasm::GetWasmCodeManager()->LookupCode(isolate(), pc)) {
919 switch (wasm_code->kind()) {
921 return StackFrame::WASM;
923 return StackFrame::WASM_EXIT;
925 return StackFrame::WASM_TO_JS;
926#if V8_ENABLE_DRUMBRAKE
927 case wasm::WasmCode::kInterpreterEntry:
928 return StackFrame::WASM_INTERPRETER_ENTRY;
929#endif // V8_ENABLE_DRUMBRAKE
930 default:
931 UNREACHABLE();
932 }
933 }
934#endif // V8_ENABLE_WEBASSEMBLY
935
936 // Look up the code object to figure out the type of the stack frame.
937 std::optional<Tagged<GcSafeCode>> lookup_result =
938 GetContainingCode(isolate(), pc);
939 if (!lookup_result.has_value()) return StackFrame::NATIVE;
940
944 const intptr_t marker = Memory<intptr_t>(
946 switch (lookup_result.value()->kind()) {
947 case CodeKind::BUILTIN: {
948 if (StackFrame::IsTypeMarker(marker)) break;
949 return ComputeBuiltinFrameType(lookup_result.value());
950 }
951 case CodeKind::BASELINE:
952 // Baseline code can be deoptimized by DiscardBaselineCodeVisitor.
953 if (lookup_result.value()->marked_for_deoptimization())
954 return StackFrame::INTERPRETED;
955 return StackFrame::BASELINE;
956 case CodeKind::MAGLEV:
957 if (StackFrame::IsTypeMarker(marker)) {
958 // An INTERNAL frame can be set up with an associated Maglev code
959 // object when calling into runtime to handle tiering. In this case,
960 // all stack slots are tagged pointers and should be visited through
961 // the usual logic.
962 DCHECK_EQ(StackFrame::MarkerToType(marker), StackFrame::INTERNAL);
963 return StackFrame::INTERNAL;
964 }
965 return StackFrame::MAGLEV;
966 case CodeKind::TURBOFAN_JS:
967 return StackFrame::TURBOFAN_JS;
968#if V8_ENABLE_WEBASSEMBLY
969 case CodeKind::JS_TO_WASM_FUNCTION:
970 if (lookup_result.value()->builtin_id() == Builtin::kJSToWasmWrapperAsm) {
971 return StackFrame::JS_TO_WASM;
972 }
973#if V8_ENABLE_DRUMBRAKE
974 if (lookup_result.value()->builtin_id() ==
975 Builtin::kGenericJSToWasmInterpreterWrapper) {
976 return StackFrame::JS_TO_WASM;
977 }
978#endif // V8_ENABLE_DRUMBRAKE
979 return StackFrame::TURBOFAN_STUB_WITH_CONTEXT;
980 case CodeKind::C_WASM_ENTRY:
981 return StackFrame::C_WASM_ENTRY;
982 case CodeKind::WASM_TO_JS_FUNCTION:
983 return StackFrame::WASM_TO_JS_FUNCTION;
984 case CodeKind::WASM_FUNCTION:
985 case CodeKind::WASM_TO_CAPI_FUNCTION:
986 // These never appear as on-heap Code objects.
987 UNREACHABLE();
988#else
989 case CodeKind::C_WASM_ENTRY:
990 case CodeKind::JS_TO_WASM_FUNCTION:
991 case CodeKind::WASM_FUNCTION:
992 case CodeKind::WASM_TO_CAPI_FUNCTION:
993 case CodeKind::WASM_TO_JS_FUNCTION:
994 UNREACHABLE();
995#endif // V8_ENABLE_WEBASSEMBLY
996 case CodeKind::BYTECODE_HANDLER:
997 case CodeKind::FOR_TESTING:
998 case CodeKind::REGEXP:
999 case CodeKind::INTERPRETED_FUNCTION:
1000 // Fall back to the marker.
1001 break;
1002 }
1003
1004 return SafeStackFrameType(StackFrame::MarkerToType(marker));
1005}
1006
1008 StackFrame::State* state) const {
1009#if V8_ENABLE_WEBASSEMBLY
1010 if (state->fp == kNullAddress) {
1011 DCHECK(!isolate_->wasm_stacks().empty()); // I.e., JSPI active
1013 }
1014#endif
1015
1016 // We use unauthenticated_pc because it may come from
1017 // fast_c_call_caller_pc_address, for which authentication does not work.
1018 const Address pc = StackFrame::unauthenticated_pc(state->pc_address);
1019#if V8_ENABLE_WEBASSEMBLY
1020 Tagged<Code> wrapper =
1021 isolate()->builtins()->code(Builtin::kWasmToJsWrapperCSA);
1022 if (pc >= wrapper->instruction_start() && pc <= wrapper->instruction_end()) {
1023 return StackFrame::WASM_TO_JS;
1024 }
1025#endif // V8_ENABLE_WEBASSEMBLY
1026
1030 const intptr_t marker = Memory<intptr_t>(
1032 if (StackFrame::IsTypeMarker(marker)) {
1033 return SafeStackFrameType(StackFrame::MarkerToType(marker));
1034 }
1035
1038 Tagged<Object> maybe_function = Tagged<Object>(
1039 Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
1040 if (IsSmi(maybe_function)) {
1041 return StackFrame::NATIVE;
1042 }
1043
1044 std::optional<bool> is_interpreter_frame =
1045 IsInterpreterFramePc(isolate(), pc, state);
1046
1047 // We might not be able to lookup the frame type since we're inside a signal
1048 // handler and the required locks are taken.
1049 if (!is_interpreter_frame.has_value()) {
1051 }
1052
1053 if (is_interpreter_frame.value()) {
1054 return StackFrame::INTERPRETED;
1055 }
1056
1057 return StackFrame::TURBOFAN_JS;
1058}
1059
1061 ComputeCallerState(state);
1062 return iterator_->ComputeStackFrameType(state);
1063}
1064
1068
1070 state->sp = caller_sp();
1071 state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
1072 state->pc_address = ResolveReturnAddressLocation(
1073 reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
1074 state->callee_pc = kNullAddress;
1075 state->constant_pool_address = nullptr;
1076}
1077
1079 return isolate()->builtins()->code(Builtin::kJSEntry);
1080}
1081
1083 GetCallerState(state);
1084}
1085
1087 const Address fast_c_call_caller_fp =
1089 if (fast_c_call_caller_fp != kNullAddress) {
1090 Address caller_pc_address =
1092 Address caller_pc = Memory<Address>(caller_pc_address);
1093 return GetStateForFastCCallCallerFP(isolate(), fast_c_call_caller_fp,
1094 caller_pc, caller_pc_address, state);
1095 }
1096 Address next_exit_frame_fp =
1098 return ExitFrame::GetStateForFramePointer(next_exit_frame_fp, state);
1099}
1100
1101#if V8_ENABLE_WEBASSEMBLY
1102StackFrame::Type CWasmEntryFrame::GetCallerState(State* state) const {
1103 const int offset = CWasmEntryFrameConstants::kCEntryFPOffset;
1104 Address fp = Memory<Address>(this->fp() + offset);
1105 return ExitFrame::GetStateForFramePointer(fp, state);
1106}
1107
1108#if V8_ENABLE_DRUMBRAKE
1109void CWasmEntryFrame::Iterate(RootVisitor* v) const {
1110 if (!v8_flags.wasm_jitless) {
1112 }
1113}
1114#endif // V8_ENABLE_DRUMBRAKE
1115
1116#endif // V8_ENABLE_WEBASSEMBLY
1117
1119 return isolate()->builtins()->code(Builtin::kJSConstructEntry);
1120}
1121
1123 // Set up the caller state.
1124 state->sp = caller_sp();
1125 state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
1126 state->pc_address = ResolveReturnAddressLocation(
1127 reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
1128 state->callee_pc = kNullAddress;
1130 state->constant_pool_address = reinterpret_cast<Address*>(
1132 }
1133}
1134
1136 // The arguments are traversed as part of the expression stack of
1137 // the calling frame.
1139}
1140
1142 if (fp == 0) return NO_FRAME_TYPE;
1144#if V8_ENABLE_WEBASSEMBLY
1145 Address sp = type == WASM_EXIT ? WasmExitFrame::ComputeStackPointer(fp)
1147#else
1149#endif // V8_ENABLE_WEBASSEMBLY
1150 FillState(fp, sp, state);
1151 DCHECK_NE(*state->pc_address, kNullAddress);
1152 return type;
1153}
1154
1156 // Distinguish between different exit frame types.
1157 // Default to EXIT in all hairy cases (e.g., when called from profiler).
1159 Tagged<Object> marker(Memory<Address>(fp + offset));
1160
1161 if (!IsSmi(marker)) {
1162 return EXIT;
1163 }
1164
1165 intptr_t marker_int = base::bit_cast<intptr_t>(marker);
1166
1167 StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
1168 switch (frame_type) {
1169 case BUILTIN_EXIT:
1170 case API_ACCESSOR_EXIT:
1171 case API_CALLBACK_EXIT:
1172#if V8_ENABLE_WEBASSEMBLY
1173 case WASM_EXIT:
1174 case STACK_SWITCH:
1175#endif // V8_ENABLE_WEBASSEMBLY
1176 return frame_type;
1177 default:
1178 return EXIT;
1179 }
1180}
1181
1187
1188#if V8_ENABLE_WEBASSEMBLY
1189Address WasmExitFrame::ComputeStackPointer(Address fp) {
1190 // For WASM_EXIT frames, {sp} is only needed for finding the PC slot,
1191 // everything else is handled via safepoint information.
1192 Address sp = fp + WasmExitFrameConstants::kWasmInstanceDataOffset;
1193 DCHECK_EQ(sp - 1 * kPCOnStackSize,
1194 fp + WasmExitFrameConstants::kCallingPCOffset);
1195 return sp;
1196}
1197#endif // V8_ENABLE_WEBASSEMBLY
1198
1200 state->sp = sp;
1201 state->fp = fp;
1202 state->pc_address = ResolveReturnAddressLocation(
1203 reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
1204 state->callee_pc = kNullAddress;
1205 // The constant pool recorded in the exit frame is not associated
1206 // with the pc in this state (the return address into a C entry
1207 // stub). ComputeCallerState will retrieve the constant pool
1208 // together with the associated caller pc.
1209 state->constant_pool_address = nullptr;
1210}
1211
1216 int code_offset = -1;
1217 std::tie(code, code_offset) = LookupCodeAndOffset();
1219 isolate(), receiver(), function(), Cast<AbstractCode>(code), code_offset,
1220 IsConstructor(), *parameters);
1221 return FrameSummaries(summary);
1222}
1223
1227
1231
1238
1240 Tagged<Object> argc_slot = argc_slot_object();
1241 DCHECK(IsSmi(argc_slot));
1242 // Argc also counts the receiver and extra arguments for BuiltinExitFrame
1243 // (target, new target and argc itself), therefore the real argument count
1244 // has to be adjusted.
1245 int argc = Smi::ToInt(argc_slot) -
1247 DCHECK_GE(argc, 0);
1248 return argc;
1249}
1250
1252 if (V8_LIKELY(!v8_flags.detailed_error_stack_trace)) {
1253 return isolate()->factory()->empty_fixed_array();
1254 }
1255 int param_count = ComputeParametersCount();
1256 auto parameters = isolate()->factory()->NewFixedArray(param_count);
1257 for (int i = 0; i < param_count; i++) {
1258 parameters->set(i, GetParameter(i));
1259 }
1260 return parameters;
1261}
1262
1264 return !IsUndefined(new_target_slot_object(), isolate());
1265}
1266
1267// Ensure layout of v8::FunctionCallbackInfo is in sync with
1268// ApiCallbackExitFrameConstants.
1279
1281 Tagged<HeapObject> maybe_function = target();
1282 if (IsJSFunction(maybe_function)) {
1284 }
1285 DCHECK(IsFunctionTemplateInfo(maybe_function));
1286 DirectHandle<FunctionTemplateInfo> function_template_info(
1287 Cast<FunctionTemplateInfo>(maybe_function), isolate());
1288
1289 // Instantiate function for the correct context.
1290 DCHECK(IsContext(context()));
1293
1294 DirectHandle<JSFunction> function =
1296 function_template_info)
1297 .ToHandleChecked();
1298
1299 set_target(*function);
1300 return function;
1301}
1302
1305 Tagged<HeapObject> maybe_function = target();
1306 if (IsJSFunction(maybe_function)) {
1307 Tagged<SharedFunctionInfo> shared_info =
1308 Cast<JSFunction>(maybe_function)->shared();
1309 DCHECK(shared_info->IsApiFunction());
1310 return direct_handle(shared_info->api_func_data(), isolate());
1311 }
1312 DCHECK(IsFunctionTemplateInfo(maybe_function));
1313 return direct_handle(Cast<FunctionTemplateInfo>(maybe_function), isolate());
1314}
1315
1317 if (V8_LIKELY(!v8_flags.detailed_error_stack_trace)) {
1318 return isolate()->factory()->empty_fixed_array();
1319 }
1320 int param_count = ComputeParametersCount();
1321 auto parameters = isolate()->factory()->NewFixedArray(param_count);
1322 for (int i = 0; i < param_count; i++) {
1323 parameters->set(i, GetParameter(i));
1324 }
1325 return parameters;
1326}
1327
1333 int code_offset = -1;
1334 std::tie(code, code_offset) = LookupCodeAndOffset();
1336 isolate(), receiver(), *function, Cast<AbstractCode>(code), code_offset,
1337 IsConstructor(), *parameters);
1338 return FrameSummaries(summary);
1339}
1340
1341// Ensure layout of v8::PropertyCallbackInfo is in sync with
1342// ApiAccessorExitFrameConstants.
1343static_assert(
1346static_assert(
1349static_assert(
1356
1358 // This frame is not supposed to appear in exception stack traces.
1359 DCHECK(IsName(property_name()));
1360 DCHECK(IsJSReceiver(receiver()));
1361 DCHECK(IsJSReceiver(holder()));
1362 return FrameSummaries();
1363}
1364
1365namespace {
1366void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
1367 int index) {
1368 accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
1369}
1370
1371const char* StringForStackFrameType(StackFrame::Type type) {
1372 switch (type) {
1373#define CASE(value, name) \
1374 case StackFrame::value: \
1375 return #name;
1377#undef CASE
1379 return "NoFrameType";
1380 default:
1381 UNREACHABLE();
1382 }
1383}
1384} // namespace
1385
1387 int index) const {
1389 PrintIndex(accumulator, mode, index);
1390 accumulator->Add(StringForStackFrameType(type()));
1391 accumulator->Add(" [pc: %p]\n",
1392 reinterpret_cast<void*>(maybe_unauthenticated_pc()));
1393}
1394
1396 int index) const {
1399 Tagged<JSFunction> function = this->function();
1400 Tagged<SharedFunctionInfo> sfi = function->shared();
1401
1402 accumulator->PrintSecurityTokenIfChanged(isolate(), function);
1403 PrintIndex(accumulator, mode, index);
1404 accumulator->Add("BuiltinExitFrame ");
1405 if (sfi->HasBuiltinId()) {
1406 // API functions have builtin code but not builtin SFIs, so don't print the
1407 // builtins for those.
1408 accumulator->Add("[builtin: %s] ", Builtins::name(sfi->builtin_id()));
1409 }
1410 if (IsConstructor()) accumulator->Add("new ");
1411 accumulator->PrintFunction(isolate(), function, receiver);
1412
1413 accumulator->Add("(this=%o", receiver);
1414
1415 // Print the parameters.
1416 int parameters_count = ComputeParametersCount();
1417 for (int i = 0; i < parameters_count; i++) {
1418 accumulator->Add(",%o", GetParameter(i));
1419 }
1420
1421 accumulator->Add(")\n");
1422}
1423
1425 int index) const {
1429
1430 accumulator->PrintSecurityTokenIfChanged(isolate(), *function);
1431 PrintIndex(accumulator, mode, index);
1432 accumulator->Add("ApiCallbackExitFrame ");
1433 if (IsConstructor()) accumulator->Add("new ");
1434 accumulator->PrintFunction(isolate(), *function, receiver);
1435
1436 accumulator->Add("(this=%o", receiver);
1437
1438 // Print the parameters.
1439 int parameters_count = ComputeParametersCount();
1440 for (int i = 0; i < parameters_count; i++) {
1441 accumulator->Add(",%o", GetParameter(i));
1442 }
1443
1444 accumulator->Add(")\n\n");
1445}
1446
1448 int index) const {
1450
1451 PrintIndex(accumulator, mode, index);
1452 accumulator->Add("api accessor exit frame: ");
1453
1454 Tagged<Name> name = property_name();
1456 Tagged<Object> holder = this->holder();
1457 accumulator->Add("(this=%o, holder=%o, name=%o)\n", receiver, holder, name);
1458}
1459
1464
1469
1471 return ReadOnlyRoots(isolate()).undefined_value();
1472}
1473
1476 int code_offset = -1;
1477 std::tie(code, code_offset) = LookupCodeAndOffset();
1478 return code->SourcePosition(code_offset);
1479}
1480
1483 Address limit = sp() - kSystemPointerSize;
1484 DCHECK(base >= limit); // stack grows downwards
1485 // Include register-allocated locals in number of expressions.
1486 return static_cast<int>((base - limit) / kSystemPointerSize);
1487}
1488
1490 state->fp = caller_fp();
1491#if V8_ENABLE_WEBASSEMBLY
1492 if (state->fp == kNullAddress) {
1493 // An empty FP signals the first frame of a stack segment. The caller is
1494 // on a different stack, or is unbound (suspended stack).
1495 // DCHECK(isolate_->wasm_stacks() != nullptr); // I.e., JSPI active
1496 return;
1497 }
1498#endif
1499 state->sp = caller_sp();
1500 state->pc_address = ResolveReturnAddressLocation(reinterpret_cast<Address*>(
1502 state->callee_fp = fp();
1503 state->callee_pc = maybe_unauthenticated_pc();
1504 state->constant_pool_address = reinterpret_cast<Address*>(
1506}
1507
1509 // This should only be called on frames which override this method.
1510 UNREACHABLE();
1511}
1512
1513namespace {
1514void VisitSpillSlot(Isolate* isolate, RootVisitor* v,
1515 FullObjectSlot spill_slot) {
1516#ifdef V8_COMPRESS_POINTERS
1517 PtrComprCageBase cage_base(isolate);
1518 bool was_compressed = false;
1519
1520 // Spill slots may contain compressed values in which case the upper
1521 // 32-bits will contain zeros. In order to simplify handling of such
1522 // slots in GC we ensure that the slot always contains full value.
1523
1524 // The spill slot may actually contain weak references so we load/store
1525 // values using spill_slot.location() in order to avoid dealing with
1526 // FullMaybeObjectSlots here.
1528 // When external code space is enabled the spill slot could contain both
1529 // InstructionStream and non-InstructionStream references, which have
1530 // different cage bases. So unconditional decompression of the value might
1531 // corrupt InstructionStream pointers. However, given that 1) the
1532 // InstructionStream pointers are never compressed by design (because
1533 // otherwise we wouldn't know which cage base to apply for
1534 // decompression, see respective DCHECKs in
1535 // RelocInfo::target_object()),
1536 // 2) there's no need to update the upper part of the full pointer
1537 // because if it was there then it'll stay the same,
1538 // we can avoid updating upper part of the spill slot if it already
1539 // contains full value.
1540 // TODO(v8:11880): Remove this special handling by enforcing builtins
1541 // to use CodeTs instead of InstructionStream objects.
1542 Address value = *spill_slot.location();
1543 if (!HAS_SMI_TAG(value) && value <= 0xffffffff) {
1544 // We don't need to update smi values or full pointers.
1545 was_compressed = true;
1547 cage_base, static_cast<Tagged_t>(value));
1548 if (DEBUG_BOOL) {
1549 // Ensure that the spill slot contains correct heap object.
1550 Tagged<HeapObject> raw =
1552 MapWord map_word = raw->map_word(cage_base, kRelaxedLoad);
1554 ? map_word.ToForwardingAddress(raw)
1555 : raw;
1556 bool is_self_forwarded =
1558 if (is_self_forwarded) {
1559 // The object might be in a self-forwarding state if it's located
1560 // in new large object space. GC will fix this at a later stage.
1561 CHECK(
1562 MemoryChunk::FromHeapObject(forwarded)->InNewLargeObjectSpace() ||
1563 MemoryChunk::FromHeapObject(forwarded)->IsQuarantined());
1564 } else {
1565 Tagged<HeapObject> forwarded_map = forwarded->map(cage_base);
1566 // The map might be forwarded as well.
1567 MapWord fwd_map_map_word =
1568 forwarded_map->map_word(cage_base, kRelaxedLoad);
1569 if (fwd_map_map_word.IsForwardingAddress()) {
1570 forwarded_map = fwd_map_map_word.ToForwardingAddress(forwarded_map);
1571 }
1572 CHECK(IsMap(forwarded_map, cage_base));
1573 }
1574 }
1575 }
1576 } else {
1577 Address slot_contents = *spill_slot.location();
1578 Tagged_t compressed_value = static_cast<Tagged_t>(slot_contents);
1579 if (!HAS_SMI_TAG(compressed_value)) {
1580 was_compressed = slot_contents <= 0xFFFFFFFF;
1581 // We don't need to update smi values.
1583 cage_base, compressed_value);
1584 }
1585 }
1586#endif
1587 v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot);
1588#if V8_COMPRESS_POINTERS
1589 if (was_compressed) {
1590 // Restore compression. Generated code should be able to trust that
1591 // compressed spill slots remain compressed.
1592 *spill_slot.location() =
1594 }
1595#endif
1596}
1597
1598void VisitSpillSlots(Isolate* isolate, RootVisitor* v,
1599 FullObjectSlot first_slot_offset,
1600 base::Vector<const uint8_t> tagged_slots) {
1601 FullObjectSlot slot_offset = first_slot_offset;
1602 for (uint8_t bits : tagged_slots) {
1603 while (bits) {
1604 const int bit = base::bits::CountTrailingZeros(bits);
1605 bits &= ~(1 << bit);
1606 FullObjectSlot spill_slot = slot_offset + bit;
1607 VisitSpillSlot(isolate, v, spill_slot);
1608 }
1609 slot_offset += kBitsPerByte;
1610 }
1611}
1612
1613SafepointEntry GetSafepointEntryFromCodeCache(
1614 Isolate* isolate, Address inner_pointer,
1615 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry) {
1616 if (!entry->safepoint_entry.is_initialized()) {
1617 entry->safepoint_entry =
1618 SafepointTable::FindEntry(isolate, entry->code.value(), inner_pointer);
1619 DCHECK(entry->safepoint_entry.is_initialized());
1620 } else {
1621 DCHECK_EQ(
1622 entry->safepoint_entry,
1623 SafepointTable::FindEntry(isolate, entry->code.value(), inner_pointer));
1624 }
1625 return entry->safepoint_entry;
1626}
1627
1628MaglevSafepointEntry GetMaglevSafepointEntryFromCodeCache(
1629 Isolate* isolate, Address inner_pointer,
1630 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry) {
1631 if (!entry->maglev_safepoint_entry.is_initialized()) {
1632 entry->maglev_safepoint_entry = MaglevSafepointTable::FindEntry(
1633 isolate, entry->code.value(), inner_pointer);
1634 DCHECK(entry->maglev_safepoint_entry.is_initialized());
1635 } else {
1636 DCHECK_EQ(entry->maglev_safepoint_entry,
1637 MaglevSafepointTable::FindEntry(isolate, entry->code.value(),
1638 inner_pointer));
1639 }
1640 return entry->maglev_safepoint_entry;
1641}
1642
1643} // namespace
1644
1645#ifdef V8_ENABLE_WEBASSEMBLY
1646#if V8_ENABLE_DRUMBRAKE
1647// Class DrumBrakeWasmCode is an adapter class that exposes just the accessors
1648// of the original WasmCode class that are used in WasmFrame::Iterate. For non
1649// DrumBrake frames, the class calls the corresponding accessor in a contained
1650// WasmCode object, while for DrumBrake frames it returns dummy values. This is
1651// useful to minimize the merge issues in WasmFrame::Iterate.
1652class DrumBrakeWasmCode {
1653 public:
1654 explicit DrumBrakeWasmCode(wasm::WasmCode* wasm_code)
1655 : wasm_code_(wasm_code) {}
1656
1657 static std::unique_ptr<DrumBrakeWasmCode> Interpreted() {
1658 return std::make_unique<DrumBrakeWasmCode>(nullptr);
1659 }
1660 static std::unique_ptr<DrumBrakeWasmCode> Compiled(
1661 wasm::WasmCode* wasm_code) {
1662 return std::make_unique<DrumBrakeWasmCode>(wasm_code);
1663 }
1664
1665 bool is_liftoff() const {
1666 return wasm_code_ ? wasm_code_->is_liftoff() : false;
1667 }
1668 bool frame_has_feedback_slot() const {
1669 return wasm_code_ ? wasm_code_->frame_has_feedback_slot() : false;
1670 }
1671 int stack_slots() const { return wasm_code_ ? wasm_code_->stack_slots() : 0; }
1672 wasm::WasmCode::Kind kind() const {
1673 return wasm_code_ ? wasm_code_->kind() : wasm::WasmCode::kInterpreterEntry;
1674 }
1675 uint16_t first_tagged_parameter_slot() const {
1676 return wasm_code_ ? wasm_code_->first_tagged_parameter_slot() : 0;
1677 }
1678 uint16_t num_tagged_parameter_slots() const {
1679 return wasm_code_ ? wasm_code_->num_tagged_parameter_slots() : 0;
1680 }
1681
1682 private:
1683 const wasm::WasmCode* wasm_code_;
1684};
1685#endif // V8_ENABLE_DRUMBRAKE
1686
1687void WasmFrame::Iterate(RootVisitor* v) const {
1688 DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
1689
1690 // === WasmFrame ===
1691 // +-------------------------+-----------------------------------------
1692 // | out_param n | <-- parameters_base / sp
1693 // | ... |
1694 // | out_param 0 | (these can be tagged or untagged)
1695 // +-------------------------+-----------------------------------------
1696 // | spill_slot n | <-- parameters_limit ^
1697 // | ... | spill_slot_space
1698 // | spill_slot 0 | v
1699 // +-------------------------+-----------------------------------------
1700 // | WasmFeedback(*) | <-- frame_header_base ^
1701 // |- - - - - - - - - - - - -| |
1702 // | WasmTrustedInstanceData | |
1703 // |- - - - - - - - - - - - -| |
1704 // | Type Marker | |
1705 // |- - - - - - - - - - - - -| frame_header_size
1706 // | [Constant Pool] | |
1707 // |- - - - - - - - - - - - -| |
1708 // | saved frame ptr | <-- fp |
1709 // |- - - - - - - - - - - - -| |
1710 // | return addr | <-- tagged_parameter_limit v
1711 // +-------------------------+-----------------------------------------
1712 // | in_param n |
1713 // | ... |
1714 // | in_param 0 | <-- first_tagged_parameter_slot
1715 // +-------------------------+-----------------------------------------
1716 //
1717 // (*) Only if compiled by Liftoff and with --wasm-inlining.
1718
1719#if !V8_ENABLE_DRUMBRAKE
1721 isolate(), maybe_unauthenticated_pc());
1722 wasm::WasmCode* wasm_code = pair.first;
1723 SafepointEntry safepoint_entry = pair.second;
1724#else // !V8_ENABLE_DRUMBRAKE
1725 std::unique_ptr<DrumBrakeWasmCode> interpreter_wasm_code;
1726 SafepointEntry safepoint_entry;
1727 bool is_wasm_interpreter_frame =
1728 v8_flags.wasm_jitless &&
1729 (type() == WASM_INTERPRETER_ENTRY || type() == C_WASM_ENTRY);
1730 if (is_wasm_interpreter_frame) {
1731 interpreter_wasm_code = DrumBrakeWasmCode::Interpreted();
1732 } else {
1733 auto pair =
1735 wasm::WasmCode* wasm_code = pair.first;
1736 safepoint_entry = pair.second;
1737 DCHECK(wasm_code);
1738 interpreter_wasm_code = DrumBrakeWasmCode::Compiled(wasm_code);
1739 }
1740
1741 // Reuse the same name "wasm_code" for this variable, to use the
1742 // DrumBrakeWasmCode adapter and minimize merge issues in the following code.
1743 DrumBrakeWasmCode* wasm_code = interpreter_wasm_code.get();
1744#endif // !V8_ENABLE_DRUMBRAKE
1745
1746#ifdef DEBUG
1747 intptr_t marker =
1748 Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
1751 DCHECK(type == WASM_TO_JS || type == WASM || type == WASM_EXIT ||
1752 type == WASM_SEGMENT_START);
1753#endif
1754
1755 // Determine the fixed header and spill slot area size.
1756 // The last value in the frame header is the calling PC, which should
1757 // not be visited.
1758 static_assert(WasmExitFrameConstants::kFixedSlotCountFromFp ==
1759 WasmFrameConstants::kFixedSlotCountFromFp + 1,
1760 "WasmExitFrame has one slot more than WasmFrame");
1761
1762 int frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp;
1763 if (wasm_code->is_liftoff() && wasm_code->frame_has_feedback_slot()) {
1764 // Frame has Wasm feedback slot.
1765 frame_header_size += kSystemPointerSize;
1766 }
1767 int spill_slot_space =
1768 wasm_code->stack_slots() * kSystemPointerSize -
1770 // Fixed frame slots.
1771 FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
1772 FullObjectSlot frame_header_limit(
1773 &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
1774
1775 // Visit parameters passed to the callee.
1776 // Frame layout without stack switching (stack grows upwards):
1777 //
1778 // | callee |
1779 // | frame |
1780 // |-------------| <- sp()
1781 // | out params |
1782 // |-------------| <- frame_header_base - spill_slot_space
1783 // | spill slots |
1784 // |-------------| <- frame_header_base
1785 // | frame header|
1786 // |-------------| <- fp()
1787 //
1788 // With stack-switching:
1789 //
1790 // Secondary stack: Central stack:
1791 //
1792 // | callee |
1793 // | frame |
1794 // |------------| <- sp()
1795 // | out params |
1796 // |-------------| |------------| <- maybe_stack_switch.target_sp
1797 // | spill slots |
1798 // |-------------| <- frame_header_base
1799 // | frame header|
1800 // |-------------| <- fp()
1801 //
1802 // The base (lowest address) of the outgoing stack parameters area is always
1803 // sp(), and the limit (highest address) is either {frame_header_base -
1804 // spill_slot_space} or {maybe_stack_switch.target_sp} depending on
1805 // stack-switching.
1806 wasm::StackMemory::StackSwitchInfo maybe_stack_switch;
1807 if (iterator_->wasm_stack() != nullptr) {
1808 maybe_stack_switch = iterator_->wasm_stack()->stack_switch_info();
1809 }
1810 FullObjectSlot parameters_limit(
1811 maybe_stack_switch.has_value() && maybe_stack_switch.source_fp == fp()
1812 ? maybe_stack_switch.target_sp
1813 : frame_header_base.address() - spill_slot_space);
1814 FullObjectSlot spill_space_end =
1815 FullObjectSlot(frame_header_base.address() - spill_slot_space);
1816
1817 // Visit the rest of the parameters if they are tagged.
1818 bool has_tagged_outgoing_params =
1819 wasm_code->kind() != wasm::WasmCode::kWasmFunction &&
1820 wasm_code->kind() != wasm::WasmCode::kWasmToCapiWrapper;
1821 if (!InFastCCall() && has_tagged_outgoing_params) {
1822 FullObjectSlot parameters_base(&Memory<Address>(sp()));
1823 v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
1824 parameters_limit);
1825 }
1826
1827 // Visit pointer spill slots and locals.
1828 if (safepoint_entry.is_initialized()) {
1829 DCHECK_GE((wasm_code->stack_slots() + kBitsPerByte) / kBitsPerByte,
1830 safepoint_entry.tagged_slots().size());
1831 VisitSpillSlots(isolate(), v, spill_space_end,
1832 safepoint_entry.tagged_slots());
1833 }
1834
1835 // Visit tagged parameters that have been passed to the function of this
1836 // frame. Conceptionally these parameters belong to the parent frame. However,
1837 // the exact count is only known by this frame (in the presence of tail calls,
1838 // this information cannot be derived from the call site).
1839 if (wasm_code->num_tagged_parameter_slots() > 0) {
1840 FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp()));
1841 tagged_parameter_base += wasm_code->first_tagged_parameter_slot();
1842 FullObjectSlot tagged_parameter_limit =
1843 tagged_parameter_base + wasm_code->num_tagged_parameter_slots();
1844
1845 v->VisitRootPointers(Root::kStackRoots, nullptr, tagged_parameter_base,
1846 tagged_parameter_limit);
1847 }
1848
1849 // Visit the instance object.
1850 v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base,
1851 frame_header_limit);
1852}
1853
1854void TypedFrame::IterateParamsOfGenericWasmToJSWrapper(RootVisitor* v) const {
1855 Address maybe_sig =
1856 Memory<Address>(fp() + WasmToJSWrapperConstants::kSignatureOffset);
1857 if (maybe_sig == 0 || maybe_sig == static_cast<Address>(-1)) {
1858 // The signature slot was reset after processing all incoming parameters.
1859 // We don't have to keep them alive anymore.
1860 return;
1861 }
1862
1863 const wasm::CanonicalSig* sig =
1864 reinterpret_cast<wasm::CanonicalSig*>(maybe_sig);
1866 wasm::LinkageLocationAllocator allocator(wasm::kGpParamRegisters,
1868 // The first parameter is the instance data, which we don't have to scan. We
1869 // have to tell the LinkageLocationAllocator about it though.
1871
1872 // Parameters are separated into two groups (first all untagged, then all
1873 // tagged parameters). Therefore we first have to iterate over the signature
1874 // first to process all untagged parameters, and afterwards we can scan the
1875 // tagged parameters.
1876 bool has_tagged_param = false;
1877 for (wasm::CanonicalValueType type : sig->parameters()) {
1878 MachineRepresentation param = type.machine_representation();
1879 // Skip tagged parameters (e.g. any-ref).
1880 if (IsAnyTagged(param)) {
1881 has_tagged_param = true;
1882 continue;
1883 }
1885 allocator.Next(param);
1886 } else {
1887 allocator.Next(MachineRepresentation::kWord32);
1888 allocator.Next(MachineRepresentation::kWord32);
1889 }
1890 }
1891
1892 // End the untagged area, so tagged slots come after. This means, especially,
1893 // that tagged parameters should not fill holes in the untagged area.
1894 allocator.EndSlotArea();
1895
1896 if (!has_tagged_param) return;
1897
1898#if V8_TARGET_ARCH_ARM64
1899 constexpr size_t size_of_sig = 2;
1900#else
1901 constexpr size_t size_of_sig = 1;
1902#endif
1903
1904 for (wasm::CanonicalValueType type : sig->parameters()) {
1905 MachineRepresentation param = type.machine_representation();
1906 // Skip untagged parameters.
1907 if (!IsAnyTagged(param)) continue;
1908 LinkageLocation l = allocator.Next(param);
1909 if (l.IsRegister()) {
1910 // Calculate the slot offset.
1911 int slot_offset = 0;
1912 // We have to do a reverse lookup in the kGPParamRegisters array. This
1913 // can be optimized if necessary.
1914 for (size_t i = 1; i < arraysize(wasm::kGpParamRegisters); ++i) {
1915 if (wasm::kGpParamRegisters[i].code() == l.AsRegister()) {
1916 // The first register (the instance) does not get spilled.
1917 slot_offset = static_cast<int>(i) - 1;
1918 break;
1919 }
1920 }
1921 // Caller FP + return address + signature.
1922 size_t param_start_offset = 2 + size_of_sig;
1923 FullObjectSlot param_start(fp() +
1924 param_start_offset * kSystemPointerSize);
1925 FullObjectSlot tagged_slot = param_start + slot_offset;
1926 VisitSpillSlot(isolate(), v, tagged_slot);
1927 } else {
1928 // Caller frame slots have negative indices and start at -1. Flip it
1929 // back to a positive offset (to be added to the frame's FP to find the
1930 // slot).
1931 int slot_offset = -l.GetLocation() - 1;
1932 // Caller FP + return address + signature + spilled registers (without the
1933 // instance register).
1934 size_t slots_per_float64 = kDoubleSize / kSystemPointerSize;
1935 size_t param_start_offset =
1937 (arraysize(wasm::kFpParamRegisters) * slots_per_float64) + 2 +
1938 size_of_sig;
1939
1940 // The wasm-to-js wrapper pushes all but the first gp parameter register
1941 // on the stack, so if the number of gp parameter registers is even, this
1942 // means that the wrapper pushed an odd number. In that case, and when the
1943 // size of a double on the stack is two words, then there is an alignment
1944 // word between the pushed gp registers and the pushed fp registers, so
1945 // that the whole spill area is double-size aligned.
1946 if (arraysize(wasm::kGpParamRegisters) % 2 == (0) &&
1948 param_start_offset++;
1949 }
1950 FullObjectSlot param_start(fp() +
1951 param_start_offset * kSystemPointerSize);
1952 FullObjectSlot tagged_slot = param_start + slot_offset;
1953 VisitSpillSlot(isolate(), v, tagged_slot);
1954 }
1955 }
1956}
1957
1958void TypedFrame::IterateParamsOfOptimizedWasmToJSWrapper(RootVisitor* v) const {
1960 if (code->wasm_js_tagged_parameter_count() > 0) {
1961 FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp()));
1962 tagged_parameter_base += code->wasm_js_first_tagged_parameter();
1963 FullObjectSlot tagged_parameter_limit =
1964 tagged_parameter_base + code->wasm_js_tagged_parameter_count();
1965 v->VisitRootPointers(Root::kStackRoots, nullptr, tagged_parameter_base,
1966 tagged_parameter_limit);
1967 }
1968}
1969#endif // V8_ENABLE_WEBASSEMBLY
1970
1972 DCHECK(!iterator_->IsStackFrameIteratorForProfiler());
1973
1974 // === TypedFrame ===
1975 // +-----------------+-----------------------------------------
1976 // | out_param n | <-- parameters_base / sp
1977 // | ... |
1978 // | out_param 0 |
1979 // +-----------------+-----------------------------------------
1980 // | spill_slot n | <-- parameters_limit ^
1981 // | ... | spill_slot_count
1982 // | spill_slot 0 | v
1983 // +-----------------+-----------------------------------------
1984 // | Type Marker | <-- frame_header_base ^
1985 // |- - - - - - - - -| |
1986 // | [Constant Pool] | |
1987 // |- - - - - - - - -| kFixedSlotCount
1988 // | saved frame ptr | <-- fp |
1989 // |- - - - - - - - -| |
1990 // | return addr | v
1991 // +-----------------+-----------------------------------------
1992
1993 // Find the code and compute the safepoint information.
1994 Address inner_pointer = pc();
1997 CHECK(entry->code.has_value());
1998 Tagged<GcSafeCode> code = entry->code.value();
1999#if V8_ENABLE_WEBASSEMBLY
2000 bool is_generic_wasm_to_js =
2001 code->is_builtin() && code->builtin_id() == Builtin::kWasmToJsWrapperCSA;
2002 bool is_optimized_wasm_to_js = this->type() == WASM_TO_JS_FUNCTION;
2003 if (is_generic_wasm_to_js) {
2005 } else if (is_optimized_wasm_to_js) {
2007 }
2008#endif // V8_ENABLE_WEBASSEMBLY
2009 DCHECK(code->is_turbofanned());
2010 SafepointEntry safepoint_entry =
2011 GetSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);
2012
2013#ifdef DEBUG
2014 intptr_t marker =
2017#endif // DEBUG
2018
2019 // Determine the fixed header and spill slot area size.
2020 int frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
2021 int spill_slots_size =
2022 code->stack_slots() * kSystemPointerSize -
2024
2025 // Fixed frame slots.
2026 FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
2027 FullObjectSlot frame_header_limit(
2028 &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
2029 // Parameters passed to the callee.
2030#if V8_ENABLE_WEBASSEMBLY
2031 // Frame layout without stack switching (stack grows upwards):
2032 //
2033 // | callee |
2034 // | frame |
2035 // |-------------| <- sp()
2036 // | out params |
2037 // |-------------| <- frame_header_base - spill_slot_space
2038 // | spill slots |
2039 // |-------------| <- frame_header_base
2040 // | frame header|
2041 // |-------------| <- fp()
2042 //
2043 // With stack-switching:
2044 //
2045 // Secondary stack: Central stack:
2046 //
2047 // | callee |
2048 // | frame |
2049 // |------------| <- sp()
2050 // | out params |
2051 // |-------------| |------------| <- maybe_stack_switch.target_sp
2052 // | spill slots |
2053 // |-------------| <- frame_header_base
2054 // | frame header|
2055 // |-------------| <- fp()
2056 //
2057 // The base (lowest address) of the outgoing stack parameters area is always
2058 // sp(), and the limit (highest address) is either {frame_header_base -
2059 // spill_slot_size} or {maybe_stack_switch.target_sp} depending on
2060 // stack-switching.
2061 wasm::StackMemory::StackSwitchInfo maybe_stack_switch;
2062 if (iterator_->wasm_stack() != nullptr) {
2063 maybe_stack_switch = iterator_->wasm_stack()->stack_switch_info();
2064 }
2065 FullObjectSlot parameters_limit(
2066 maybe_stack_switch.has_value() && maybe_stack_switch.source_fp == fp()
2067 ? maybe_stack_switch.target_sp
2068 : frame_header_base.address() - spill_slots_size);
2069#else
2070 FullObjectSlot parameters_limit(frame_header_base.address() -
2071 spill_slots_size);
2072#endif
2073 FullObjectSlot parameters_base(&Memory<Address>(sp()));
2074 FullObjectSlot spill_slots_end(frame_header_base.address() -
2075 spill_slots_size);
2076
2077 // Visit the rest of the parameters.
2078 if (HasTaggedOutgoingParams(code)) {
2079 v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
2080 parameters_limit);
2081 }
2082
2083 // Visit pointer spill slots and locals.
2084 DCHECK_GE((code->stack_slots() + kBitsPerByte) / kBitsPerByte,
2085 safepoint_entry.tagged_slots().size());
2086 VisitSpillSlots(isolate(), v, spill_slots_end,
2087 safepoint_entry.tagged_slots());
2088
2089 // Visit fixed header region.
2090 v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base,
2091 frame_header_limit);
2092
2093 // Visit the return address in the callee and incoming arguments.
2094 IteratePc(v, constant_pool_address(), code);
2095}
2096
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());

  // Visits, as GC roots, every slot of this Maglev frame that can hold a
  // tagged value: the outgoing parameters, registers pushed around calls,
  // the tagged portion of the spill area, and the fixed header region
  // (JSFunction and Context), per the layout diagram below.
  // === MaglevFrame ===
  // +-----------------+-----------------------------------------
  // | out_param n     |  <-- parameters_base / sp
  // |       ...       |
  // | out_param 0     |
  // +-----------------+-----------------------------------------
  // | pushed_double n |  <-- parameters_limit          ^
  // |       ...       |                                |
  // | pushed_double 0 |                                |
  // +- - - - - - - - -+                     num_extra_spill_slots
  // | pushed_reg n    |                                |
  // |       ...       |                                |
  // | pushed_reg 0    |  <-- pushed_register_base      v
  // +-----------------+-----------------------------------------
  // | untagged_slot n |                                ^
  // |       ...       |                                |
  // | untagged_slot 0 |                                |
  // +- - - - - - - - -+                       spill_slot_count
  // | tagged_slot n   |                                |
  // |       ...       |                                |
  // | tagged_slot 0   |                                v
  // +-----------------+-----------------------------------------
  // | argc            |  <-- frame_header_base         ^
  // |- - - - - - - - -|                                |
  // | JSFunction      |                                |
  // |- - - - - - - - -|                                |
  // | Context         |                                |
  // |- - - - - - - - -|                       kFixedSlotCount
  // | [Constant Pool] |                                |
  // |- - - - - - - - -|                                |
  // | saved frame ptr |  <-- fp                        |
  // |- - - - - - - - -|                                |
  // | return addr     |                                v
  // +-----------------+-----------------------------------------

  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  CHECK(entry->code.has_value());
  Tagged<GcSafeCode> code = entry->code.value();
  DCHECK(code->is_maglevved());
  MaglevSafepointEntry maglev_safepoint_entry =
      GetMaglevSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);

#ifdef DEBUG
  // Assert that it is a JS frame and it has a context.
  intptr_t marker =
#endif  // DEBUG

  // Fixed frame slots.
  FullObjectSlot frame_header_base(
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));

  // Determine spill slot area count.
  uint32_t tagged_slot_count = maglev_safepoint_entry.num_tagged_slots();
  uint32_t spill_slot_count =
      code->stack_slots() - StandardFrameConstants::kFixedSlotCount;

  // Visit the outgoing parameters if they are tagged.
  DCHECK(code->has_tagged_outgoing_params());
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit =
      frame_header_base - spill_slot_count -
      maglev_safepoint_entry.num_extra_spill_slots();
  v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                       parameters_limit);

  // Maglev can also spill registers, tagged and untagged, just before making
  // a call. These are distinct from normal spill slots and live between the
  // normal spill slots and the pushed parameters. Some of these are tagged,
  // as indicated by the tagged register indexes, and should be visited too.
  if (maglev_safepoint_entry.num_extra_spill_slots() > 0) {
    FullObjectSlot pushed_register_base =
        frame_header_base - spill_slot_count - 1;
    uint32_t tagged_register_indexes =
        maglev_safepoint_entry.tagged_register_indexes();
    // Walk the set bits; each one marks a pushed register slot that holds a
    // tagged value.
    while (tagged_register_indexes != 0) {
      int index = base::bits::CountTrailingZeros(tagged_register_indexes);
      tagged_register_indexes &= ~(1 << index);
      FullObjectSlot spill_slot = pushed_register_base - index;
      VisitSpillSlot(isolate(), v, spill_slot);
    }
  }

  // Visit tagged spill slots.
  for (uint32_t i = 0; i < tagged_slot_count; ++i) {
    FullObjectSlot spill_slot = frame_header_base - 1 - i;
    VisitSpillSlot(isolate(), v, spill_slot);
  }

  // Visit fixed header region (the context and JSFunction), skipping the
  // argument count since it is stored untagged.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base + 1,
                       frame_header_limit);

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, constant_pool_address(), code);
}
2203
2205 return Summarize().frames.back().AsJavaScript().function();
2206}
2207
2209 int deopt_index = SafepointEntry::kNoDeoptIndex;
2210 Tagged<Code> code = LookupCode();
2211 const Tagged<DeoptimizationData> data =
2212 GetDeoptimizationData(code, &deopt_index);
2213 if (deopt_index == SafepointEntry::kNoDeoptIndex) {
2214 CHECK(data.is_null());
2215 FATAL(
2216 "Missing deoptimization information for OptimizedJSFrame::Summarize.");
2217 }
2218
2220 data->FrameTranslation(), data->TranslationIndex(deopt_index).value());
2221 // Search the innermost interpreter frame and get its bailout id. The
2222 // translation stores frames bottom up.
2223 int js_frames = it.EnterBeginOpcode().js_frame_count;
2224 DCHECK_GT(js_frames, 0);
2226 while (js_frames > 0) {
2227 TranslationOpcode frame = it.SeekNextJSFrame();
2228 --js_frames;
2230 offset = BytecodeOffset(it.NextOperand());
2231 it.SkipOperands(TranslationOpcodeOperandCount(frame) - 1);
2232 } else {
2233 it.SkipOperands(TranslationOpcodeOperandCount(frame));
2234 }
2235 }
2236
2237 return offset;
2238}
2239
2241 Tagged<GcSafeCode> code_lookup) const {
2242#if V8_ENABLE_WEBASSEMBLY
2243 // With inlined JS-to-Wasm calls, we can be in an OptimizedJSFrame and
2244 // directly call a Wasm function from JavaScript. In this case the Wasm frame
2245 // is responsible for visiting incoming potentially tagged parameters.
2246 // (This is required for tail-call support: If the direct callee tail-called
2247 // another function which then caused a GC, the caller would not be able to
2248 // determine where there might be tagged parameters.)
2249 wasm::WasmCode* wasm_callee =
2251 if (wasm_callee) return false;
2252
2253 Tagged<Code> wrapper =
2254 isolate()->builtins()->code(Builtin::kWasmToJsWrapperCSA);
2255 if (callee_pc() >= wrapper->instruction_start() &&
2256 callee_pc() <= wrapper->instruction_end()) {
2257 return false;
2258 }
2259 return code_lookup->has_tagged_outgoing_params();
2260#else
2261 return code_lookup->has_tagged_outgoing_params();
2262#endif // V8_ENABLE_WEBASSEMBLY
2263}
2264
2266 std::optional<Tagged<GcSafeCode>> code_lookup =
2268 if (!code_lookup.has_value()) return {};
2269 return code_lookup.value();
2270}
2271
  DCHECK(!iterator_->IsStackFrameIteratorForProfiler());

  // Visits, as GC roots, every potentially tagged slot of this TurboFan JS
  // frame: outgoing parameters (unless we are in a fast C call), the tagged
  // spill slots recorded in the safepoint, and the fixed header region
  // (JSFunction and Context), per the layout diagram below.
  // === TurbofanJSFrame ===
  // +-----------------+-----------------------------------------
  // | out_param n     |  <-- parameters_base / sp
  // |       ...       |
  // | out_param 0     |
  // +-----------------+-----------------------------------------
  // | spill_slot n    |  <-- parameters_limit          ^
  // |       ...       |                      spill_slot_count
  // | spill_slot 0    |                                v
  // +-----------------+-----------------------------------------
  // | argc            |  <-- frame_header_base         ^
  // |- - - - - - - - -|                                |
  // | JSFunction      |                                |
  // |- - - - - - - - -|                                |
  // | Context         |                                |
  // |- - - - - - - - -|                       kFixedSlotCount
  // | [Constant Pool] |                                |
  // |- - - - - - - - -|                                |
  // | saved frame ptr |  <-- fp                        |
  // |- - - - - - - - -|                                |
  // | return addr     |                                v
  // +-----------------+-----------------------------------------

  // Find the code and compute the safepoint information.
  const Address inner_pointer = maybe_unauthenticated_pc();
  CHECK(entry->code.has_value());
  Tagged<GcSafeCode> code = entry->code.value();
  DCHECK(code->is_turbofanned());
  SafepointEntry safepoint_entry =
      GetSafepointEntryFromCodeCache(isolate(), inner_pointer, entry);

#ifdef DEBUG
  // Assert that it is a JS frame and it has a context.
  intptr_t marker =
#endif  // DEBUG

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  int spill_slot_count =
      code->stack_slots() - StandardFrameConstants::kFixedSlotCount;

  // Fixed frame slots.
  FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));

  FullObjectSlot parameters_limit = frame_header_base - spill_slot_count;

  if (!InFastCCall()) {
    // Parameters passed to the callee.
    FullObjectSlot parameters_base(&Memory<Address>(sp()));

    // Visit the outgoing parameters if they are tagged.
    if (HasTaggedOutgoingParams(code)) {
      v->VisitRootPointers(Root::kStackRoots, nullptr, parameters_base,
                           parameters_limit);
    }
  } else {
    // There are no outgoing parameters to visit for fast C calls.
  }

  // Spill slots are in the region ]frame_header_base, parameters_limit];
  // Visit pointer spill slots and locals.
  DCHECK_GE((code->stack_slots() + kBitsPerByte) / kBitsPerByte,
            safepoint_entry.tagged_slots().size());
  VisitSpillSlots(isolate(), v, parameters_limit,
                  safepoint_entry.tagged_slots());

  // Visit fixed header region (the context and JSFunction), skipping the
  // argument count since it is stored untagged.
  v->VisitRootPointers(Root::kStackRoots, nullptr, frame_header_base + 1,
                       frame_header_limit);

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, constant_pool_address(), code);
}
2355
2359
2363
2365 std::optional<Tagged<GcSafeCode>> code_lookup =
2367 if (!code_lookup.has_value()) return {};
2368 return code_lookup.value();
2369}
2370
2373 int pc_offset = -1;
2374 std::tie(code, pc_offset) = LookupCodeAndOffset();
2375 DCHECK(code->is_turbofanned());
2376 DCHECK(code->has_handler_table());
2377 HandlerTable table(code);
2378 return table.LookupReturn(pc_offset);
2379}
2380
  // Builds the summaries for this stub frame. Most stub frames produce no
  // summary at all; the only exceptions are the builtins listed below, which
  // exist specifically to pretend to be another builtin throwing an
  // exception and therefore must appear in stack traces.
  FrameSummaries summaries;
#if V8_ENABLE_WEBASSEMBLY
  Tagged<Code> code = LookupCode();
  if (code->kind() != CodeKind::BUILTIN) return summaries;
  // We skip most stub frames from stack traces, but a few builtins
  // specifically exist to pretend to be another builtin throwing an
  // exception.
  switch (code->builtin_id()) {
    case Builtin::kThrowDataViewTypeError:
    case Builtin::kThrowDataViewDetachedError:
    case Builtin::kThrowDataViewOutOfBounds:
    case Builtin::kThrowIndexOfCalledOnNull:
    case Builtin::kThrowToLowerCaseCalledOnNull:
    case Builtin::kWasmIntToString: {
      // When adding builtins here, also implement naming support for them.
      DCHECK_NE(nullptr,
                Builtins::NameForStackTrace(isolate(), code->builtin_id()));
      FrameSummary::BuiltinFrameSummary summary(isolate(), code->builtin_id());
      summaries.frames.push_back(summary);
      break;
    }
    default:
      break;
  }
#endif  // V8_ENABLE_WEBASSEMBLY
  return summaries;
}
2409
2411 Memory<Address>(GetParameterSlot(index)) = value.ptr();
2412}
2413
2415 return IsConstructFrame(caller_fp());
2416}
2417
2421
2423 if (GcSafeLookupCode()->kind() == CodeKind::BUILTIN) {
2424 return static_cast<int>(
2425 Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset)) -
2427 } else {
2429 }
2430}
2431
2435
2437 std::vector<Tagged<SharedFunctionInfo>>* functions) const {
2438 DCHECK(functions->empty());
2439 functions->push_back(function()->shared());
2440}
2441
2443 std::vector<Handle<SharedFunctionInfo>>* functions) const {
2444 DCHECK(functions->empty());
2445 std::vector<Tagged<SharedFunctionInfo>> raw_functions;
2446 GetFunctions(&raw_functions);
2447 for (const auto& raw_function : raw_functions) {
2448 functions->push_back(
2449 Handle<SharedFunctionInfo>(raw_function, function()->GetIsolate()));
2450 }
2451}
2452
// Returns the code that is conceptually executing in this frame together with
// an offset into it: the bytecode array plus bytecode offset for interpreted
// and baseline frames, otherwise the looked-up code object of the frame.
std::tuple<Tagged<AbstractCode>, int> JavaScriptFrame::GetActiveCodeAndOffset()
    const {
  int code_offset = 0;
  Tagged<AbstractCode> abstract_code;
  if (is_interpreted()) {
    const InterpretedFrame* iframe = InterpretedFrame::cast(this);
    code_offset = iframe->GetBytecodeOffset();
    abstract_code = Cast<AbstractCode>(iframe->GetBytecodeArray());
  } else if (is_baseline()) {
    // TODO(pthier): AbstractCode should fully support Baseline code.
    const BaselineFrame* baseline_frame = BaselineFrame::cast(this);
    code_offset = baseline_frame->GetBytecodeOffset();
    abstract_code = Cast<AbstractCode>(baseline_frame->GetBytecodeArray());
  } else {
    int pc_offset = -1;
    std::tie(code, pc_offset) = LookupCodeAndOffset();
    abstract_code = Cast<AbstractCode>(code);
    // NOTE(review): on this path code_offset stays 0 while pc_offset is
    // discarded -- confirm this is intended for non-bytecode frames.
  }
  return {abstract_code, code_offset};
}
2474
2478
2481 int offset = -1;
2482 std::tie(code, offset) = GcSafeLookupCodeAndOffset();
2483 DirectHandle<AbstractCode> abstract_code(
2484 Cast<AbstractCode>(code->UnsafeCastToCode()), isolate());
2487 isolate(), receiver(), function(), *abstract_code, offset,
2488 IsConstructor(), *params);
2489 return FrameSummaries(summary);
2490}
2491
2495
2497 // During deoptimization of an optimized function, we may have yet to
2498 // materialize some closures on the stack. The arguments marker object
2499 // marks this case.
2500 DCHECK(IsJSFunction(function_slot_object()) ||
2501 ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
2502 return function_slot_object();
2503}
2504
2506 // TODO(cbruni): document this better
2507 return GetParameter(-1);
2508}
2509
2512 Tagged<Object> maybe_result(Memory<Address>(fp() + offset));
2513 DCHECK(!IsSmi(maybe_result));
2514 return maybe_result;
2515}
2516
2520
2522 int* stack_depth, HandlerTable::CatchPrediction* prediction) {
2523 if (DEBUG_BOOL) {
2524 Tagged<Code> code_lookup_result = LookupCode();
2525 CHECK(!code_lookup_result->has_handler_table());
2526 CHECK(!code_lookup_result->is_optimized_code() ||
2527 code_lookup_result->kind() == CodeKind::BASELINE);
2528 }
2529 return -1;
2530}
2531
2533 Tagged<JSFunction> function,
2535 int code_offset, FILE* file,
2536 bool print_line_number) {
2537 PtrComprCageBase cage_base = GetPtrComprCageBase(function);
2538 PrintF(file, "%s", CodeKindToMarker(code->kind(cage_base), false));
2539 function->PrintName(file);
2540 PrintF(file, "+%d", code_offset);
2541 if (print_line_number) {
2542 Tagged<SharedFunctionInfo> shared = function->shared();
2543 int source_pos = code->SourcePosition(isolate, code_offset);
2544 Tagged<Object> maybe_script = shared->script();
2545 if (IsScript(maybe_script)) {
2546 Tagged<Script> script = Cast<Script>(maybe_script);
2547 int line = script->GetLineNumber(source_pos) + 1;
2548 Tagged<Object> script_name_raw = script->name();
2549 if (IsString(script_name_raw)) {
2550 Tagged<String> script_name = Cast<String>(script->name());
2551 std::unique_ptr<char[]> c_script_name = script_name->ToCString();
2552 PrintF(file, " at %s:%d", c_script_name.get(), line);
2553 } else {
2554 PrintF(file, " at <unknown>:%d", line);
2555 }
2556 } else {
2557 PrintF(file, " at <unknown>:<unknown>");
2558 }
2559 }
2560}
2561
// Prints a one-line description of the top-most JavaScript frame on the
// current stack to {file}: an optional "new " prefix for constructor calls,
// the function name and code offset, optionally the script name and line
// number, and optionally the actually supplied arguments.
void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  JavaScriptStackFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_javascript()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      Tagged<JSFunction> function = frame->function();
      int code_offset = 0;
      std::tie(code, code_offset) = frame->GetActiveCodeAndOffset();
      PrintFunctionAndOffset(isolate, function, code, code_offset, file,
                             print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        ShortPrint(frame->receiver(), file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          ShortPrint(frame->GetParameter(i), file);
        }
        PrintF(file, ")");
      }
      // Only the first (top-most) JavaScript frame is printed.
      break;
    }
    it.Advance();
  }
}
2595
2596// static
2598 Isolate* isolate, Tagged<JSFunction> function, Tagged<AbstractCode> code,
2599 int code_offset) {
2600 auto ic_stats = ICStats::instance();
2601 ICInfo& ic_info = ic_stats->Current();
2602 PtrComprCageBase cage_base = GetPtrComprCageBase(function);
2603 Tagged<SharedFunctionInfo> shared = function->shared(cage_base);
2604
2605 ic_info.function_name = ic_stats->GetOrCacheFunctionName(isolate, function);
2606 ic_info.script_offset = code_offset;
2607
2608 int source_pos = code->SourcePosition(isolate, code_offset);
2609 Tagged<Object> maybe_script = shared->script(cage_base, kAcquireLoad);
2610 if (IsScript(maybe_script, cage_base)) {
2611 Tagged<Script> script = Cast<Script>(maybe_script);
2613 script->GetPositionInfo(source_pos, &info);
2614 ic_info.line_num = info.line + 1;
2615 ic_info.column_num = info.column + 1;
2616 ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
2617 }
2618}
2619
2621 return Tagged<Object>(Memory<Address>(GetParameterSlot(index)));
2622}
2623
2625 DCHECK(!iterator_->IsStackFrameIteratorForProfiler() &&
2626 isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
2627 return function()
2628 ->shared()
2629 ->internal_formal_parameter_count_without_receiver();
2630}
2631
2633 return static_cast<int>(
2634 Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset)) -
2636}
2637
2639 if (V8_LIKELY(!v8_flags.detailed_error_stack_trace)) {
2640 return isolate()->factory()->empty_fixed_array();
2641 }
2642 int param_count = ComputeParametersCount();
2643 DirectHandle<FixedArray> parameters =
2644 isolate()->factory()->NewFixedArray(param_count);
2645 for (int i = 0; i < param_count; i++) {
2646 parameters->set(i, GetParameter(i));
2647 }
2648
2649 return parameters;
2650}
2651
2656
2658 // Assert that the first allocatable register is also the argument count
2659 // register.
2660 DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
2662 Tagged<Object> argc_object(
2664 return Smi::ToInt(argc_object) - kJSArgcReceiverSlots;
2665}
2666
2668 Address height_slot =
2670 intptr_t height = Smi::ToInt(Tagged<Smi>(Memory<Address>(height_slot)));
2671 return height;
2672}
2673
2678
2680 Tagged<Object> exception) {
2681 int argc = ComputeParametersCount();
2682 Address exception_argument_slot =
2684 (argc - 1) * kSystemPointerSize;
2685
2686 // Only allow setting exception if previous value was the hole.
2687 CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
2688 Tagged<Object>(Memory<Address>(exception_argument_slot)));
2689 Memory<Address>(exception_argument_slot) = exception.ptr();
2690}
2691
2694 Tagged<AbstractCode> abstract_code, int code_offset, bool is_constructor,
2695 Tagged<FixedArray> parameters)
2697 receiver_(receiver, isolate),
2698 function_(function, isolate),
2699 abstract_code_(abstract_code, isolate),
2700 code_offset_(code_offset),
2701 is_constructor_(is_constructor),
2702 parameters_(parameters, isolate) {
2704 // It might be an ApiCallbackBuiltin inlined into optimized
2705 // code generated by Maglev.
2706 (v8_flags.maglev_inline_api_calls &&
2707 abstract_code->kind(isolate) == CodeKind::MAGLEV &&
2708 function->shared()->IsApiFunction()));
2709}
2710
2712 if (IsJavaScript()) {
2713 javascript_summary_.EnsureSourcePositionsAvailable();
2714 }
2715}
2716
2718 if (IsJavaScript()) {
2719 return javascript_summary_.AreSourcePositionsAvailable();
2720 }
2721 return true;
2722}
2723
2728
2730 return !v8_flags.enable_lazy_source_positions ||
2731 function()
2732 ->shared()
2733 ->GetBytecodeArray(isolate())
2734 ->HasSourcePositionTable();
2735}
2736
2738 return function()->shared()->IsSubjectToDebugging();
2739}
2740
2742 return abstract_code()->SourcePosition(isolate(), code_offset());
2743}
2744
2746 return abstract_code()->SourceStatementPosition(isolate(), code_offset());
2747}
2748
2750 return handle(function_->shared()->script(), isolate());
2751}
2752
2754 const {
2755 return direct_handle(function_->native_context(), isolate());
2756}
2757
2761 DirectHandle<Script> script(Cast<Script>(shared->script()), isolate());
2763 if (function_name->length() == 0 &&
2764 script->compilation_type() == Script::CompilationType::kEval) {
2765 function_name = isolate()->factory()->eval_string();
2766 }
2767 int bytecode_offset = code_offset();
2768 if (bytecode_offset == kFunctionEntryBytecodeOffset) {
2769 // For the special function entry bytecode offset (-1), which signals
2770 // that the stack trace was captured while the function entry was
2771 // executing (i.e. during the interrupt check), we cannot store this
2772 // sentinel in the bit field, so we just eagerly lookup the source
2773 // position within the script.
2775 int source_position =
2776 abstract_code()->SourcePosition(isolate(), bytecode_offset);
2777 return isolate()->factory()->NewStackFrameInfo(
2778 script, source_position, function_name, is_constructor());
2779 }
2780 return isolate()->factory()->NewStackFrameInfo(
2781 shared, bytecode_offset, function_name, is_constructor());
2782}
2783
2784#if V8_ENABLE_WEBASSEMBLY
2785FrameSummary::WasmFrameSummary::WasmFrameSummary(
2787 wasm::WasmCode* code, int byte_offset, int function_index,
2788 bool at_to_number_conversion)
2789 : FrameSummaryBase(isolate, WASM),
2790 instance_data_(instance_data),
2791 at_to_number_conversion_(at_to_number_conversion),
2792 code_(code),
2793 byte_offset_(byte_offset),
2794 function_index_(function_index) {}
2795
2796Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
2797 return isolate()->global_proxy();
2798}
2799
2800uint32_t FrameSummary::WasmFrameSummary::function_index() const {
2801 return function_index_;
2802}
2803
2804int FrameSummary::WasmFrameSummary::SourcePosition() const {
2805 const wasm::WasmModule* module = wasm_trusted_instance_data()->module();
2806 return GetSourcePosition(module, function_index(), code_offset(),
2807 at_to_number_conversion());
2808}
2809
2810Handle<Script> FrameSummary::WasmFrameSummary::script() const {
2811 return handle(wasm_instance()->module_object()->script(), isolate());
2812}
2813
2814DirectHandle<WasmInstanceObject> FrameSummary::WasmFrameSummary::wasm_instance()
2815 const {
2816 // TODO(42204563): Avoid crashing if the instance object is not available.
2817 CHECK(instance_data_->has_instance_object());
2818 return direct_handle(instance_data_->instance_object(), isolate());
2819}
2820
2821DirectHandle<Context> FrameSummary::WasmFrameSummary::native_context() const {
2822 return direct_handle(wasm_trusted_instance_data()->native_context(),
2823 isolate());
2824}
2825
2826DirectHandle<StackFrameInfo>
2827FrameSummary::WasmFrameSummary::CreateStackFrameInfo() const {
2828 DirectHandle<String> function_name =
2829 GetWasmFunctionDebugName(isolate(), instance_data_, function_index());
2830 return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(),
2831 function_name, false);
2832}
2833
2834FrameSummary::WasmInlinedFrameSummary::WasmInlinedFrameSummary(
2836 int function_index, int op_wire_bytes_offset)
2837 : FrameSummaryBase(isolate, WASM_INLINED),
2838 instance_data_(instance_data),
2839 function_index_(function_index),
2840 op_wire_bytes_offset_(op_wire_bytes_offset) {}
2841
2842DirectHandle<WasmInstanceObject>
2843FrameSummary::WasmInlinedFrameSummary::wasm_instance() const {
2844 // TODO(42204563): Avoid crashing if the instance object is not available.
2845 CHECK(instance_data_->has_instance_object());
2846 return direct_handle(instance_data_->instance_object(), isolate());
2847}
2848
2849Handle<Object> FrameSummary::WasmInlinedFrameSummary::receiver() const {
2850 return isolate()->global_proxy();
2851}
2852
2853uint32_t FrameSummary::WasmInlinedFrameSummary::function_index() const {
2854 return function_index_;
2855}
2856
2857int FrameSummary::WasmInlinedFrameSummary::SourcePosition() const {
2858 const wasm::WasmModule* module = instance_data_->module();
2859 return GetSourcePosition(module, function_index(), code_offset(), false);
2860}
2861
2862Handle<Script> FrameSummary::WasmInlinedFrameSummary::script() const {
2863 return handle(wasm_instance()->module_object()->script(), isolate());
2864}
2865
2866DirectHandle<Context> FrameSummary::WasmInlinedFrameSummary::native_context()
2867 const {
2868 return direct_handle(wasm_trusted_instance_data()->native_context(),
2869 isolate());
2870}
2871
2872DirectHandle<StackFrameInfo>
2873FrameSummary::WasmInlinedFrameSummary::CreateStackFrameInfo() const {
2874 DirectHandle<String> function_name =
2875 GetWasmFunctionDebugName(isolate(), instance_data_, function_index());
2876 return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(),
2877 function_name, false);
2878}
2879
2880#if V8_ENABLE_DRUMBRAKE
// Summary of a frame executed by the wasm interpreter (DrumBrake); records
// the instance, the function index, and a byte offset.
FrameSummary::WasmInterpretedFrameSummary::WasmInterpretedFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
    uint32_t function_index, int byte_offset)
    : FrameSummaryBase(isolate, WASM_INTERPRETED),
      wasm_instance_(instance),
      function_index_(function_index),
      byte_offset_(byte_offset) {}
2888
2889Handle<Object> FrameSummary::WasmInterpretedFrameSummary::receiver() const {
2890 return wasm_instance_->GetIsolate()->global_proxy();
2891}
2892
2893int FrameSummary::WasmInterpretedFrameSummary::SourcePosition() const {
2894 const wasm::WasmModule* module = wasm_instance()->module_object()->module();
2895 return GetSourcePosition(module, function_index(), byte_offset(),
2896 false /*at_to_number_conversion*/);
2897}
2898
2900FrameSummary::WasmInterpretedFrameSummary::instance_data() const {
2901 return handle(wasm_instance_->trusted_data(isolate()), isolate());
2902}
2903
2904Handle<Script> FrameSummary::WasmInterpretedFrameSummary::script() const {
2905 return handle(wasm_instance()->module_object()->script(),
2906 wasm_instance()->GetIsolate());
2907}
2908
2909DirectHandle<Context>
2910FrameSummary::WasmInterpretedFrameSummary::native_context() const {
2911 return handle(wasm_instance_->trusted_data(isolate())->native_context(),
2912 isolate());
2913}
2914
2915DirectHandle<StackFrameInfo>
2916FrameSummary::WasmInterpretedFrameSummary::CreateStackFrameInfo() const {
2917 DirectHandle<String> function_name =
2918 GetWasmFunctionDebugName(isolate(), instance_data(), function_index());
2919 return isolate()->factory()->NewStackFrameInfo(script(), SourcePosition(),
2920 function_name, false);
2921}
2922#endif // V8_ENABLE_DRUMBRAKE
2923
// Frame summary for a builtin frame; only the builtin id is recorded.
FrameSummary::BuiltinFrameSummary::BuiltinFrameSummary(Isolate* isolate,
                                                       Builtin builtin)
    : FrameSummaryBase(isolate, FrameSummary::BUILTIN), builtin_(builtin) {}
2927
2928Handle<Object> FrameSummary::BuiltinFrameSummary::receiver() const {
2929 return isolate()->factory()->undefined_value();
2930}
2931
2932Handle<Object> FrameSummary::BuiltinFrameSummary::script() const {
2933 return isolate()->factory()->undefined_value();
2934}
2935
2936DirectHandle<Context> FrameSummary::BuiltinFrameSummary::native_context()
2937 const {
2938 return isolate()->native_context();
2939}
2940
2941DirectHandle<StackFrameInfo>
2942FrameSummary::BuiltinFrameSummary::CreateStackFrameInfo() const {
2943 DirectHandle<String> name_str =
2944 isolate()->factory()->NewStringFromAsciiChecked(
2946 return isolate()->factory()->NewStackFrameInfo(
2947 Cast<Script>(script()), SourcePosition(), name_str, false);
2948}
2949
2950#endif // V8_ENABLE_WEBASSEMBLY
2951
2953#define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
2954 case kind: \
2955 field.~type(); \
2956 break;
2957 switch (base_.kind()) {
2959 default:
2960 UNREACHABLE();
2961 }
2962#undef FRAME_SUMMARY_DESTR
2963}
2964
2966 FrameSummaries summaries = frame->Summarize();
2967 DCHECK_LT(0, summaries.size());
2968 return summaries.frames.back();
2969}
2970
2972 return Get(frame, 0);
2973}
2974
2976 FrameSummaries summaries = frame->Summarize();
2977 DCHECK_EQ(1, summaries.size());
2978 return summaries.frames.front();
2979}
2980
2982 DCHECK_LE(0, index);
2983 FrameSummaries summaries = frame->Summarize();
2984 DCHECK_GT(summaries.size(), index);
2985 return summaries.frames[index];
2986}
2987
2988#if V8_ENABLE_WEBASSEMBLY
2989#ifdef V8_ENABLE_DRUMBRAKE
2990#define CASE_WASM_INTERPRETED(name) \
2991 case WASM_INTERPRETED: \
2992 return wasm_interpreted_summary_.name();
2993#else // V8_ENABLE_DRUMBRAKE
2994#define CASE_WASM_INTERPRETED(name)
2995#endif // V8_ENABLE_DRUMBRAKE
2996#define FRAME_SUMMARY_DISPATCH(ret, name) \
2997 ret FrameSummary::name() const { \
2998 switch (base_.kind()) { \
2999 case JAVASCRIPT: \
3000 return javascript_summary_.name(); \
3001 case WASM: \
3002 return wasm_summary_.name(); \
3003 case WASM_INLINED: \
3004 return wasm_inlined_summary_.name(); \
3005 case BUILTIN: \
3006 return builtin_summary_.name(); \
3007 CASE_WASM_INTERPRETED(name) \
3008 default: \
3009 UNREACHABLE(); \
3010 } \
3011 }
3012#else
3013#define FRAME_SUMMARY_DISPATCH(ret, name) \
3014 ret FrameSummary::name() const { \
3015 DCHECK_EQ(JAVASCRIPT, base_.kind()); \
3016 return javascript_summary_.name(); \
3017 }
3018#endif // V8_ENABLE_WEBASSEMBLY
3019
3020FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
3024FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
3027FRAME_SUMMARY_DISPATCH(DirectHandle<Context>, native_context)
3028FRAME_SUMMARY_DISPATCH(DirectHandle<StackFrameInfo>, CreateStackFrameInfo)
3029
3030#undef CASE_WASM_INTERPRETED
3031#undef FRAME_SUMMARY_DISPATCH
3032
3034 DCHECK(is_optimized());
3035 FrameSummaries summaries;
3036
3037 // Delegate to JS frame in absence of deoptimization info.
3038 // TODO(turbofan): Revisit once we support deoptimization across the board.
3039 DirectHandle<Code> code(LookupCode(), isolate());
3040 if (code->kind() == CodeKind::BUILTIN) {
3042 }
3043
3044 int deopt_index = SafepointEntry::kNoDeoptIndex;
3045 Tagged<DeoptimizationData> const data =
3046 GetDeoptimizationData(*code, &deopt_index);
3047 if (deopt_index == SafepointEntry::kNoDeoptIndex) {
3048 // Hack: For maglevved function entry, we don't emit lazy deopt information,
3049 // so create an extra special summary here.
3050 //
3051 // TODO(leszeks): Remove this hack, by having a maglev-specific frame
3052 // summary which is a bit more aware of maglev behaviour and can e.g. handle
3053 // more compact safepointed frame information for both function entry and
3054 // loop stack checks.
3055 if (code->is_maglevved()) {
3056 DirectHandle<AbstractCode> abstract_code(
3057 Cast<AbstractCode>(function()->shared()->GetBytecodeArray(isolate())),
3058 isolate());
3059 DirectHandle<FixedArray> params = GetParameters();
3061 isolate(), receiver(), function(), *abstract_code,
3062 kFunctionEntryBytecodeOffset, IsConstructor(), *params);
3063 summaries.frames.push_back(summary);
3064 return summaries;
3065 }
3066
3067 CHECK(data.is_null());
3068 FATAL(
3069 "Missing deoptimization information for OptimizedJSFrame::Summarize.");
3070 }
3071
3072 // Prepare iteration over translation. We must not materialize values here
3073 // because we do not deoptimize the function.
3074 TranslatedState translated(this);
3075 translated.Prepare(fp());
3076
3077 // We create the summary in reverse order because the frames
3078 // in the deoptimization translation are ordered bottom-to-top.
3079 bool is_constructor = IsConstructor();
3080 for (auto it = translated.begin(); it != translated.end(); it++) {
3081 if (it->kind() == TranslatedFrame::kUnoptimizedFunction ||
3083 it->kind() ==
3085 DirectHandle<SharedFunctionInfo> shared_info = it->shared_info();
3086
3087 // The translation commands are ordered and the function is always
3088 // at the first position, and the receiver is next.
3089 TranslatedFrame::iterator translated_values = it->begin();
3090
3091 // Get the correct function in the optimized frame.
3092 CHECK(!translated_values->IsMaterializedObject());
3093 DirectHandle<JSFunction> function =
3094 Cast<JSFunction>(translated_values->GetValue());
3095 translated_values++;
3096
3097 // Get the correct receiver in the optimized frame.
3098 CHECK(!translated_values->IsMaterializedObject());
3099 DirectHandle<Object> receiver = translated_values->GetValue();
3100 translated_values++;
3101
3102 // Determine the underlying code object and the position within it from
3103 // the translation corresponding to the frame type in question.
3104 DirectHandle<AbstractCode> abstract_code;
3105 unsigned code_offset;
3107 it->kind() ==
3109 code_offset = 0;
3110 abstract_code = Cast<AbstractCode>(isolate()->builtins()->code_handle(
3111 Builtins::GetBuiltinFromBytecodeOffset(it->bytecode_offset())));
3112 } else {
3114 code_offset = it->bytecode_offset().ToInt();
3115 abstract_code =
3116 direct_handle(shared_info->abstract_code(isolate()), isolate());
3117 }
3118
3119 // Append full summary of the encountered JS frame.
3120 DirectHandle<FixedArray> params = GetParameters();
3122 isolate(), *receiver, *function, *abstract_code, code_offset,
3123 is_constructor, *params);
3124 summaries.frames.push_back(summary);
3125 is_constructor = false;
3126 } else if (it->kind() == TranslatedFrame::kConstructCreateStub ||
3128 // The next encountered JS frame will be marked as a constructor call.
3130 is_constructor = true;
3131#if V8_ENABLE_WEBASSEMBLY
3132 } else if (it->kind() == TranslatedFrame::kWasmInlinedIntoJS) {
3133 DirectHandle<SharedFunctionInfo> shared_info = it->shared_info();
3134 DCHECK_NE(isolate()->heap()->gc_state(), Heap::MARK_COMPACT);
3135
3136 Tagged<WasmExportedFunctionData> function_data =
3137 shared_info->wasm_exported_function_data();
3138 Handle<WasmTrustedInstanceData> instance{function_data->instance_data(),
3139 isolate()};
3140 int func_index = function_data->function_index();
3141 FrameSummary::WasmInlinedFrameSummary summary(
3142 isolate(), instance, func_index, it->bytecode_offset().ToInt());
3143 summaries.frames.push_back(summary);
3144#endif // V8_ENABLE_WEBASSEMBLY
3145 }
3146 }
3147 if (is_constructor) {
3148 // If {is_constructor} is true, then we haven't inlined the constructor in
3149 // the optimized frames and the previous visited frame (top of the inlined
3150 // frames) is a construct call.
3151 summaries.top_frame_is_construct_call = true;
3152 }
3153 return summaries;
3154}
3155
    int* data, HandlerTable::CatchPrediction* prediction) {
  // We cannot perform exception prediction on optimized code. Instead, we need
  // to use FrameSummary to find the corresponding code offset in unoptimized
  // code to perform prediction there.
  DCHECK_NULL(prediction);
  int pc_offset = -1;
  std::tie(code, pc_offset) = LookupCodeAndOffset();

  HandlerTable table(code);
  // No return entries means there is nothing to look up.
  if (table.NumberOfReturnEntries() == 0) return -1;

  DCHECK_NULL(data);  // Data is not used and will not return a value.

  // When the return pc has been replaced by a trampoline there won't be
  // a handler for this trampoline. Thus we need to use the return pc that
  // _used to be_ on the stack to get the right ExceptionHandler.
  if (CodeKindCanDeoptimize(code->kind())) {
    if (!code->marked_for_deoptimization()) {
      // Lazy deoptimize the function in case the handler table entry flags that
      // it wants to be lazily deoptimized on throw. This allows the optimizing
      // compiler to omit catch blocks that were never reached in practice.
      int optimized_exception_handler = table.LookupReturn(pc_offset);
      if (optimized_exception_handler != HandlerTable::kLazyDeopt) {
        return optimized_exception_handler;
      }
          function(), LazyDeoptimizeReason::kExceptionCaught, code);
    }
    DCHECK(code->marked_for_deoptimization());
    // Recover the return pc that the trampoline replaced.
    pc_offset = FindReturnPCForTrampoline(code, pc_offset);
  }
  return table.LookupReturn(pc_offset);
}
3191
                                            int trampoline_pc) const {
  // Maps a deopt-trampoline pc back to the original return pc recorded in the
  // Maglev safepoint table. Only valid for Maglev code that has already been
  // marked for deoptimization.
  DCHECK_EQ(code->kind(), CodeKind::MAGLEV);
  DCHECK(code->marked_for_deoptimization());
  MaglevSafepointTable safepoints(isolate(), pc(), code);
  return safepoints.find_return_pc(trampoline_pc);
}
3199
                                            int trampoline_pc) const {
  // Maps a deopt-trampoline pc back to the original return pc recorded in the
  // Turbofan safepoint table. Only valid for TURBOFAN_JS code that has already
  // been marked for deoptimization.
  DCHECK_EQ(code->kind(), CodeKind::TURBOFAN_JS);
  DCHECK(code->marked_for_deoptimization());
  SafepointTable safepoints(isolate(), pc(), code);
  return safepoints.find_return_pc(trampoline_pc);
}
3207
    Tagged<Code> code, int* deopt_index) const {
  // Looks up the DeoptimizationData and deopt index for this frame's pc via
  // the code's safepoint table. On failure, *deopt_index is set to
  // SafepointEntry::kNoDeoptIndex and an empty handle-like value is returned.
  DCHECK(is_optimized());

  Address pc = maybe_unauthenticated_pc();

  DCHECK(code->contains(isolate(), pc));
  DCHECK(CodeKindCanDeoptimize(code->kind()));

  // Maglev and Turbofan use different safepoint table formats.
  if (code->is_maglevved()) {
    MaglevSafepointEntry safepoint_entry =
        code->GetMaglevSafepointEntry(isolate(), pc);
    if (safepoint_entry.has_deoptimization_index()) {
      *deopt_index = safepoint_entry.deoptimization_index();
      return Cast<DeoptimizationData>(code->deoptimization_data());
    }
  } else {
    SafepointEntry safepoint_entry = code->GetSafepointEntry(isolate(), pc);
    if (safepoint_entry.has_deoptimization_index()) {
      *deopt_index = safepoint_entry.deoptimization_index();
      return Cast<DeoptimizationData>(code->deoptimization_data());
    }
  }
  *deopt_index = SafepointEntry::kNoDeoptIndex;
  return {};
}
3234
    std::vector<Tagged<SharedFunctionInfo>>* functions) const {
  // Collects the SharedFunctionInfo of every (inlined) JS frame encoded in
  // this frame's deoptimization translation.
  DCHECK(functions->empty());
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  Tagged<Code> code = LookupCode();
  if (code->kind() == CodeKind::BUILTIN) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  int deopt_index = SafepointEntry::kNoDeoptIndex;
  Tagged<DeoptimizationData> const data =
      GetDeoptimizationData(code, &deopt_index);
  DCHECK(!data.is_null());
  Tagged<DeoptimizationLiteralArray> const literal_array = data->LiteralArray();

      data->FrameTranslation(), data->TranslationIndex(deopt_index).value());
  int jsframe_count = it.EnterBeginOpcode().js_frame_count;

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count != 0) {
    TranslationOpcode opcode = it.SeekNextJSFrame();
    it.NextOperand();  // Skip bailout id.
    jsframe_count--;

    // The second operand of the frame points to the function.
    Tagged<Object> shared = literal_array->get(it.NextOperand());
    functions->push_back(Cast<SharedFunctionInfo>(shared));

    // Skip over remaining operands to advance to the next opcode.
    it.SkipOperands(TranslationOpcodeOperandCount(opcode) - 2);
  }
}
3274
3277 ((slot_index + 1) * kSystemPointerSize);
3278}
3279
  // Current source position: map the current bytecode offset back through the
  // bytecode array's source position table.
  Tagged<BytecodeArray> code = GetBytecodeArray();
  int code_offset = GetBytecodeOffset();
  return code->SourcePosition(code_offset);
}
3285
    int* context_register, HandlerTable::CatchPrediction* prediction) {
  // Range-based handler lookup on the bytecode's handler table. On a hit,
  // optionally reports the context register and catch prediction, marks the
  // handler as used, and returns the handler offset; otherwise returns
  // HandlerTable::kNoHandlerFound.
  HandlerTable table(GetBytecodeArray());
  int handler_index = table.LookupHandlerIndexForRange(GetBytecodeOffset());
  if (handler_index != HandlerTable::kNoHandlerFound) {
    if (context_register) *context_register = table.GetRangeData(handler_index);
    if (prediction) *prediction = table.GetRangePrediction(handler_index);
    table.MarkHandlerUsed(handler_index);
    return table.GetRangeHandler(handler_index);
  }
  return handler_index;
}
3298
3306
3315
  // An unoptimized frame corresponds to exactly one JS frame summary, built
  // directly from the bytecode array and current bytecode offset.
  DirectHandle<AbstractCode> abstract_code(
      Cast<AbstractCode>(GetBytecodeArray()), isolate());
  DirectHandle<FixedArray> params = GetParameters();
      isolate(), receiver(), function(), *abstract_code, GetBytecodeOffset(),
      IsConstructor(), *params);
  return FrameSummaries(summary);
}
3325
3334
3343
3352
  // Translate the current baseline pc back into a bytecode offset.
  Tagged<Code> code = LookupCode();
  return code->GetBytecodeOffsetForBaselinePC(this->pc(), GetBytecodeArray());
}
3357
3358intptr_t BaselineFrame::GetPCForBytecodeOffset(int bytecode_offset) const {
3359 Tagged<Code> code = LookupCode();
3360 return code->GetBaselineStartPCForBytecodeOffset(bytecode_offset,
3361 GetBytecodeArray());
3362}
3363
3368
3373
3379
3380#if V8_ENABLE_WEBASSEMBLY
// Prints a one-line (OVERVIEW) or multi-line description of this wasm frame
// into {accumulator}: module script name, function index/name, pc, and source
// position.
void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
                      int index) const {
  PrintIndex(accumulator, mode, index);

#if V8_ENABLE_DRUMBRAKE
  // In jitless (interpreter) mode the only wasm frames printed here are
  // wasm-to-js wrappers; there is no compiled code to describe.
  if (v8_flags.wasm_jitless) {
    DCHECK(is_wasm_to_js());
    accumulator->Add("Wasm-to-JS");
    if (mode != OVERVIEW) accumulator->Add("\n");
    return;
  }
#endif  // V8_ENABLE_DRUMBRAKE

  if (function_index() == wasm::kAnonymousFuncIndex) {
    accumulator->Add("Anonymous wasm wrapper [pc: %p]\n",
                     reinterpret_cast<void*>(pc()));
    return;
  }
  wasm::WasmCodeRefScope code_ref_scope;
  accumulator->Add(is_wasm_to_js() ? "Wasm-to-JS [" : "Wasm [");
  accumulator->PrintName(script()->name());
  Address instruction_start = wasm_code()->instruction_start();
  base::Vector<const uint8_t> raw_func_name =
      module_object()->GetRawFunctionName(function_index());
  // The raw name is not nul-terminated; copy (a truncated prefix of) it into
  // a local buffer so it can be printed with %s.
  const int kMaxPrintedFunctionName = 64;
  char func_name[kMaxPrintedFunctionName + 1];
  int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
  memcpy(func_name, raw_func_name.begin(), func_name_len);
  func_name[func_name_len] = '\0';
  int pos = position();
  const wasm::WasmModule* module = trusted_instance_data()->module();
  int func_index = function_index();
  int func_code_offset = module->functions[func_index].code.offset();
  accumulator->Add(
      "], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d) instance=%p\n",
      func_index, func_name, reinterpret_cast<void*>(pc()),
      static_cast<int>(pc() - instruction_start), pos, pos - func_code_offset,
      reinterpret_cast<void*>(trusted_instance_data()->ptr()));
  if (mode != OVERVIEW) accumulator->Add("\n");
}
3421
// Looks up the wasm code object containing this frame's pc.
wasm::WasmCode* WasmFrame::wasm_code() const {
      maybe_unauthenticated_pc());
}
3426
3427Tagged<WasmInstanceObject> WasmFrame::wasm_instance() const {
3428 // TODO(42204563): Avoid crashing if the instance object is not available.
3429 CHECK(trusted_instance_data()->has_instance_object());
3430 return trusted_instance_data()->instance_object();
3431}
3432
// Reads the trusted instance data from this frame's fixed instance-data slot.
Tagged<WasmTrustedInstanceData> WasmFrame::trusted_instance_data() const {
      Memory<Address>(fp() + WasmFrameConstants::kWasmInstanceDataOffset));
}
3438
3439wasm::NativeModule* WasmFrame::native_module() const {
3440 return trusted_instance_data()->native_module();
3441}
3442
3443Tagged<WasmModuleObject> WasmFrame::module_object() const {
3444 return trusted_instance_data()->module_object();
3445}
3446
3447int WasmFrame::function_index() const { return wasm_code()->index(); }
3448
3449Tagged<Script> WasmFrame::script() const { return module_object()->script(); }
3450
3451int WasmFrame::position() const {
3452 const wasm::WasmModule* module = trusted_instance_data()->module();
3453 return GetSourcePosition(module, function_index(), generated_code_offset(),
3454 at_to_number_conversion());
3455}
3456
3457int WasmFrame::generated_code_offset() const {
3458 wasm::WasmCode* code = wasm_code();
3459 int offset = static_cast<int>(pc() - code->instruction_start());
3460 return code->GetSourceOffsetBefore(offset);
3461}
3462
3463bool WasmFrame::is_inspectable() const { return wasm_code()->is_inspectable(); }
3464
3465Tagged<Object> WasmFrame::context() const {
3466 return trusted_instance_data()->native_context();
3467}
3468
// Builds summaries for this wasm frame plus every function inlined at the
// current pc. Frames whose child was reached via a tail call are skipped,
// because the child replaced them on the stack.
FrameSummaries WasmFrame::Summarize() const {
  FrameSummaries summaries;
  // The {WasmCode*} escapes this scope via the {FrameSummary}, which is fine,
  // since this code object is part of our stack.
  wasm::WasmCode* code = wasm_code();
  int offset =
      static_cast<int>(maybe_unauthenticated_pc() - code->instruction_start());
  Handle<WasmTrustedInstanceData> instance_data{trusted_instance_data(),
                                                isolate()};
  // Push regular non-inlined summary.
  SourcePosition pos = code->GetSourcePositionBefore(offset);
  bool at_conversion = at_to_number_conversion();
  bool child_was_tail_call = false;
  // Add summaries for each inlined function at the current location.
  while (pos.isInlined()) {
    // Use current pc offset as the code offset for inlined functions.
    // This is not fully correct but there isn't a real code offset of a stack
    // frame for an inlined function as the inlined function is not a true
    // function with a defined start and end in the generated code.
    const auto [func_index, was_tail_call, caller_pos] =
        code->GetInliningPosition(pos.InliningId());
    if (!child_was_tail_call) {
      FrameSummary::WasmFrameSummary summary(isolate(), instance_data, code,
                                             pos.ScriptOffset(), func_index,
                                             at_conversion);
      summaries.frames.push_back(summary);
    }
    // Walk up one level of the inlining chain.
    pos = caller_pos;
    at_conversion = false;
    child_was_tail_call = was_tail_call;
  }

  // Finally, the outermost (non-inlined) function, unless tail-called away.
  if (!child_was_tail_call) {
    int func_index = code->index();
    FrameSummary::WasmFrameSummary summary(isolate(), instance_data, code,
                                           pos.ScriptOffset(), func_index,
                                           at_conversion);
    summaries.frames.push_back(summary);
  }

  // The caller has to be on top.
  std::reverse(summaries.frames.begin(), summaries.frames.end());
  return summaries;
}
3513
// Returns true if this frame is currently suspended inside the ToNumber
// conversion performed by a wasm-to-JS wrapper callee.
bool WasmFrame::at_to_number_conversion() const {
  if (callee_pc() == kNullAddress) return false;
  // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
  // ToNumber conversion call.
  wasm::WasmCode* wasm_code =
      wasm::GetWasmCodeManager()->LookupCode(isolate(), callee_pc());

  if (wasm_code) {
    // Compiled (specialized) wasm-to-js wrapper: use source positions.
    if (wasm_code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
    int offset = static_cast<int>(callee_pc() - wasm_code->instruction_start());
    int pos = wasm_code->GetSourceOffsetBefore(offset);
    // The imported call has position 0, ToNumber has position 1.
    // If there is no source position available, this is also not a ToNumber
    // call.
    DCHECK(pos == wasm::kNoCodePosition || pos == 0 || pos == 1);
    return pos == 1;
  }

  // Otherwise the callee must be the generic CSA wasm-to-js wrapper builtin.
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate()->inner_pointer_to_code_cache()->GetCacheEntry(callee_pc());
  CHECK(entry->code.has_value());
  Tagged<GcSafeCode> code = entry->code.value();
  if (code->builtin_id() != Builtin::kWasmToJsWrapperCSA) {
    return false;
  }

  // The generic wasm-to-js wrapper maintains a slot on the stack to indicate
  // its state. Initially this slot contains a pointer to the signature, so that
  // incoming parameters can be scanned. After all parameters have been
  // processed, this slot is reset to nullptr. After returning from JavaScript,
  // -1 is stored in the slot to indicate that any call from now on is a
  // ToNumber conversion.
  Address maybe_sig =
      Memory<Address>(callee_fp() + WasmToJSWrapperConstants::kSignatureOffset);

  return static_cast<intptr_t>(maybe_sig) == -1;
}
3551
// Looks up a wasm exception handler for the current pc in the code object's
// handler table; returns the handler offset, or -1 if there is none.
int WasmFrame::LookupExceptionHandlerInTable() {
  wasm::WasmCode* code =
  if (!code->IsAnonymous() && code->handler_table_size() > 0) {
    HandlerTable table(code);
    int pc_offset = static_cast<int>(pc() - code->instruction_start());
    return table.LookupReturn(pc_offset);
  }
  return -1;
}
3562
// Visits the spill slots of registers that held tagged values at the debug
// break, as recorded in the caller's safepoint entry.
void WasmDebugBreakFrame::Iterate(RootVisitor* v) const {
  DCHECK(caller_pc());
      caller_pc());
  SafepointEntry safepoint_entry = pair.second;
  uint32_t tagged_register_indexes = safepoint_entry.tagged_register_indexes();

  // Iterate the set bits of the register bitmask, lowest register first.
  while (tagged_register_indexes != 0) {
    int reg_code = base::bits::CountTrailingZeros(tagged_register_indexes);
    tagged_register_indexes &= ~(1 << reg_code);
    FullObjectSlot spill_slot(&Memory<Address>(
        fp() +
    v->VisitRootPointer(Root::kStackRoots, nullptr, spill_slot);
  }
}
3580
3581void WasmDebugBreakFrame::Print(StringStream* accumulator, PrintMode mode,
3582 int index) const {
3583 PrintIndex(accumulator, mode, index);
3584 accumulator->Add("WasmDebugBreak");
3585 if (mode != OVERVIEW) accumulator->Add("\n");
3586}
3587
Tagged<WasmInstanceObject> WasmToJsFrame::wasm_instance() const {
  // WasmToJsFrames hold the {WasmImportData} object in the instance slot.
  // Load the instance from there.
      Memory<Address>(fp() + WasmFrameConstants::kWasmInstanceDataOffset)});
  // TODO(42204563): Avoid crashing if the instance object is not available.
  CHECK(import_data->instance_data()->has_instance_object());
  return import_data->instance_data()->instance_object();
}
3597
3598Tagged<WasmTrustedInstanceData> WasmToJsFrame::trusted_instance_data() const {
3599 return wasm_instance()->trusted_data(isolate());
3600}
3601
// GC root visitation for JS-to-wasm wrapper frames. In the default (JIT)
// configuration there is nothing to visit; in jitless/DrumBrake mode the
// generic interpreter wrapper spills tagged values that must be scanned.
void JsToWasmFrame::Iterate(RootVisitor* v) const {
  // WrapperBuffer slot is RawPtr pointing to a stack.
  // Wasm instance and JS result array are passed as stack params.
  // So there is no need to visit them.

#if V8_ENABLE_DRUMBRAKE
  // Please reference GenericJSToWasmInterpreterWrapper for stack layout.
  if (v8_flags.wasm_jitless) {
    DCHECK(GetContainingCode(isolate(), pc()).value()->builtin_id() ==
           Builtin::kGenericJSToWasmInterpreterWrapper);

    // In a GenericJSToWasmInterpreterWrapper stack layout
    //  ------+-----------------+----------------------
    //        |   return addr   |
    //    fp  |- - - - - - - - -| -------------------|
    //        |     old fp      |                    |
    //   fp-p |- - - - - - - - -|                    |
    //        |  frame marker   |                    | no GC scan
    //  fp-2p |- - - - - - - - -|                    |
    //        |   scan_count    |                    |
    //  fp-3p |- - - - - - - - -| -------------------|
    //        |      ....       |                    |
    //        |      ....       | <- spill_slot_limit|
    //        |  spill slots    |                    | GC scan scan_count slots
    //    sp  |      ....       | <- spill_slot_base-|
    //        |                 |                    |
    // The [fp + BuiltinFrameConstants::kGCScanSlotCount] on the stack is a
    // value indicating how many values should be scanned from the top.
    intptr_t scan_count = *reinterpret_cast<intptr_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kGCScanSlotCountOffset);

    FullObjectSlot spill_slot_base(&Memory<Address>(sp()));
    FullObjectSlot spill_slot_limit(
        &Memory<Address>(sp() + scan_count * kSystemPointerSize));
    v->VisitRootPointers(Root::kStackRoots, nullptr, spill_slot_base,
                         spill_slot_limit);

    // We should scan the arg/return values array which may hold heap pointers
    // for reference type of parameter/return values.
    uint32_t signature_data = *reinterpret_cast<uint32_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kSignatureDataOffset);
    bool has_ref_args =
    bool has_ref_rets =

    // This value indicates the array is currently used as args array. If false,
    // it's an array for return values.
    bool is_args = *reinterpret_cast<intptr_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kArgRetsIsArgsOffset);
    // Nothing to scan if the active array holds no references.
    if ((is_args && !has_ref_args) || (!is_args && !has_ref_rets)) return;

    // Retrieve function signature.
    size_t return_count = *reinterpret_cast<size_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kReturnCountOffset);
    size_t param_count = *reinterpret_cast<size_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kParamCountOffset);
    const wasm::ValueType* reps = *reinterpret_cast<const wasm::ValueType**>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kSigRepsOffset);
    wasm::FunctionSig sig(return_count, param_count, reps);

    intptr_t slot_ptr = *reinterpret_cast<intptr_t*>(
        fp() + BuiltinWasmInterpreterWrapperConstants::kArgRetsAddressOffset);

    if (is_args) {
      // Only the parameters processed so far have been written to the array.
      size_t current_index = *reinterpret_cast<size_t*>(
          fp() + BuiltinWasmInterpreterWrapperConstants::kCurrentIndexOffset);
      DCHECK_LE(current_index, param_count);
      for (size_t i = 0; i < current_index; i++) {
        wasm::ValueType type = sig.GetParam(i);
        if (type.is_reference()) {
          // Make sure slot for ref args are 64-bit aligned.
          slot_ptr += (slot_ptr & 0x04);  // Branchless.
          FullObjectSlot array_slot(&Memory<Address>(slot_ptr));
          v->VisitRootPointer(Root::kStackRoots, nullptr, array_slot);
          slot_ptr += kSystemPointerSize;
        } else {
          // Non-reference values are packed; advance by their byte size.
          switch (type.kind()) {
            case wasm::kI32:
            case wasm::kF32:
              slot_ptr += sizeof(int32_t);
              break;
            case wasm::kI64:
            case wasm::kF64:
              slot_ptr += sizeof(int64_t);
              break;
            case wasm::kS128:
            default:
              UNREACHABLE();
          }
        }
      }
    } else {
      // When converting return values, all results are already in the array.
      for (size_t i = 0; i < return_count; i++) {
        wasm::ValueType type = sig.GetReturn(i);
        if (type.is_reference()) {
          // Make sure slot for ref args are 64-bit aligned.
          slot_ptr += (slot_ptr & 0x04);  // Branchless.
          FullObjectSlot array_slot(&Memory<Address>(slot_ptr));
          v->VisitRootPointer(Root::kStackRoots, nullptr, array_slot);
          slot_ptr += kSystemPointerSize;
        } else {
          // Non-reference values are packed; advance by their byte size.
          switch (type.kind()) {
            case wasm::kI32:
            case wasm::kF32:
              slot_ptr += sizeof(int32_t);
              break;
            case wasm::kI64:
            case wasm::kF64:
              slot_ptr += sizeof(int64_t);
              break;
            case wasm::kS128:
            default:
              UNREACHABLE();
          }
        }
      }
    }
  }
#endif  // V8_ENABLE_DRUMBRAKE
}
3724
3725#if V8_ENABLE_DRUMBRAKE
3726void WasmToJsFrame::Iterate(RootVisitor* v) const {
3727 if (v8_flags.wasm_jitless) {
3728 // Called from GenericWasmToJSInterpreterWrapper.
3729 CHECK(v8_flags.jitless);
3730 // The [fp + BuiltinFrameConstants::kGCScanSlotLimit] on the stack is a
3731 // pointer to the end of the stack frame area that contains tagged objects.
3732 Address limit_sp = *reinterpret_cast<intptr_t*>(
3733 fp() + WasmToJSInterpreterFrameConstants::kGCScanSlotLimitOffset);
3734
3735 FullObjectSlot spill_slot_base(&Memory<Address>(sp()));
3736 FullObjectSlot spill_slot_limit(limit_sp);
3737 v->VisitRootPointers(Root::kStackRoots, nullptr, spill_slot_base,
3738 spill_slot_limit);
3739 return;
3740 }
3741 WasmFrame::Iterate(v);
3742}
3743#endif // V8_ENABLE_DRUMBRAKE
3744
3745void StackSwitchFrame::Iterate(RootVisitor* v) const {
3746 // See JsToWasmFrame layout.
3747 // We cannot DCHECK that the pc matches the expected builtin code here,
3748 // because the return address is on a different stack.
3749 // The [fp + BuiltinFrameConstants::kGCScanSlotCountOffset] on the stack is a
3750 // value indicating how many values should be scanned from the top.
3751 intptr_t scan_count = Memory<intptr_t>(
3752 fp() + StackSwitchFrameConstants::kGCScanSlotCountOffset);
3753
3754 FullObjectSlot spill_slot_base(&Memory<Address>(sp()));
3755 FullObjectSlot spill_slot_limit(
3756 &Memory<Address>(sp() + scan_count * kSystemPointerSize));
3757 v->VisitRootPointers(Root::kStackRoots, nullptr, spill_slot_base,
3758 spill_slot_limit);
3759 // Also visit fixed spill slots that contain references.
3760 FullObjectSlot instance_slot(
3761 &Memory<Address>(fp() + StackSwitchFrameConstants::kImplicitArgOffset));
3762 v->VisitRootPointer(Root::kStackRoots, nullptr, instance_slot);
3763 FullObjectSlot result_array_slot(
3764 &Memory<Address>(fp() + StackSwitchFrameConstants::kResultArrayOffset));
3765 v->VisitRootPointer(Root::kStackRoots, nullptr, result_array_slot);
3766}
3767
3768#if V8_ENABLE_DRUMBRAKE
3769void WasmInterpreterEntryFrame::Iterate(RootVisitor* v) const {
3770 // WasmInterpreterEntryFrame stack layout
3771 // ------+-----------------+----------------------
3772 // | return addr | |
3773 // fp |- - - - - - - - -| -------------------|
3774 // | prev fp | |
3775 // fp-p |- - - - - - - - -| | no GC scan
3776 // | frame marker | |
3777 // fp-2p |- - - - - - - - -| -------------------|-------------
3778 // | WasmInstanceObj | | GC scan
3779 // fp-3p |- - - - - - - - -| -------------------|-------------
3780 // | function_index | |
3781 // fp-4p |- - - - - - - - -| -------------------| no GC scan
3782 // | array_start | |
3783 // fp-5p |- - - - - - - - -| -------------------|
3784
3785 static constexpr int kWasmInstanceObjOffset = -2 * kSystemPointerSize;
3786 FullObjectSlot slot_base(&Memory<Address>(fp() + kWasmInstanceObjOffset));
3787 FullObjectSlot slot_limit(
3788 &Memory<Address>(fp() + kWasmInstanceObjOffset + kSystemPointerSize));
3789 v->VisitRootPointers(Root::kStackRoots, nullptr, slot_base, slot_limit);
3790}
3791
3792void WasmInterpreterEntryFrame::Print(StringStream* accumulator, PrintMode mode,
3793 int index) const {
3794 PrintIndex(accumulator, mode, index);
3795 accumulator->Add("WASM INTERPRETER ENTRY [");
3796 Tagged<Script> script = this->script();
3797 accumulator->PrintName(script->name());
3798 accumulator->Add("]");
3799 if (mode != OVERVIEW) accumulator->Add("\n");
3800}
3801
// Builds one summary per interpreted wasm frame currently active under this
// entry frame.
FrameSummaries WasmInterpreterEntryFrame::Summarize() const {
  FrameSummaries summaries;
  Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
  std::vector<WasmInterpreterStackEntry> interpreted_stack =
      trusted_instance_data()->interpreter_object(), fp());

  for (auto& e : interpreted_stack) {
    FrameSummary::WasmInterpretedFrameSummary summary(
        isolate(), instance, e.function_index, e.byte_offset);
    summaries.frames.push_back(summary);
  }
  return summaries;
}
3816
Tagged<HeapObject> WasmInterpreterEntryFrame::unchecked_code() const {
  // NOTE(review): returns a default-constructed InstructionStream value;
  // presumably interpreter entry frames have no code object — confirm.
  return InstructionStream();
}
3820
3821Tagged<WasmInstanceObject> WasmInterpreterEntryFrame::wasm_instance() const {
3822 Tagged<Object> instance(Memory<Address>(
3823 fp() + WasmInterpreterFrameConstants::kWasmInstanceObjectOffset));
3824 return Cast<WasmInstanceObject>(instance);
3825}
3826
// Returns the trusted data attached to this frame's instance object.
WasmInterpreterEntryFrame::trusted_instance_data() const {
  return wasm_instance()->trusted_data(isolate());
}
3831
3832Tagged<Tuple2> WasmInterpreterEntryFrame::interpreter_object() const {
3833 return trusted_instance_data()->interpreter_object();
3834}
3835
3836Tagged<WasmModuleObject> WasmInterpreterEntryFrame::module_object() const {
3837 return trusted_instance_data()->module_object();
3838}
3839
// Returns the function index of the (possibly inlined) interpreted frame
// identified by {inlined_function_index}.
int WasmInterpreterEntryFrame::function_index(
    int inlined_function_index) const {
      trusted_instance_data()->interpreter_object(), fp(),
      inlined_function_index);
}
3846
3847int WasmInterpreterEntryFrame::position() const {
3848 return FrameSummary::GetBottom(this).AsWasmInterpreted().SourcePosition();
3849}
3850
3851Tagged<Object> WasmInterpreterEntryFrame::context() const {
3852 return trusted_instance_data()->native_context();
3853}
3854
// Computes the caller's stack pointer for this entry frame.
Address WasmInterpreterEntryFrame::GetCallerStackPointer() const {
}
3858#endif // V8_ENABLE_DRUMBRAKE
3859
3860// static
3861void StackSwitchFrame::GetStateForJumpBuffer(wasm::JumpBuffer* jmpbuf,
3862 State* state) {
3863 DCHECK_NE(jmpbuf->fp, kNullAddress);
3864 DCHECK_EQ(ComputeFrameType(jmpbuf->fp), STACK_SWITCH);
3865 FillState(jmpbuf->fp, jmpbuf->sp, state);
3866 state->pc_address = &jmpbuf->pc;
3867 state->is_stack_exit_frame = true;
3868 DCHECK_NE(*state->pc_address, kNullAddress);
3869}
3870
// Reads the declared (Smi-encoded) function index from this frame's slot.
int WasmLiftoffSetupFrame::GetDeclaredFunctionIndex() const {
  Tagged<Object> func_index(Memory<Address>(
  return Smi::ToInt(func_index);
}
3876
// Reads the NativeModule pointer spilled in this frame.
wasm::NativeModule* WasmLiftoffSetupFrame::GetNativeModule() const {
  return Memory<wasm::NativeModule*>(
}
3881
// Returns the stack slot holding the wasm instance data, as an object slot.
FullObjectSlot WasmLiftoffSetupFrame::wasm_instance_data_slot() const {
  return FullObjectSlot(&Memory<Address>(
}
3886
// Visits the tagged values of a Liftoff setup frame: the spilled instance
// slots, reference parameters spilled from registers, and tagged stack
// parameters of the function being compiled.
void WasmLiftoffSetupFrame::Iterate(RootVisitor* v) const {
  FullObjectSlot spilled_instance_slot(&Memory<Address>(
  v->VisitRootPointer(Root::kStackRoots, "spilled wasm instance",
                      spilled_instance_slot);
  v->VisitRootPointer(Root::kStackRoots, "wasm instance data",
                      wasm_instance_data_slot());

  wasm::NativeModule* native_module = GetNativeModule();
  int func_index = GetDeclaredFunctionIndex() +
                   native_module->module()->num_imported_functions;

  // Scan the spill slots of the parameter registers. Parameters in WebAssembly
  // get reordered such that first all value parameters get put into registers.
  // If there are more registers than value parameters, the remaining registers
  // are used for reference parameters. Therefore we can determine which
  // registers get used for which parameters by counting the number of value
  // parameters and the number of reference parameters.
  int num_int_params = 0;
  int num_ref_params = 0;
  const wasm::FunctionSig* sig =
      native_module->module()->functions[func_index].sig;
  for (auto param : sig->parameters()) {
    if (param == wasm::kWasmI32) {
      num_int_params++;
    } else if (param == wasm::kWasmI64) {
      // On 32-bit targets an i64 occupies two GP registers.
      num_int_params += kSystemPointerSize == 8 ? 1 : 2;
    } else if (param.is_reference()) {
      num_ref_params++;
    }
  }

  // There are no reference parameters, there is nothing to scan.
  if (num_ref_params == 0) return;

  int num_int_params_in_registers =
      std::min(num_int_params,
  int num_ref_params_in_registers =
      std::min(num_ref_params,
               num_int_params_in_registers);

  // Reference params occupy the registers right after the value params.
  for (int i = 0; i < num_ref_params_in_registers; ++i) {
    FullObjectSlot spill_slot(
        [num_int_params_in_registers + i]);

    v->VisitRootPointer(Root::kStackRoots, "register parameter", spill_slot);
  }

  // Next we scan the slots of stack parameters.
  wasm::WasmCode* wasm_code = native_module->GetCode(func_index);
  uint32_t first_tagged_stack_slot = wasm_code->first_tagged_parameter_slot();
  uint32_t num_tagged_stack_slots = wasm_code->num_tagged_parameter_slots();

  // Visit tagged parameters that have been passed to the function of this
  // frame. Conceptionally these parameters belong to the parent frame.
  // However, the exact count is only known by this frame (in the presence of
  // tail calls, this information cannot be derived from the call site).
  if (num_tagged_stack_slots > 0) {
    FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp()));
    tagged_parameter_base += first_tagged_stack_slot;
    FullObjectSlot tagged_parameter_limit =
        tagged_parameter_base + num_tagged_stack_slots;

    v->VisitRootPointers(Root::kStackRoots, "stack parameter",
                         tagged_parameter_base, tagged_parameter_limit);
  }
}
3957#endif // V8_ENABLE_WEBASSEMBLY
3958
3959namespace {
3960
3961void PrintFunctionSource(StringStream* accumulator,
3963 if (v8_flags.max_stack_trace_source_length != 0) {
3964 std::ostringstream os;
3965 os << "--------- s o u r c e c o d e ---------\n"
3966 << SourceCodeOf(shared, v8_flags.max_stack_trace_source_length)
3967 << "\n-----------------------------------------\n";
3968 accumulator->Add(os.str().c_str());
3969 }
3970}
3971
3972} // namespace
3973
3975 int index) const {
3978
3981 Tagged<JSFunction> function = this->function();
3982
3983 accumulator->PrintSecurityTokenIfChanged(isolate(), function);
3984 PrintIndex(accumulator, mode, index);
3985 PrintFrameKind(accumulator);
3986 if (IsConstructor()) accumulator->Add("new ");
3987 accumulator->PrintFunction(isolate(), function, receiver);
3988 accumulator->Add(" [%p]", function);
3989
3990 // Get scope information for nicer output, if possible. If code is nullptr, or
3991 // doesn't contain scope info, scope_info will return 0 for the number of
3992 // parameters, stack local variables, context local variables, stack slots,
3993 // or context slots.
3994 Tagged<ScopeInfo> scope_info = shared->scope_info();
3995 Tagged<Object> script_obj = shared->script();
3996 if (IsScript(script_obj)) {
3997 Tagged<Script> script = Cast<Script>(script_obj);
3998 accumulator->Add(" [");
3999 accumulator->PrintName(script->name());
4000
4001 if (is_interpreted()) {
4002 const InterpretedFrame* iframe = InterpretedFrame::cast(this);
4003 Tagged<BytecodeArray> bytecodes = iframe->GetBytecodeArray();
4004 int offset = iframe->GetBytecodeOffset();
4005 int source_pos = bytecodes->SourcePosition(offset);
4006 int line = script->GetLineNumber(source_pos) + 1;
4007 accumulator->Add(":%d] [bytecode=%p offset=%d]", line,
4008 reinterpret_cast<void*>(bytecodes.ptr()), offset);
4009 } else {
4010 int function_start_pos = shared->StartPosition();
4011 int line = script->GetLineNumber(function_start_pos) + 1;
4012 accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
4013 }
4014 }
4015
4016 accumulator->Add("(this=%o", receiver);
4017
4018 // Print the parameters.
4019 int parameters_count = ComputeParametersCount();
4020 for (int i = 0; i < parameters_count; i++) {
4021 accumulator->Add(",");
4022 accumulator->Add("%o", GetParameter(i));
4023 }
4024
4025 accumulator->Add(")");
4026 if (mode == OVERVIEW) {
4027 accumulator->Add("\n");
4028 return;
4029 }
4030 if (is_optimized()) {
4031 accumulator->Add(" {\n// optimized frame\n");
4032 PrintFunctionSource(accumulator, *shared);
4033 accumulator->Add("}\n");
4034 return;
4035 }
4036 accumulator->Add(" {\n");
4037
4038 // Compute the number of locals and expression stack elements.
4039 int heap_locals_count = scope_info->ContextLocalCount();
4040 int expressions_count = ComputeExpressionsCount();
4041
4042 // Try to get hold of the context of this frame.
4044 if (IsContext(this->context())) {
4045 context = Cast<Context>(this->context());
4046 while (context->IsWithContext()) {
4047 context = context->previous();
4048 DCHECK(!context.is_null());
4049 }
4050 }
4051
4052 // Print heap-allocated local variables.
4053 if (heap_locals_count > 0) {
4054 accumulator->Add(" // heap-allocated locals\n");
4055 }
4056 for (auto it : ScopeInfo::IterateLocalNames(scope_info, no_gc)) {
4057 accumulator->Add(" var ");
4058 accumulator->PrintName(it->name());
4059 accumulator->Add(" = ");
4060 if (!context.is_null()) {
4061 int slot_index = Context::MIN_CONTEXT_SLOTS + it->index();
4062 if (slot_index < context->length()) {
4063 accumulator->Add("%o", context->get(slot_index));
4064 } else {
4065 accumulator->Add(
4066 "// warning: missing context slot - inconsistent frame?");
4067 }
4068 } else {
4069 accumulator->Add("// warning: no context found - inconsistent frame?");
4070 }
4071 accumulator->Add("\n");
4072 }
4073
4074 // Print the expression stack.
4075 if (0 < expressions_count) {
4076 accumulator->Add(" // expression stack (top to bottom)\n");
4077 }
4078 for (int i = expressions_count - 1; i >= 0; i--) {
4079 accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
4080 }
4081
4082 PrintFunctionSource(accumulator, *shared);
4083
4084 accumulator->Add("}\n\n");
4085}
4086
4088 IteratePc(v, constant_pool_address(), GcSafeLookupCode());
4089}
4090
4092 const int last_object_offset = StandardFrameConstants::kLastObjectOffset;
4093 intptr_t marker =
4094 Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
4095 FullObjectSlot base(&Memory<Address>(sp()));
4096 FullObjectSlot limit(&Memory<Address>(fp() + last_object_offset) + 1);
4098 v->VisitRootPointers(Root::kStackRoots, nullptr, base, limit);
4099}
4100
4102 // The frame contains the actual argument count (intptr) that should not be
4103 // visited.
4104 FullObjectSlot argc(
4105 &Memory<Address>(fp() + StandardFrameConstants::kArgCOffset));
4106 const int last_object_offset = StandardFrameConstants::kLastObjectOffset;
4107 FullObjectSlot base(&Memory<Address>(sp()));
4108 FullObjectSlot limit(&Memory<Address>(fp() + last_object_offset) + 1);
4109 v->VisitRootPointers(Root::kStackRoots, nullptr, base, argc);
4110 v->VisitRootPointers(Root::kStackRoots, nullptr, argc + 1, limit);
4111 IteratePc(v, constant_pool_address(), GcSafeLookupCode());
4112}
4113
4115 Tagged<GcSafeCode> code = GcSafeLookupCode();
4116 IteratePc(v, constant_pool_address(), code);
4117 // Internal frames typically do not receive any arguments, hence their stack
4118 // only contains tagged pointers.
4119 // We are misusing the has_tagged_outgoing_params flag here to tell us whether
4120 // the full stack frame contains only tagged pointers or only raw values.
4121 // This is used for the WasmCompileLazy builtin, where we actually pass
4122 // untagged arguments and also store untagged values on the stack.
4123 if (code->has_tagged_outgoing_params()) IterateExpressions(v);
4124}
4125
4126// -------------------------------------------------------------------------
4127
4128namespace {
4129
4130// Predictably converts PC to uint32 by calculating offset of the PC in
4131// from the embedded builtins start or from respective MemoryChunk.
4132uint32_t PcAddressForHashing(Isolate* isolate, Address address) {
4133 uint32_t hashable_address;
4135 &hashable_address)) {
4136 return hashable_address;
4137 }
4138 return ObjectAddressForHashing(address);
4139}
4140
4141} // namespace
4142
4143InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
4145 DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
4146 uint32_t hash =
4147 ComputeUnseededHash(PcAddressForHashing(isolate_, inner_pointer));
4148 uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
4149 InnerPointerToCodeCacheEntry* entry = cache(index);
4150 if (entry->inner_pointer == inner_pointer) {
4151 // Why this DCHECK holds is nontrivial:
4152 //
4153 // - the cache is filled lazily on calls to this function.
4154 // - this function may be called while GC, and in particular
4155 // MarkCompactCollector::UpdatePointersAfterEvacuation, is in progress.
4156 // - the cache is cleared at the end of UpdatePointersAfterEvacuation.
4157 // - now, why does pointer equality hold even during moving GC?
4158 // - .. because GcSafeFindCodeForInnerPointer does not follow forwarding
4159 // pointers and always returns the old object (which is still valid,
4160 // *except* for the map_word).
4161 DCHECK_EQ(entry->code,
4162 isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer));
4163 } else {
4164 // Because this code may be interrupted by a profiling signal that
4165 // also queries the cache, we cannot update inner_pointer before the code
4166 // has been set. Otherwise, we risk trying to use a cache entry before
4167 // the code has been computed.
4168 entry->code =
4169 isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer);
4170 if (entry->code.value()->is_maglevved()) {
4171 entry->maglev_safepoint_entry.Reset();
4172 } else {
4173 entry->safepoint_entry.Reset();
4174 }
4175 entry->inner_pointer = inner_pointer;
4176 }
4177 return entry;
4178}
4179
4180// Frame layout helper class implementation.
4181// -------------------------------------------------------------------------
4182
4183namespace {
4184
4185// Some architectures need to push padding together with the TOS register
4186// in order to maintain stack alignment.
4187constexpr int TopOfStackRegisterPaddingSlots() {
4188 return ArgumentPaddingSlots(1);
4189}
4190
4191bool BuiltinContinuationModeIsWithCatch(BuiltinContinuationMode mode) {
4192 switch (mode) {
4195 return false;
4198 return true;
4199 }
4200 UNREACHABLE();
4201}
4202
4203} // namespace
4204
4205UnoptimizedFrameInfo::UnoptimizedFrameInfo(int parameters_count_with_receiver,
4206 int translation_height,
4207 bool is_topmost, bool pad_arguments,
4208 FrameInfoKind frame_info_kind) {
4209 const int locals_count = translation_height;
4210
4211 register_stack_slot_count_ =
4213
4214 static constexpr int kTheAccumulator = 1;
4215 static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
4216 int maybe_additional_slots =
4217 (is_topmost || frame_info_kind == FrameInfoKind::kConservative)
4218 ? (kTheAccumulator + kTopOfStackPadding)
4219 : 0;
4220 frame_size_in_bytes_without_fixed_ =
4221 (register_stack_slot_count_ + maybe_additional_slots) *
4223
4224 // The 'fixed' part of the frame consists of the incoming parameters and
4225 // the part described by InterpreterFrameConstants. This will include
4226 // argument padding, when needed.
4227 const int parameter_padding_slots =
4228 pad_arguments ? ArgumentPaddingSlots(parameters_count_with_receiver) : 0;
4229 const int fixed_frame_size =
4231 (parameters_count_with_receiver + parameter_padding_slots) *
4233 frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ + fixed_frame_size;
4234}
4235
4236// static
4238 int parameters_count) {
4239 return (parameters_count + ArgumentPaddingSlots(parameters_count)) *
4241}
4242
4244 bool is_topmost,
4245 FrameInfoKind frame_info_kind) {
4246 // Note: This is according to the Translation's notion of 'parameters' which
4247 // differs to that of the SharedFunctionInfo, e.g. by including the receiver.
4248 const int parameters_count = translation_height;
4249
4250 // If the construct frame appears to be topmost we should ensure that the
4251 // value of result register is preserved during continuation execution.
4252 // We do this here by "pushing" the result of the constructor function to
4253 // the top of the reconstructed stack and popping it in
4254 // {Builtin::kNotifyDeoptimized}.
4255
4256 static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
4257 static constexpr int kTheResult = 1;
4258 const int argument_padding = ArgumentPaddingSlots(parameters_count);
4259
4260 const int adjusted_height =
4261 (is_topmost || frame_info_kind == FrameInfoKind::kConservative)
4262 ? parameters_count + argument_padding + kTheResult +
4263 kTopOfStackPadding
4264 : parameters_count + argument_padding;
4265 frame_size_in_bytes_without_fixed_ = adjusted_height * kSystemPointerSize;
4266 frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ +
4268}
4269
4271 // If the construct frame appears to be topmost we should ensure that the
4272 // value of result register is preserved during continuation execution.
4273 // We do this here by "pushing" the result of the constructor function to
4274 // the top of the reconstructed stack and popping it in
4275 // {Builtin::kNotifyDeoptimized}.
4276
4277 static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
4278 static constexpr int kTheResult = 1;
4279 const int adjusted_height =
4281 (is_topmost ? kTheResult + kTopOfStackPadding : 0);
4282 frame_size_in_bytes_without_fixed_ = adjusted_height * kSystemPointerSize;
4283 frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ +
4285}
4286
4288 int translation_height,
4289 const CallInterfaceDescriptor& continuation_descriptor,
4290 const RegisterConfiguration* register_config, bool is_topmost,
4291 DeoptimizeKind deopt_kind, BuiltinContinuationMode continuation_mode,
4292 FrameInfoKind frame_info_kind) {
4293 const bool is_conservative = frame_info_kind == FrameInfoKind::kConservative;
4294
4295 // Note: This is according to the Translation's notion of 'parameters' which
4296 // differs to that of the SharedFunctionInfo, e.g. by including the receiver.
4297 const int parameters_count = translation_height;
4298 frame_has_result_stack_slot_ =
4299 !is_topmost || deopt_kind == DeoptimizeKind::kLazy;
4300 const int result_slot_count =
4301 (frame_has_result_stack_slot_ || is_conservative) ? 1 : 0;
4302
4303 const int exception_slot_count =
4304 (BuiltinContinuationModeIsWithCatch(continuation_mode) || is_conservative)
4305 ? 1
4306 : 0;
4307
4308 const int allocatable_register_count =
4309 register_config->num_allocatable_general_registers();
4310 const int padding_slot_count =
4312 allocatable_register_count);
4313
4314 const int register_parameter_count =
4315 continuation_descriptor.GetRegisterParameterCount();
4316 translated_stack_parameter_count_ =
4317 parameters_count - register_parameter_count;
4318 stack_parameter_count_ = translated_stack_parameter_count_ +
4319 result_slot_count + exception_slot_count;
4320 const int stack_param_pad_count =
4321 ArgumentPaddingSlots(stack_parameter_count_);
4322
4323 // If the builtins frame appears to be topmost we should ensure that the
4324 // value of result register is preserved during continuation execution.
4325 // We do this here by "pushing" the result of callback function to the
4326 // top of the reconstructed stack and popping it in
4327 // {Builtin::kNotifyDeoptimized}.
4328 static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
4329 static constexpr int kTheResult = 1;
4330 const int push_result_count =
4331 (is_topmost || is_conservative) ? kTheResult + kTopOfStackPadding : 0;
4332
4333 frame_size_in_bytes_ =
4334 kSystemPointerSize * (stack_parameter_count_ + stack_param_pad_count +
4335 allocatable_register_count + padding_slot_count +
4336 push_result_count) +
4338
4339 frame_size_in_bytes_above_fp_ =
4340 kSystemPointerSize * (allocatable_register_count + padding_slot_count +
4341 push_result_count) +
4344}
4345
4346} // namespace internal
4347} // namespace v8
Isolate * isolate_
#define BUILTIN(name)
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
static constexpr U kMask
Definition bit-field.h:41
constexpr size_t size() const
Definition vector.h:70
static constexpr int kPropertyCallbackInfoReturnValueIndex
static constexpr int kPropertyCallbackInfoPropertyKeyIndex
FrameSummaries Summarize() const override
Definition frames.cc:1357
Tagged< Object > receiver() const
Definition frames-inl.h:233
Tagged< Object > holder() const
Definition frames-inl.h:237
void Print(StringStream *accumulator, PrintMode mode, int index) const override
Definition frames.cc:1447
Tagged< Name > property_name() const
Definition frames-inl.h:229
static constexpr int kFunctionCallbackInfoReturnValueIndex
void Print(StringStream *accumulator, PrintMode mode, int index) const override
Definition frames.cc:1424
FullObjectSlot target_slot() const
Definition frames-inl.h:172
DirectHandle< JSFunction > GetFunction() const
Definition frames.cc:1280
FrameSummaries Summarize() const override
Definition frames.cc:1328
void set_target(Tagged< HeapObject > function) const
Definition frames-inl.h:187
DirectHandle< FunctionTemplateInfo > GetFunctionTemplateInfo() const
Definition frames.cc:1304
Tagged< Object > context() const override
Definition frames-inl.h:167
Tagged< HeapObject > target() const
Definition frames-inl.h:181
Tagged< Object > GetParameter(int i) const
Definition frames-inl.h:199
DirectHandle< FixedArray > GetParameters() const
Definition frames.cc:1316
Tagged< Object > receiver() const
Definition frames-inl.h:176
static V8_WARN_UNUSED_RESULT MaybeHandle< JSFunction > InstantiateFunction(Isolate *isolate, DirectHandle< NativeContext > native_context, DirectHandle< FunctionTemplateInfo > data, MaybeDirectHandle< Name > maybe_name={})
int GetBytecodeOffset() const override
Definition frames.cc:3353
void PatchContext(Tagged< Context > value)
Definition frames.cc:3364
intptr_t GetPCForBytecodeOffset(int lookup_offset) const
Definition frames.cc:3358
static BaselineFrame * cast(StackFrame *frame)
Definition frames.h:1179
BuiltinContinuationFrameInfo(int translation_height, const CallInterfaceDescriptor &continuation_descriptor, const RegisterConfiguration *register_config, bool is_topmost, DeoptimizeKind deopt_kind, BuiltinContinuationMode continuation_mode, FrameInfoKind frame_info_kind)
Definition frames.cc:4287
Tagged< Object > argc_slot_object() const
Definition frames-inl.h:148
Tagged< Object > new_target_slot_object() const
Definition frames-inl.h:158
Tagged< JSFunction > function() const
Definition frames.cc:1224
Tagged< Object > GetParameter(int i) const
Definition frames.cc:1232
void Print(StringStream *accumulator, PrintMode mode, int index) const override
Definition frames.cc:1395
DirectHandle< FixedArray > GetParameters() const
Definition frames.cc:1251
Tagged< Object > receiver_slot_object() const
Definition frames-inl.h:143
Tagged< Object > target_slot_object() const
Definition frames-inl.h:153
FrameSummaries Summarize() const override
Definition frames.cc:1212
Tagged< Object > receiver() const
Definition frames.cc:1228
int ComputeParametersCount() const override
Definition frames.cc:3374
Tagged< JSFunction > function() const override
Definition frames.cc:3369
V8_EXPORT_PRIVATE Tagged< Code > code(Builtin builtin)
Definition builtins.cc:149
static Builtin GetBuiltinFromBytecodeOffset(BytecodeOffset)
Definition builtins.cc:104
static V8_EXPORT_PRIVATE const char * NameForStackTrace(Isolate *isolate, Builtin builtin)
Definition builtins.cc:233
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
Definition builtins.cc:226
static constexpr BytecodeOffset None()
Definition utils.h:675
static constexpr int kContextOrFrameTypeOffset
static constexpr int kConstantPoolOffset
static constexpr int kFixedFrameSizeAboveFp
virtual int ComputeParametersCount() const
Definition frames.cc:2624
Address GetParameterSlot(int index) const
Definition frames-inl.h:269
FrameSummaries Summarize() const override
Definition frames.cc:2479
Tagged< HeapObject > unchecked_code() const override
Definition frames.cc:2418
virtual bool IsConstructor() const
Definition frames.cc:2475
virtual int LookupExceptionHandlerInTable(int *data, HandlerTable::CatchPrediction *prediction)
Definition frames.cc:2521
DirectHandle< FixedArray > GetParameters() const
Definition frames.cc:2638
static bool IsConstructFrame(Address fp)
Definition frames-inl.h:259
virtual Tagged< JSFunction > function() const =0
virtual Tagged< Object > GetParameter(int index) const
Definition frames.cc:2620
virtual Tagged< Object > receiver() const
Definition frames.cc:2505
Address GetCallerStackPointer() const override
Definition frames.cc:1065
int ComputeExpressionsCount() const
Definition frames.cc:1481
virtual Tagged< Object > context() const
Definition frames.cc:1470
void ComputeCallerState(State *state) const override
Definition frames.cc:1489
bool HasTaggedOutgoingParams(Tagged< GcSafeCode > code_lookup) const
Definition frames.cc:2240
Address caller_fp() const
Definition frames-inl.h:250
void IterateTurbofanJSOptimizedFrame(RootVisitor *v) const
Definition frames.cc:2272
virtual int position() const
Definition frames.cc:1474
Address caller_pc() const
Definition frames-inl.h:254
void IterateExpressions(RootVisitor *v) const
Definition frames.cc:4091
virtual FrameSummaries Summarize() const
Definition frames.cc:1508
static CommonFrame * cast(StackFrame *frame)
Definition frames.h:662
virtual Address GetExpressionAddress(int n) const
Definition frames.cc:1460
Tagged< HeapObject > unchecked_code() const override
Definition frames.cc:1118
void Iterate(RootVisitor *v) const override
Definition frames.cc:326
ConstructStubFrameInfo(int translation_height, bool is_topmost, FrameInfoKind frame_info_kind)
Definition frames.cc:4243
DebuggableStackFrameIterator(Isolate *isolate)
Definition frames.cc:349
static bool IsValidFrame(StackFrame *frame)
Definition frames.cc:395
static void DeoptimizeFunction(Tagged< JSFunction > function, LazyDeoptimizeReason reason, Tagged< Code > code={})
static V8_INLINE DirectHandle FromSlot(Address *slot)
Definition handles.h:687
static EmbeddedData FromBlob()
static constexpr int kNextFastCallFrameFPOffset
static constexpr int kNextExitFrameFPOffset
static constexpr int kNextFastCallFramePCOffset
Tagged< HeapObject > unchecked_code() const override
Definition frames.cc:1078
Type GetCallerState(State *state) const override
Definition frames.cc:1086
void Iterate(RootVisitor *v) const override
Definition frames.cc:4087
void ComputeCallerState(State *state) const override
Definition frames.cc:1082
void ComputeCallerState(State *state) const override
Definition frames.cc:1122
static StackFrame::Type ComputeFrameType(Address fp)
Definition frames.cc:1155
static void FillState(Address fp, Address sp, State *state)
Definition frames.cc:1199
Type type() const override
Definition frames.h:904
void Iterate(RootVisitor *v) const override
Definition frames.cc:1135
static Address ComputeStackPointer(Address fp)
Definition frames.cc:1182
static Type GetStateForFramePointer(Address fp, State *state)
Definition frames.cc:1141
ExternalCallbackScope * previous()
Definition vm-state.h:51
Handle< FixedArray > NewFixedArray(int length, AllocationType allocation=AllocationType::kYoung)
DirectHandle< Context > native_context() const
Definition frames.cc:2753
DirectHandle< StackFrameInfo > CreateStackFrameInfo() const
Definition frames.cc:2759
Handle< AbstractCode > abstract_code() const
Definition frames.h:450
JavaScriptFrameSummary(Isolate *isolate, Tagged< Object > receiver, Tagged< JSFunction > function, Tagged< AbstractCode > abstract_code, int code_offset, bool is_constructor, Tagged< FixedArray > parameters)
Definition frames.cc:2692
static FrameSummary GetTop(const CommonFrame *frame)
Definition frames.cc:2965
bool is_subject_to_debugging() const
static FrameSummary Get(const CommonFrame *frame, int index)
Definition frames.cc:2981
DirectHandle< Context > native_context() const
int SourceStatementPosition() const
FrameSummaryBase base_
Definition frames.h:619
static FrameSummary GetSingle(const CommonFrame *frame)
Definition frames.cc:2975
Handle< Object > script() const
DirectHandle< StackFrameInfo > CreateStackFrameInfo() const
static FrameSummary GetBottom(const CommonFrame *frame)
Definition frames.cc:2971
Handle< Object > receiver() const
void EnsureSourcePositionsAvailable()
Definition frames.cc:2711
bool AreSourcePositionsAvailable() const
Definition frames.cc:2717
static const int kNoHandlerFound
static const int kLazyDeopt
static bool IsSelfForwarded(Tagged< HeapObject > object)
static constexpr int kHeaderSize
std::optional< Tagged< GcSafeCode > > GcSafeTryFindCodeForInnerPointer(Address inner_pointer)
Definition heap.cc:7138
static V8_INLINE ICStats * instance()
Definition ic-stats.h:69
InnerPointerToCodeCacheEntry * GetCacheEntry(Address inner_pointer)
Definition frames.cc:4144
void Iterate(RootVisitor *v) const override
Definition frames.cc:4114
static InterpretedFrame * cast(StackFrame *frame)
Definition frames.h:1152
void PatchBytecodeOffset(int new_offset)
Definition frames.cc:3335
void PatchBytecodeArray(Tagged< BytecodeArray > bytecode_array)
Definition frames.cc:3344
int GetBytecodeOffset() const override
Definition frames.cc:3326
static constexpr int kBytecodeOffsetExpressionIndex
static Address c_entry_fp(ThreadLocalTop *thread)
Definition isolate.h:889
Address code_cage_base() const
Definition isolate.h:1222
const IsolateData * isolate_data() const
Definition isolate.h:1207
InnerPointerToCodeCache * inner_pointer_to_code_cache()
Definition isolate.h:1404
Builtins * builtins()
Definition isolate.h:1443
v8::internal::Factory * factory()
Definition isolate.h:1527
static Address handler(ThreadLocalTop *thread)
Definition isolate.h:892
static DirectHandle< String > GetDebugName(DirectHandle< JSFunction > function)
Tagged< JSFunction > function() const override
Definition frames.cc:2652
Tagged< Object > context() const override
Definition frames.cc:2674
bool IsConstructor() const override
Definition frames.cc:2414
Address GetCallerStackPointer() const override
Definition frames.cc:2432
Tagged< JSFunction > function() const override
Definition frames.cc:2492
void Print(StringStream *accumulator, PrintMode mode, int index) const override
Definition frames.cc:3974
Tagged< Object > function_slot_object() const
Definition frames-inl.h:291
void SetParameterValue(int index, Tagged< Object > value) const
Definition frames.cc:2410
void Iterate(RootVisitor *v) const override
Definition frames.cc:4101
static void CollectFunctionAndOffsetForICStats(Isolate *isolate, Tagged< JSFunction > function, Tagged< AbstractCode > code, int code_offset)
Definition frames.cc:2597
virtual void GetFunctions(std::vector< Tagged< SharedFunctionInfo > > *functions) const
Definition frames.cc:2436
static void PrintTop(Isolate *isolate, FILE *file, bool print_args, bool print_line_number)
Definition frames.cc:2562
int GetActualArgumentCount() const override
Definition frames.cc:2632
Tagged< Script > script() const
Definition frames.cc:2517
std::tuple< Tagged< AbstractCode >, int > GetActiveCodeAndOffset() const
Definition frames.cc:2453
static void PrintFunctionAndOffset(Isolate *isolate, Tagged< JSFunction > function, Tagged< AbstractCode > code, int code_offset, FILE *file, bool print_line_number)
Definition frames.cc:2532
Tagged< Object > unchecked_function() const
Definition frames.cc:2496
Tagged< Object > context() const override
Definition frames.cc:2510
V8_EXPORT_PRIVATE void Advance()
Definition frames.cc:341
BytecodeOffset GetBytecodeOffsetForOSR() const
Definition frames.cc:2208
DirectHandle< JSFunction > GetInnermostFunction() const
Definition frames.cc:2204
int FindReturnPCForTrampoline(Tagged< Code > code, int trampoline_pc) const override
Definition frames.cc:3192
void Iterate(RootVisitor *v) const override
Definition frames.cc:2097
MaglevSafepointEntry FindEntry(Address pc) const
bool IsForwardingAddress() const
Tagged< HeapObject > ToForwardingAddress(Tagged< HeapObject > map_word_host)
static V8_INLINE MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
void ComputeCallerState(State *state) const override
Definition frames.cc:1069
static bool TryGetAddressForHashing(Isolate *isolate, Address address, uint32_t *hashable_address)
static Builtin TryLookupCode(Isolate *isolate, Address address)
FrameSummaries Summarize() const override
Definition frames.cc:3033
void GetFunctions(std::vector< Tagged< SharedFunctionInfo > > *functions) const override
Definition frames.cc:3235
int LookupExceptionHandlerInTable(int *data, HandlerTable::CatchPrediction *prediction) override
Definition frames.cc:3156
static int StackSlotOffsetRelativeToFp(int slot_index)
Definition frames.cc:3275
Tagged< DeoptimizationData > GetDeoptimizationData(Tagged< Code > code, int *deopt_index) const
Definition frames.cc:3208
static V8_INLINE void ReplacePC(Address *pc_address, Address new_pc, int offset_from_sp)
constexpr int8_t code() const
static const RegisterConfiguration * Default()
virtual void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end)=0
virtual void VisitRunningCode(FullObjectSlot code_slot, FullObjectSlot istream_or_smi_zero_slot)
Definition visitors.h:101
virtual void VisitRootPointer(Root root, const char *description, FullObjectSlot p)
Definition visitors.h:75
base::Vector< const uint8_t > tagged_slots() const
SafepointEntry FindEntry(Address pc) const
int find_return_pc(int pc_offset)
static LocalNamesRange< DirectHandle< ScopeInfo > > IterateLocalNames(DirectHandle< ScopeInfo > scope_info)
static void EnsureSourcePositionsAvailable(Isolate *isolate, DirectHandle< SharedFunctionInfo > shared_info)
TData * location() const
Definition slots.h:80
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
StackFrameIteratorBase(const StackFrameIteratorBase &)=delete
void SetNewFrame(StackFrame::Type type, StackFrame::State *state)
Definition frames.cc:290
StackFrameIteratorForProfilerForTesting(Isolate *isolate, Address pc, Address fp, Address sp, Address lr, Address js_entry_sp)
Definition frames.cc:725
bool IsValidStackAddress(Address addr) const
Definition frames.h:1834
bool IsValidTop(ThreadLocalTop *top) const
Definition frames.cc:613
bool IsValidState(const StackFrame::State &frame) const
Definition frames.cc:652
StackFrameIteratorForProfiler(Isolate *isolate, Address pc, Address fp, Address sp, Address lr, Address js_entry_sp)
Definition frames.cc:472
bool HasValidExitIfEntryFrame(const StackFrame *frame) const
Definition frames.cc:657
bool IsValidExitFrame(Address fp) const
Definition frames.cc:674
bool IsNoFrameBytecodeHandlerPc(Isolate *isolate, Address pc, Address fp) const
Definition frames.cc:447
StackFrame::Type ComputeStackFrameType(StackFrame::State *state) const override
Definition frames.cc:1007
ExternalCallbackScope * external_callback_scope_
Definition frames.h:1857
static bool IsValidFrameType(StackFrame::Type type)
Definition frames-inl.h:428
V8_EXPORT_PRIVATE void Advance()
Definition frames.cc:161
StackFrame * frame() const
Definition frames.h:1726
void Reset(ThreadLocalTop *top)
Definition frames.cc:255
V8_EXPORT_PRIVATE StackFrameIterator(Isolate *isolate)
Definition frames.cc:137
StackFrame::Type ComputeStackFrameType(StackFrame::State *state) const override
Definition frames.cc:903
V8_EXPORT_PRIVATE Tagged< GcSafeCode > GcSafeLookupCode() const
Definition frames.cc:745
Address unauthenticated_pc() const
Definition frames-inl.h:80
bool is_baseline() const
Definition frames.h:242
bool is_builtin_exit() const
Definition frames.h:279
V8_EXPORT_PRIVATE Tagged< Code > LookupCode() const
Definition frames.cc:757
static ReturnAddressLocationResolver return_address_location_resolver_
Definition frames.h:392
V8_EXPORT_PRIVATE std::pair< Tagged< Code >, int > LookupCodeAndOffset() const
Definition frames.cc:762
Address caller_sp() const
Definition frames.h:300
static constexpr bool IsTypeMarker(uintptr_t function_or_marker)
Definition frames.h:214
Address sp() const
Definition frames.h:293
static constexpr Type MarkerToType(intptr_t marker)
Definition frames.h:205
virtual Type type() const =0
bool InFastCCall() const
Definition frames.h:320
bool is_javascript() const
Definition frames.h:290
virtual void ComputeCallerState(State *state) const =0
Address * pc_address() const
Definition frames.h:327
Isolate * isolate() const
Definition frames.h:376
bool is_entry() const
Definition frames.h:230
static Address * ResolveReturnAddressLocation(Address *pc_address)
Definition frames-inl.h:107
bool is_api_callback_exit() const
Definition frames.h:281
bool is_exit() const
Definition frames.h:232
virtual void Iterate(RootVisitor *v) const =0
Address callee_pc() const
Definition frames.h:299
const StackFrameIteratorBase *const iterator_
Definition frames.h:386
virtual Type GetCallerState(State *state) const
Definition frames.cc:1060
Address pc() const
Definition frames-inl.h:78
Address * constant_pool_address() const
Definition frames.h:329
bool is_construct_entry() const
Definition frames.h:231
Address fp() const
Definition frames.h:297
V8_EXPORT_PRIVATE std::pair< Tagged< GcSafeCode >, int > GcSafeLookupCodeAndOffset() const
Definition frames.cc:749
static void SetReturnAddressLocationResolver(ReturnAddressLocationResolver resolver)
Definition frames.cc:813
bool is_interpreted() const
Definition frames.h:241
virtual void Print(StringStream *accumulator, PrintMode mode, int index) const
Definition frames.cc:1386
bool is_optimized_js() const
Definition frames.h:233
Address maybe_unauthenticated_pc() const
Definition frames-inl.h:89
bool is_api_accessor_exit() const
Definition frames.h:280
void IteratePc(RootVisitor *v, Address *constant_pool_address, Tagged< GcSafeCode > holder) const
Definition frames.cc:768
static Address ReadPC(Address *pc_address)
Definition frames-inl.h:103
static const int kPaddingOffset
Definition frames.h:90
StackHandlerIterator(const StackFrame *frame, StackHandler *handler)
Definition frames.cc:79
StackHandler * handler() const
Definition frames.cc:114
static StackHandler * FromAddress(Address address)
Definition frames-inl.h:67
StackHandler * next() const
Definition frames-inl.h:58
void PrintFunction(Isolate *isolate, Tagged< JSFunction > function, Tagged< Object > receiver)
void PrintSecurityTokenIfChanged(Isolate *isolate, Tagged< JSFunction > function)
void Add(const char *format)
void PrintName(Tagged< Object > o)
Tagged< HeapObject > unchecked_code() const override
Definition frames.cc:2364
FrameSummaries Summarize() const override
Definition frames.cc:2381
int LookupExceptionHandlerInTable()
Definition frames.cc:2371
static bool CanLookupStartOfJitAllocationAt(Address inner_pointer)
void Prepare(Address stack_frame_pointer)
int FindReturnPCForTrampoline(Tagged< Code > code, int trampoline_pc) const override
Definition frames.cc:3200
void Iterate(RootVisitor *v) const override
Definition frames.cc:2360
int ComputeParametersCount() const override
Definition frames.cc:2422
Tagged< HeapObject > unchecked_code() const override
Definition frames.cc:2265
void Iterate(RootVisitor *v) const override
Definition frames.cc:2356
static constexpr int kFixedFrameSizeFromFp
static constexpr int kFrameTypeOffset
void Iterate(RootVisitor *v) const override
Definition frames.cc:319
void IterateParamsOfGenericWasmToJSWrapper(RootVisitor *v) const
void Iterate(RootVisitor *v) const override
Definition frames.cc:1971
void IterateParamsOfOptimizedWasmToJSWrapper(RootVisitor *v) const
static constexpr int kBytecodeArrayExpressionIndex
static constexpr int kRegisterFileExpressionIndex
static int RegisterStackSlotCount(int register_count)
static uint32_t GetStackSizeForAdditionalArguments(int parameters_count)
Definition frames.cc:4237
UnoptimizedFrameInfo(int parameters_count_with_receiver, int translation_height, bool is_topmost, bool pad_arguments, FrameInfoKind frame_info_kind)
Definition frames.cc:4205
Address GetExpressionAddress(int n) const override
Definition frames.cc:1465
Tagged< BytecodeArray > GetBytecodeArray() const
Definition frames.cc:3299
Tagged< Object > ReadInterpreterRegister(int register_index) const
Definition frames.cc:3307
int LookupExceptionHandlerInTable(int *data, HandlerTable::CatchPrediction *prediction) override
Definition frames.cc:3286
FrameSummaries Summarize() const override
Definition frames.cc:3316
int position() const override
Definition frames.cc:3280
static V8_INLINE Tagged_t CompressObject(Address tagged)
static V8_INLINE Address DecompressTagged(TOnHeapAddress on_heap_addr, Tagged_t raw_value)
static int GetFunctionIndex(Tagged< Tuple2 > interpreter_object, Address frame_pointer, int index)
static std::vector< WasmInterpreterStackEntry > GetInterpretedStack(Tagged< Tuple2 > interpreter_object, Address frame_pointer)
std::pair< WasmCode *, SafepointEntry > LookupCodeAndSafepoint(Isolate *isolate, Address pc)
WasmCode * LookupCode(Isolate *isolate, Address pc) const
Handle< Code > code
#define V8_EXTERNAL_CODE_SPACE_BOOL
Definition globals.h:255
#define HAS_SMI_TAG(value)
Definition globals.h:1771
#define V8_EMBEDDED_CONSTANT_POOL_BOOL
Definition globals.h:81
#define DEBUG_BOOL
Definition globals.h:87
const JSFunctionRef function_
Handle< SharedFunctionInfo > info
WasmFrame *const frame_
ZoneList< RegExpInstruction > code_
#define FRAME_TYPE_CASE(type, class)
#define FRAME_SUMMARY_DESTR(kind, type, field, desc)
#define FRAME_SUMMARY_DISPATCH(ret, name)
Definition frames.cc:3013
#define STACK_FRAME_TYPE_LIST(V)
Definition frames.h:115
Isolate * isolate
int32_t offset
TNode< Context > context
Node * receiver_
TNode< Object > receiver
SharedFunctionInfoRef shared
bool forwarded
ZoneVector< RpoNumber > & result
ZoneStack< RpoNumber > & stack
LiftoffAssembler::CacheState state
int pc_offset
int position
Definition liveedit.cc:290
base::SmallVector< int32_t, 1 > stack_slots
#define MSAN_MEMORY_IS_INITIALIZED(start, size)
Definition msan.h:37
int int32_t
Definition unicode.cc:40
unsigned short uint16_t
Definition unicode.cc:39
constexpr unsigned CountTrailingZeros(T value)
Definition bits.h:144
constexpr bool IsPowerOfTwo(T value)
Definition bits.h:187
V8_INLINE Dest bit_cast(Source const &source)
Definition macros.h:95
T & Memory(Address addr)
Definition memory.h:18
constexpr int kAnonymousFuncIndex
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
WasmCodeManager * GetWasmCodeManager()
TypeCanonicalizer * GetTypeCanonicalizer()
constexpr WasmCodePosition kNoCodePosition
constexpr IndependentValueType kWasmI32
int GetSourcePosition(const WasmModule *module, uint32_t func_index, uint32_t byte_offset, bool is_at_number_conversion)
Signature< ValueType > FunctionSig
constexpr IndependentValueType kWasmI64
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
constexpr int kFunctionEntryBytecodeOffset
Definition globals.h:854
constexpr bool CodeKindIsOptimizedJSFunction(CodeKind kind)
Definition code-kind.h:66
constexpr int kBitsPerByte
Definition globals.h:682
const char * CodeKindToMarker(CodeKind kind, bool context_specialized)
Definition code-kind.cc:21
void PrintF(const char *format,...)
Definition utils.cc:39
constexpr int kPCOnStackSize
Definition globals.h:412
Tagged(T object) -> Tagged< T >
uint32_t ComputeUnseededHash(uint32_t key)
Definition utils.h:271
kWasmInternalFunctionIndirectPointerTag instance_data
bool IsTranslationInterpreterFrameOpcode(TranslationOpcode o)
int TranslationOpcodeOperandCount(TranslationOpcode o)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
BuiltinContinuationMode
Definition frames.h:1899
kInterpreterTrampolineOffset Tagged< HeapObject >
constexpr Register kJavaScriptCallArgCountRegister
Address Tagged_t
Definition globals.h:547
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
Tagged< T > GCSafeCast(Tagged< Object > object, const Heap *heap)
Definition casting.h:142
constexpr bool IsAnyTagged(MachineRepresentation rep)
constexpr int kJSArgcReceiverSlots
Definition globals.h:2778
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset Tagged< WasmInstanceObject >
constexpr int kSystemPointerSize
Definition globals.h:410
V8_INLINE PtrComprCageBase GetPtrComprCageBase()
void ShortPrint(Tagged< Object > obj, FILE *out)
Definition objects.cc:1865
const int kHeapObjectTag
Definition v8-internal.h:72
V8_EXPORT_PRIVATE FlagValues v8_flags
static uint32_t ObjectAddressForHashing(Address object)
return value
Definition map-inl.h:893
constexpr int ArgumentPaddingSlots(int argument_count)
static constexpr Address kNullAddress
Definition v8-internal.h:53
constexpr int kDoubleSize
Definition globals.h:407
DirectHandle< String > GetWasmFunctionDebugName(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > instance_data, uint32_t func_index)
constexpr bool CodeKindCanDeoptimize(CodeKind kind)
Definition code-kind.h:83
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
Local< T > Handle
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
uintptr_t(*)(uintptr_t return_addr_location) ReturnAddressLocationResolver
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
ro::BitSet tagged_slots
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK(condition)
Definition logging.h:124
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
#define arraysize(array)
Definition macros.h:67
std::vector< FrameSummary > frames
Definition frames.h:631
const char * function_name
Definition ic-stats.h:36
const char * script_name
Definition ic-stats.h:38
EmbedderRootsHandler * handler_
#define V8_LIKELY(condition)
Definition v8config.h:661
Node ** parameters_
wasm::ValueType type