32#if V8_ENABLE_WEBASSEMBLY
// NOTE(review): the numeric prefixes fused into these lines (51, 53, ...) are
// original-file line numbers from a broken extraction; interior lines are
// elided. Code left byte-identical.
// Iterator over Code objects eligible for deoptimization. Walks the regular
// code space first, then the code large-object space (see state_ enum).
51class DeoptimizableCodeIterator {
53 explicit DeoptimizableCodeIterator(Isolate* isolate);
// Not copyable: copy ctor and copy assignment are deleted.
54 DeoptimizableCodeIterator(
const DeoptimizableCodeIterator&) =
delete;
55 DeoptimizableCodeIterator& operator=(
const DeoptimizableCodeIterator&) =
// Current iteration phase; advanced by Next() when a space is exhausted.
63 enum { kIteratingCodeSpace, kIteratingCodeLOSpace, kDone }
state_;
// Constructor fragment (extraction-garbled; interior lines elided).
// Starts iteration with an object iterator over the code space and
// state_ == kIteratingCodeSpace.
68DeoptimizableCodeIterator::DeoptimizableCodeIterator(Isolate* isolate)
// NOTE(review): presumably selects the shared-space isolate's heap when
// applicable — elided text between lines 71 and 75 hides the branch; confirm
// against the full source.
71 isolate, isolate->is_shared_space_isolate()
75 isolate->
heap()->code_space()->GetObjectIterator(isolate->
heap())),
76 state_(kIteratingCodeSpace) {}
// Returns the next deoptimizable Code object (fragment; interior elided).
// When the current space's iterator is exhausted (object.is_null()), advances
// state_: code space -> code large-object space -> done.
78Tagged<Code> DeoptimizableCodeIterator::Next() {
81 if (
object.is_null()) {
85 case kIteratingCodeSpace: {
// Switch to iterating the code large-object space.
87 isolate_->heap()->code_lo_space()->GetObjectIterator(
89 state_ = kIteratingCodeLOSpace;
92 case kIteratingCodeLOSpace:
105 Tagged<InstructionStream> istream = Cast<InstructionStream>(
object);
// Skip InstructionStreams that have no Code attached (acquire load to
// synchronize with concurrent publication).
107 if (!istream->TryGetCode(&code, kAcquireLoad))
continue;
// FrameWriter fragments (class header elided by extraction; code unchanged).
// Helper that writes values top-down into an output FrameDescription,
// tracking the current offset in top_offset_ and optionally tracing each
// write when trace_scope_ is non-null.
123 static const int NO_INPUT_INDEX = -1;
// Ctor: top_offset_ starts at the frame's full size and decreases as values
// are pushed.
126 : deoptimizer_(deoptimizer),
128 trace_scope_(trace_scope),
129 top_offset_(frame->GetFrameSize()) {}
// PushRawValue fragment: trace the written value if tracing is enabled.
133 if (trace_scope_ !=
nullptr) {
134 DebugPrintOutputValue(value, debug_hint);
// PushRawObject fragment: store the object's raw pointer bits.
139 intptr_t value = obj.
ptr();
141 if (trace_scope_ !=
nullptr) {
142 DebugPrintOutputObject(obj, top_offset_, debug_hint);
// PushBottommostCallerPc: the bottommost frame stores the caller pc as a
// plain frame slot.
151 frame_->SetFrameSlot(top_offset_,
pc);
152 DebugPrintOutputPc(
pc,
"bottommost caller's pc\n");
// PushApprovedCallerPc: non-bottommost frames go through SetCallerPc.
157 frame_->SetCallerPc(top_offset_,
pc);
158 DebugPrintOutputPc(
pc,
"caller's pc\n");
// PushCallerFp.
163 frame_->SetCallerFp(top_offset_, fp);
164 DebugPrintOutputValue(fp,
"caller's fp\n");
// PushCallerConstantPool.
169 frame_->SetCallerConstantPool(top_offset_,
cp);
170 DebugPrintOutputValue(
cp,
"caller's constant_pool\n");
// PushTranslatedValue fragment: push the value, then queue it for later
// materialization if needed.
174 const char* debug_hint =
"") {
176 PushRawObject(obj, debug_hint);
177 if (trace_scope_ !=
nullptr) {
180 deoptimizer_->QueueValueForMaterialization(output_address(top_offset_), obj,
// PushFeedbackVectorForMaterialization fragment: push the arguments marker
// as a placeholder and queue the real feedback vector.
187 PushRawObject(
ReadOnlyRoots(deoptimizer_->isolate()).arguments_marker(),
189 deoptimizer_->QueueFeedbackVectorForMaterialization(
190 output_address(top_offset_), iterator);
// PushStackJSArguments: collect parameter iterators, then push them in
// reverse so they land in stack order.
194 int parameters_count) {
195 std::vector<TranslatedFrame::iterator> parameters;
196 parameters.reserve(parameters_count);
197 for (
int i = 0;
i < parameters_count; ++
i, ++iterator) {
198 parameters.push_back(iterator);
200 for (
auto& parameter : base::Reversed(parameters)) {
201 PushTranslatedValue(parameter,
"stack parameter");
// PushValue fragment.
213 frame_->SetFrameSlot(top_offset_, value);
// output_address(): absolute address of a slot = frame top + offset.
217 Address output_address =
218 static_cast<Address
>(
frame_->GetTop()) + output_offset;
219 return output_address;
// DebugPrintOutputValue: trace-only.
223 if (trace_scope_ !=
nullptr) {
224 PrintF(trace_scope_->file(),
226 output_address(top_offset_), top_offset_, value, debug_hint);
// DebugPrintOutputPc: with CFI, also print the PAC-stripped pc.
231#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
232 if (trace_scope_ !=
nullptr) {
233 PrintF(trace_scope_->file(),
236 output_address(top_offset_), top_offset_, value,
237 PointerAuthentication::StripPAC(value), debug_hint);
240 DebugPrintOutputValue(value, debug_hint);
// DebugPrintOutputObject: prints Smis by value (other cases elided).
245 const char* debug_hint =
"") {
246 if (trace_scope_ !=
nullptr) {
248 output_address(output_offset), output_offset);
251 Cast<Smi>(obj).value());
255 PrintF(trace_scope_->file(),
" ; %s", debug_hint);
// Deoptimizer::New fragment: constructs a Deoptimizer (raw_function may be 0
// for wasm deopts) and registers it on the isolate.
268 Address from,
int fp_to_sp_delta,
272 raw_function != 0 ? Cast<JSFunction>(
Tagged<Object>(raw_function))
276 isolate->set_current_deoptimizer(deoptimizer);
// DeleteForWasm: grabs and destroys the isolate's current deoptimizer.
// GC must be disallowed on entry; the Deoptimizer's destruction re-allows it
// (see the flipped DCHECK below).
286size_t Deoptimizer::DeleteForWasm(
Isolate* isolate) {
288 DCHECK(!AllowGarbageCollection::IsAllowed());
289 Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
293 DCHECK(AllowGarbageCollection::IsAllowed());
// Fragment (function header elided): walks translated frames counting down
// from jsframe_index to locate the requested JS frame; only frame kinds that
// correspond to JS frames decrement the counter.
305 int counter = jsframe_index;
306 for (
auto it = translated_values.
begin(); it != translated_values.
end();
308 if (it->kind() == TranslatedFrame::kUnoptimizedFunction ||
309 it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
311 TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
// The requested frame must exist and be an unoptimized (interpreter) frame.
319 CHECK(frame_it != translated_values.
end());
322 CHECK_EQ(frame_it->kind(), TranslatedFrame::kUnoptimizedFunction);
// ActivationsFinder fragments: a thread visitor that finds live activations
// of code marked for deoptimization and patches their return pcs to the
// deopt trampoline recorded in the safepoint table.
334 bool safe_to_deopt_topmost_optimized_code) {
336 topmost_ = topmost_optimized_code;
337 safe_to_deopt_ = safe_to_deopt_topmost_optimized_code;
// Walks every stack frame of the given thread.
344 void VisitThread(
Isolate* isolate, ThreadLocalTop* top)
override {
345 for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
346 if (it.frame()->is_optimized_js()) {
347 Tagged<GcSafeCode> code = it.frame()->GcSafeLookupCode();
348 if (CodeKindCanDeoptimize(code->kind()) &&
349 code->marked_for_deoptimization()) {
// Maglev and Turbofan store their safepoints in different table formats.
352 if (code->is_maglevved()) {
353 MaglevSafepointEntry safepoint = MaglevSafepointTable::FindEntry(
354 isolate, code, it.frame()->pc());
355 trampoline_pc = safepoint.trampoline_pc();
357 SafepointEntry safepoint = SafepointTable::FindEntry(
358 isolate, code, it.frame()->maybe_unauthenticated_pc());
359 trampoline_pc = safepoint.trampoline_pc();
// -1 doubles as the "no trampoline" sentinel in both tables.
366 static_assert(SafepointEntry::kNoTrampolinePC == -1);
// Frames in a fast C call cannot be patched here.
368 if (!it.frame()->InFastCCall()) {
369 Address new_pc = code->instruction_start() + trampoline_pc;
372 Deoptimizer::PatchToJump(
pc, new_pc);
// Replace the return address on the stack, re-signing it where pointer
// authentication is in use.
376 Address* pc_addr = it.frame()->pc_address();
377 PointerAuthentication::ReplacePC(pc_addr, new_pc,
388 Tagged<GcSafeCode> topmost_;
// Deoptimizes all code previously marked for deoptimization (fragment).
// First scans the current stack to find the topmost marked optimized frame
// and whether it is safe to deopt it, then runs ActivationsFinder over the
// current and all archived threads.
396void Deoptimizer::DeoptimizeMarkedCode(
Isolate* isolate) {
400 bool safe_to_deopt_topmost_optimized_code =
false;
407 if (it.frame()->is_optimized_js()) {
411 TraceFoundActivation(isolate, function);
412 bool safe_if_deopt_triggered;
// Safety is derived from the safepoint entry for the frame's pc.
413 if (code->is_maglevved()) {
415 MaglevSafepointTable::FindEntry(isolate, code, it.frame()->pc());
419 isolate, code, it.frame()->maybe_unauthenticated_pc());
424 bool is_builtin_code = code->kind() == CodeKind::BUILTIN;
425 DCHECK(topmost_optimized_code.
is_null() || safe_if_deopt_triggered ||
// Remember only the first (topmost) optimized code found.
427 if (topmost_optimized_code.
is_null()) {
428 topmost_optimized_code =
code;
429 safe_to_deopt_topmost_optimized_code = safe_if_deopt_triggered;
435 ActivationsFinder visitor(topmost_optimized_code,
436 safe_to_deopt_topmost_optimized_code);
// Visit the current thread's stack, then all archived threads.
438 visitor.VisitThread(isolate, isolate->thread_local_top());
442 isolate->thread_manager()->IterateArchivedThreads(&visitor);
// Marks every deoptimizable Code object in the isolate and deoptimizes all
// marked code (fragment; interior lines elided).
445void Deoptimizer::DeoptimizeAll(
Isolate* isolate) {
446 RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
449 TraceDeoptAll(isolate);
// Block until concurrent optimization jobs are aborted so no new optimized
// code appears during the sweep.
450 isolate->AbortConcurrentOptimization(BlockingBehavior::kBlock);
454 DeoptimizableCodeIterator it(isolate);
456 code->SetMarkedForDeoptimization(isolate,
457 LazyDeoptimizeReason::kDebugger);
461 DeoptimizeMarkedCode(isolate);
// DeoptimizeFunction fragment: marks the function's (or the given) code for
// deoptimization and triggers processing of marked code.
468 Isolate* isolate = function->GetIsolate();
469 RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
// Ensure we do not read flushed code; fall back to the function's current
// code if none was supplied.
472 function->ResetIfCodeFlushed(isolate);
473 if (code.is_null()) code = function->code(isolate);
479 code->SetMarkedForDeoptimization(isolate, reason);
// Without leaptiering, marked code must also be evicted from the feedback
// vector's optimized-code cache.
480#ifndef V8_ENABLE_LEAPTIERING_BOOL
483 function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
484 isolate, function->shared(),
"unlinking code marked for deopt");
487 DeoptimizeMarkedCode(isolate);
// Marks every deoptimizable Code object that inlines the given function,
// then deoptimizes marked code (fragment; interior lines elided).
492void Deoptimizer::DeoptimizeAllOptimizedCodeWithFunction(
494 RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
496 TRACE_EVENT0(
"v8",
"V8.DeoptimizeAllOptimizedCodeWithFunction");
// Stop concurrent optimization so it cannot produce new inlining of the
// function while we scan.
499 isolate->AbortConcurrentOptimization(BlockingBehavior::kBlock);
502 bool any_marked =
false;
504 DeoptimizableCodeIterator it(isolate);
506 if (code->Inlines(*function)) {
507 code->SetMarkedForDeoptimization(isolate,
508 LazyDeoptimizeReason::kDebugger);
514 DeoptimizeMarkedCode(isolate);
// X-macro listing builtins that may serve as deopt return targets, paired
// with the heap field holding their valid deopt pc offset. Used by
// EnsureValidReturnAddress below. (No comments inside: the backslash
// continuations must stay contiguous.)
518#define DEOPTIMIZATION_HELPER_BUILTINS(V) \
519 V(Builtin::kInterpreterEnterAtBytecode, \
520 deopt_pc_offset_after_adapt_shadow_stack) \
521 V(Builtin::kInterpreterEnterAtNextBytecode, \
522 deopt_pc_offset_after_adapt_shadow_stack) \
523 V(Builtin::kContinueToCodeStubBuiltinWithResult, \
524 deopt_pc_offset_after_adapt_shadow_stack) \
525 V(Builtin::kContinueToCodeStubBuiltin, \
526 deopt_pc_offset_after_adapt_shadow_stack) \
527 V(Builtin::kContinueToJavaScriptBuiltinWithResult, \
528 deopt_pc_offset_after_adapt_shadow_stack) \
529 V(Builtin::kContinueToJavaScriptBuiltin, \
530 deopt_pc_offset_after_adapt_shadow_stack) \
531 V(Builtin::kRestartFrameTrampoline, \
532 deopt_pc_offset_after_adapt_shadow_stack) \
533 V(Builtin::kJSConstructStubGeneric, construct_stub_create_deopt_pc_offset) \
534 V(Builtin::kInterpreterPushArgsThenFastConstructFunction, \
535 construct_stub_invoke_deopt_pc_offset)
// Validates that a return address targets an approved deopt destination
// (fragment): one of the helper builtins above, NotifyDeoptimized, or —
// with WebAssembly enabled — a known wasm code object.
538Address Deoptimizer::EnsureValidReturnAddress(
Isolate* isolate,
542 Builtins* builtins = isolate->builtins();
// Expands per DEOPTIMIZATION_HELPER_BUILTINS entry; keep contiguous.
544#define CHECK_BUILTIN(builtin, offset) \
545 if (builtins->code(builtin)->instruction_start() + heap->offset().value() - \
546 Deoptimizer::kAdaptShadowStackOffsetToSubtract == \
554 if (builtins->code(Builtin::kNotifyDeoptimized)->instruction_start() ==
558#if V8_ENABLE_WEBASSEMBLY
560 wasm::GetWasmCodeManager()->LookupCode(isolate, address) !=
nullptr) {
// MessageFor fragment: maps DeoptimizeKind to its trace label.
577 case DeoptimizeKind::kEager:
578 return "deopt-eager";
579 case DeoptimizeKind::kLazy:
// Deoptimizer constructor fragment: member-initializer list followed by the
// lazy-throw / restart-frame setup.
588 deopt_exit_index_(kFixedExitSizeMarker),
591 fp_to_sp_delta_(fp_to_sp_delta),
592 deoptimizing_throw_(false),
// -1 sentinels: no catch handler / no restart frame recorded yet.
593 catch_handler_data_(-1),
594 catch_handler_pc_offset_(-1),
595 restart_frame_index_(-1),
599 caller_frame_top_(0),
602 caller_constant_pool_(0),
603 actual_argument_count_(0),
// A pending lazy-throw request is only legal for lazy deopts; consume the
// isolate flag and remember it locally.
608 if (isolate->deoptimizer_lazy_throw()) {
609 CHECK_EQ(kind, DeoptimizeKind::kLazy);
610 isolate->set_deoptimizer_lazy_throw(false);
611 deoptimizing_throw_ = true;
// Frame restart (debugger) piggybacks on the throw machinery.
614 if (isolate->debug()->IsRestartFrameScheduled()) {
615 CHECK(deoptimizing_throw_);
616 restart_frame_index_ = isolate->debug()->restart_inline_frame_index();
617 CHECK_GE(restart_frame_index_, 0);
618 isolate->debug()->clear_restart_frame();
// Constructor tail (fragment): wasm-deopt setup vs. the JS path.
624 DCHECK(AllowGarbageCollection::IsAllowed());
628#if V8_ENABLE_WEBASSEMBLY
// Wasm deopt: no JSFunction is involved; heap access is forbidden for the
// duration.
629 if (
v8_flags.wasm_deopt && function.is_null()) {
631 no_heap_access_during_wasm_deopt_ =
636 compiled_optimized_wasm_code_ =
code;
// Deopt exit index is derived from the pc's offset inside the code object.
643 Address deopt_exit_offset = from - code->instruction_start();
650 static_cast<uint32_t
>(deopt_exit_offset -
664 code->native_module()->module()->functions[code->index()].sig;
665 int parameter_stack_slots, return_stack_slots;
// NOTE(review): "¶meter_stack_slots" is mojibake for
// "&parameter_stack_slots" (HTML-entity damage: "&para" -> '¶').
666 GetWasmStackSlotsCounts(
sig, ¶meter_stack_slots, &return_stack_slots);
668 unsigned input_frame_size = fp_to_sp_delta +
// JS path: function must be a JSFunction; compute frame sizes from the
// shared info's formal parameter count.
681 DCHECK(IsJSFunction(function));
692 function->shared()->internal_formal_parameter_count_with_receiver());
702 deopt_data->DeoptExitStart().value();
703 int eager_deopt_count = deopt_data->EagerDeoptCount().value();
710 "lazy deopts are expected to be emitted last");
// Exits before the lazy-deopt section are eager.
715 if (
from_ <= lazy_deopt_start) {
// Destructor fragments: releases the CET shadow-stack copy (if any) and the
// DisallowGarbageCollection scope.
739#ifdef V8_ENABLE_CET_SHADOW_STACK
754#ifdef V8_ENABLE_CET_SHADOW_STACK
755 if (shadow_stack_ !=
nullptr) {
756 delete[] shadow_stack_;
757 shadow_stack_ =
nullptr;
// GC must still be disallowed while the deoptimizer is alive; deleting the
// scope re-enables it.
761 DCHECK(!AllowGarbageCollection::IsAllowed());
763 delete disallow_garbage_collection_;
764 disallow_garbage_collection_ =
nullptr;
// Fragment: deopt-entry builtin selection (eager vs. lazy)...
771 return Builtin::kDeoptimizationEntry_Eager;
773 return Builtin::kDeoptimizationEntry_Lazy;
// ...followed by catch-handler lookup in the bytecode's handler table; the
// handler is marked used and its range data returned via data_out.
781 switch (translated_frame->
kind()) {
786 int handler_index = table.LookupHandlerIndexForRange(bytecode_offset);
788 *data_out = table.GetRangeData(handler_index);
789 table.MarkHandlerUsed(handler_index);
790 return table.GetRangeHandler(handler_index);
// Tracing helper fragments. All are gated on tracing flags/scopes and only
// produce log output; they have no effect on deoptimization itself.
// TraceDeoptBegin:
808 PrintF(file,
"[bailout (kind: %s, reason: %s): begin. deoptimizing ",
820 "bytecode offset %d, deopt exit %d, FP to SP "
830 PrintF(file,
" ;;; deoptimize at ");
// TraceMarkForDeoptimization: baseline code carries no deopt data, so it is
// skipped early.
850 if (code->kind() == CodeKind::BASELINE)
return;
852 DCHECK(code->uses_deoptimization_data());
860 PrintF(scope.
file(),
"[marking dependent code ");
864 PrintF(
") (opt id %d) for deoptimization, reason: %s]\n",
865 deopt_data->OptimizationId().value(),
873 CodeDependencyChangeEvent(
// TraceEvictFromOptimizedCodeCache: verbose-only.
883 if (!
v8_flags.trace_deopt_verbose)
return;
888 "[evicting optimized code marked for deoptimization (%s) for ",
// TraceFoundActivation: verbose-only.
896void Deoptimizer::TraceFoundActivation(
Isolate* isolate,
898 if (!
v8_flags.trace_deopt_verbose)
return;
900 PrintF(scope.file(),
"[deoptimizer found activation of function: ");
901 function->PrintName(scope.file());
// TraceDeoptAll: verbose-only.
908 if (!
v8_flags.trace_deopt_verbose)
return;
910 PrintF(scope.
file(),
"[deoptimize all code in all contexts]\n");
// Wasm-only fragment: recompiles a function with Liftoff, requesting deopt
// info for the given bytecode offset, and returns the published code plus
// the Liftoff frame descriptions needed to rebuild its frame.
913#if V8_ENABLE_WEBASSEMBLY
916 std::unique_ptr<wasm::LiftoffFrameDescriptionForDeopt>>
// Function body bytes come from the module's wire bytes.
927 wire_bytes.
begin() + function->code.offset(),
928 wire_bytes.
begin() + function->code.end_offset(),
930 wasm::WasmCompilationResult
result = ExecuteLiftoffCompilation(
932 wasm::LiftoffOptions{}
933 .set_func_index(function_index)
934 .set_deopt_info_bytecode_offset(deopt_point.
ToInt())
935 .set_deopt_location_kind(
941 wasm::UnpublishedWasmCode compiled_code =
943 wasm::WasmCodeRefScope code_ref_scope;
944 return {wasm_code, std::move(
result.liftoff_frame_descriptions)};
// Builds one Liftoff output frame for a wasm deopt (heavily elided fragment):
// recompiles the function with Liftoff, then lays out stack slots and
// register values for the new frame from the translated frame state.
954FrameDescription* Deoptimizer::DoComputeWasmLiftoffFrame(
955 TranslatedFrame& frame, wasm::NativeModule* native_module,
957 std::stack<intptr_t>& shadow_stack) {
961 const bool is_bottommost = frame_index == 0;
968 auto [wasm_code, liftoff_description] = CompileWithLiftoffAndGetDeoptInfo(
969 native_module, frame.wasm_function_index(), frame.bytecode_offset(),
972 DCHECK(liftoff_description);
973 int parameter_stack_slots, return_stack_slots;
// NOTE(review): "¶meter_stack_slots" is mojibake for
// "&parameter_stack_slots" (HTML-entity damage).
977 GetWasmStackSlotsCounts(
sig, ¶meter_stack_slots, &return_stack_slots);
976 native_module->module()->functions[frame.wasm_function_index()].sig;
980 const uint32_t output_frame_size = liftoff_description->total_frame_size;
981 const uint32_t total_output_frame_size =
// Trace-only dump of the Liftoff stack/register state.
986 std::ostringstream outstream;
987 outstream <<
" Liftoff stack & register state for function index "
988 << frame.wasm_function_index() <<
", frame size "
989 << output_frame_size <<
", total frame size "
990 << total_output_frame_size <<
'\n';
992 for (
const wasm::LiftoffVarState& state : liftoff_description->var_state) {
993 outstream <<
" " << index++ <<
": " << state <<
'\n';
996 PrintF(file,
"%s", outstream.str().c_str());
1000 total_output_frame_size, parameter_stack_slots,
isolate());
1011 for (
int i = 0;
i < parameter_stack_slots; ++
i) {
1019 total_output_frame_size;
1020 output_frame->SetTop(top);
// The new pc points into the freshly compiled Liftoff code.
1021 Address pc = wasm_code->instruction_start() + liftoff_description->pc_offset;
1029 isolate(),
pc, output_frame->GetTop()));
// With CET shadow stacks, record the pcs to replay onto the shadow stack.
1030#ifdef V8_ENABLE_CET_SHADOW_STACK
1033 shadow_stack.push(
pc);
1035 shadow_stack.push(wasm_code->instruction_start() +
1036 liftoff_description->adapt_shadow_stack_pc_offset);
1042 if (is_bottommost) {
1048 new_context, old_context);
1049 }
else if (parameter_stack_slots != 0) {
// Re-sign the previous frame's pc for the new stack context (PAC).
1057 isolate(), previous_frame->GetPc(), new_context, old_context);
1058 previous_frame->SetPc(signed_pc);
1063 output_frame->SetFrameSlot(
1068 output_frame->SetFrameSlot(
1073 int base_offset = output_frame_size;
// Store the trusted instance in its frame slot, and in its register if
// Liftoff expects it there.
1076 output_frame->SetFrameSlot(
1078 wasm_trusted_instance.ptr());
1079 if (liftoff_description->trusted_instance !=
no_reg) {
1080 output_frame->SetRegister(liftoff_description->trusted_instance.code(),
1081 wasm_trusted_instance.ptr());
1085 auto liftoff_iter = liftoff_description->var_state.begin();
1086 if constexpr (
Is64()) {
1089 CHECK_EQ(liftoff_description->var_state.size(), frame.GetValueCount());
// On 32-bit targets an i64 is lowered to two i32 halves; this flag tracks
// which half the current translated value represents.
1092 bool int64_lowering_is_low =
true;
1094 for (
const TranslatedValue& value : frame) {
1095 bool skip_increase_liftoff_iter =
false;
1096 switch (liftoff_iter->loc()) {
1098 if (!
Is64() && liftoff_iter->kind() == wasm::ValueKind::kI64) {
1099 if (int64_lowering_is_low) skip_increase_liftoff_iter =
true;
1100 int64_lowering_is_low = !int64_lowering_is_low;
// Register-resident values: general-purpose...
1104 if (liftoff_iter->is_gp_reg()) {
1106 switch (value.kind()) {
1109 reg_value =
static_cast<uint32_t
>(value.int32_value());
1112 reg_value = value.raw_literal().ptr();
1115 reg_value = value.int64_value();
1120 output_frame->SetRegister(liftoff_iter->reg().gp().code(), reg_value);
1121 }
// ...floating-point (f64 register model also carries f32 bits)...
else if (liftoff_iter->is_fp_reg()) {
1122 switch (value.kind()) {
1124 output_frame->SetDoubleRegister(liftoff_iter->reg().fp().code(),
1125 value.double_value());
1131 static_assert(std::is_same_v<
decltype(liftoff_iter->reg().fp()),
1133 output_frame->SetDoubleRegister(
1134 liftoff_iter->reg().fp().code(),
1135 Float64::FromBits(value.float_value().get_bits()));
1138 output_frame->SetSimd128Register(liftoff_iter->reg().fp().code(),
1139 value.simd_value());
1144 }
// ...32-bit gp register pairs for lowered i64...
else if (!
Is64() && liftoff_iter->is_gp_reg_pair()) {
1146 switch (value.kind()) {
1149 reg_value =
static_cast<uint32_t
>(value.int32_value());
1152 reg_value = value.raw_literal().ptr();
1157 int8_t
reg = int64_lowering_is_low
1158 ? liftoff_iter->reg().low_gp().code()
1159 : liftoff_iter->reg().high_gp().code();
1160 output_frame->SetRegister(
reg, reg_value);
1161 if (int64_lowering_is_low) skip_increase_liftoff_iter =
true;
1162 int64_lowering_is_low = !int64_lowering_is_low;
1163 }
// ...and fp register pairs holding the two halves of a simd value.
else if (!
Is64() && liftoff_iter->is_fp_reg_pair()) {
1165 Simd128 simd_value = value.simd_value();
1167 output_frame->SetDoubleRegister(
1168 liftoff_iter->reg().low_fp().code(),
1170 output_frame->SetDoubleRegister(
1171 liftoff_iter->reg().high_fp().code(),
1173 val_ptr +
sizeof(
double))));
// Stack-resident values. Big-endian targets bias 32-bit slots by 4 bytes.
1179#ifdef V8_TARGET_BIG_ENDIAN
1180 static constexpr int kLiftoffStackBias = 4;
1182 static constexpr int kLiftoffStackBias = 0;
1184 switch (liftoff_iter->kind()) {
1185 case wasm::ValueKind::kI32:
1188 output_frame->SetLiftoffFrameSlot32(
1189 base_offset - liftoff_iter->offset() + kLiftoffStackBias,
1190 value.int32_value_);
1192 case wasm::ValueKind::kF32:
1194 output_frame->SetLiftoffFrameSlot32(
1195 base_offset - liftoff_iter->offset() + kLiftoffStackBias,
1196 value.float_value().get_bits());
1198 case wasm::ValueKind::kI64:
1199 if constexpr (
Is64()) {
1202 output_frame->SetLiftoffFrameSlot64(
1203 base_offset - liftoff_iter->offset(), value.int64_value_);
// 32-bit: write low then high half into adjacent 32-bit slots.
1209 if (int64_lowering_is_low) {
1210 skip_increase_liftoff_iter =
true;
1211 output_frame->SetLiftoffFrameSlot32(
1212 base_offset - liftoff_iter->offset(), value.int32_value_);
1214 output_frame->SetLiftoffFrameSlot32(
1215 base_offset - liftoff_iter->offset() +
sizeof(int32_t),
1216 value.int32_value_);
1218 int64_lowering_is_low = !int64_lowering_is_low;
1221 case wasm::ValueKind::kS128: {
1222 int64x2 values = value.simd_value().to_i64x2();
1223 const int offset = base_offset - liftoff_iter->offset();
1224 output_frame->SetLiftoffFrameSlot64(
offset, values.val[0]);
1225 output_frame->SetLiftoffFrameSlot64(
offset +
sizeof(int64_t),
1229 case wasm::ValueKind::kF64:
1231 output_frame->SetLiftoffFrameSlot64(
1232 base_offset - liftoff_iter->offset(),
1233 value.double_value().get_bits());
1235 case wasm::ValueKind::kRef:
1236 case wasm::ValueKind::kRefNull:
1238 output_frame->SetLiftoffFrameSlotPointer(
1239 base_offset - liftoff_iter->offset(), value.raw_literal_.ptr());
1247 if (!skip_increase_liftoff_iter) {
// Frame-type marker and feedback vector slots.
1253 uint32_t frame_type_offset =
1255 output_frame->SetFrameSlot(frame_type_offset,
1259 wasm_trusted_instance->feedback_vectors();
1260 uint32_t feedback_offset =
1263 native_module->module(), frame.wasm_function_index());
1264 CHECK_LT(fct_feedback_index, module_feedback->length());
1265 Tagged<Object> feedback_vector = module_feedback->get(fct_feedback_index);
// A Smi here means the vector was never allocated.
1266 if (
IsSmi(feedback_vector)) {
1269 "Deopt with uninitialized feedback vector for function %s [%d]\n",
1270 wasm_code->DebugName().c_str(), frame.wasm_function_index());
1278 output_frame->SetFrameSlot(feedback_offset,
1281 output_frame->SetFrameSlot(feedback_offset, feedback_vector.ptr());
1287 output_frame->SetContinuation(0);
// Frame pointer sits above the frame's own slots.
1289 const intptr_t fp_value = top + output_frame_size;
1290 output_frame->SetFp(fp_value);
1292 output_frame->SetRegister(fp_reg.code(), fp_value);
1294#ifdef V8_COMPRESS_POINTERS
1296 isolate()->cage_base());
1299 return output_frame;
// Computes all output frames for a wasm deopt (fragment): reads the deopt
// entry and translations from the optimized code's deopt data, builds one
// Liftoff frame per translated frame, and resets type-feedback/tiering state.
1305void Deoptimizer::DoComputeOutputFramesWasmImpl() {
1307 base::ElapsedTimer timer;
1309 wasm::WasmCode* code = compiled_optimized_wasm_code_;
1312 wasm::WasmDeoptView deopt_view(code->deopt_data());
1313 wasm::WasmDeoptEntry deopt_entry =
1320 "[bailout (kind: %s, reason: %s, type: Wasm): begin. deoptimizing "
1321 "%s, function index %d, bytecode offset %d, deopt exit %d, FP to SP "
1326 code->DebugName().c_str(), code->index(),
1327 deopt_entry.bytecode_offset.ToInt(), deopt_entry.translation_index,
// Translations live off-heap in the wasm code's deopt data.
1331 base::Vector<const uint8_t> off_heap_translations =
1332 deopt_view.GetTranslationsArray();
1334 DeoptTranslationIterator state_iterator(off_heap_translations,
1335 deopt_entry.translation_index);
1336 wasm::NativeModule* native_module = code->native_module();
1338 native_module->module()->functions[code->index()].sig->parameter_count());
1339 DeoptimizationLiteralProvider literals(
1340 deopt_view.BuildDeoptimizationLiteralArray());
1354 &state_iterator, {}, literals,
// Pre-compile the deopted function itself with Liftoff when needed.
1367 compiled_optimized_wasm_code_->index()) {
1368 CompileWithLiftoffAndGetDeoptInfo(native_module,
1369 compiled_optimized_wasm_code_->index(),
1370 deopt_entry.bytecode_offset,
false);
// Build one output frame per translated frame.
1380 std::stack<intptr_t> shadow_stack;
1383 output_[
i] = DoComputeWasmLiftoffFrame(
1384 frame, native_module, wasm_trusted_instance,
i, shadow_stack);
// Flush collected pcs into the owned shadow-stack array (CET only).
1387#ifdef V8_ENABLE_CET_SHADOW_STACK
1390 shadow_stack_ =
new intptr_t[shadow_stack.size()];
1391 while (!shadow_stack.empty()) {
1392 shadow_stack_[shadow_stack_count_++] = shadow_stack.top();
// Mark per-function feedback for reprocessing and reset tier-up state so
// the function can re-tier after the deopt.
1404 wasm::TypeFeedbackStorage& feedback =
1405 native_module->module()->type_feedback;
1408 int index = frame.wasm_function_index();
1409 auto iter = feedback.feedback_for_function.find(index);
1410 if (iter != feedback.feedback_for_function.end()) {
1411 iter->second.needs_reprocessing_after_deopt =
true;
1418 feedback.feedback_for_function[code->index()].tierup_priority = 0;
1421 ++feedback.deopt_count_for_function[code->index()]);
1425 int declared_func_index =
1427 wasm_trusted_instance->tiering_budget_array()[declared_func_index].store(
1428 v8_flags.wasm_tiering_budget, std::memory_order_relaxed);
// Computes parameter/return stack-slot counts for a wasm signature
// (fragment). Uses a no-op collector since only the counts are needed; on
// 32-bit targets the signature is first i32-lowered.
1439 int* parameter_stack_slots,
1440 int* return_stack_slots) {
1441 class DummyResultCollector {
1443 void AddParamAt(
size_t index, LinkageLocation location) {}
1444 void AddReturnAt(
size_t index, LinkageLocation location) {}
1448#if V8_TARGET_ARCH_32_BIT
1452 zone_.emplace(&*alloc_,
"deoptimizer i32sig lowering");
1456 int untagged_slots, untagged_return_slots;
1458 parameter_stack_slots, &untagged_return_slots,
1459 return_stack_slots);
// Heuristic (fragment): returns true if maglev code deopted "early", i.e.
// either Turbofan has already been requested, or the remaining interrupt
// budget (normalized by bytecode length) shows few invocations happened.
1465bool DeoptimizedMaglevvedCodeEarly(Isolate* isolate,
1468 if (!code->is_maglevved())
return false;
1469 if (function->GetRequestedOptimizationIfAny(isolate) ==
1470 CodeKind::TURBOFAN_JS) {
// Budget per bytecode-length unit approximates invocations remaining until
// the Turbofan tier-up threshold.
1476 int current_invocation_budget =
1477 function->raw_feedback_cell()->interrupt_budget() /
1478 function->shared()->GetBytecodeArray(isolate)->length();
1479 return current_invocation_budget >=
1480 v8_flags.invocation_count_for_turbofan -
1481 v8_flags.invocation_count_for_maglev_with_delay;
// Main frame-computation driver (heavily elided fragment): dispatches to the
// wasm implementation when applicable, reads the translation for this deopt
// exit, truncates the frame list at a catch handler for throwing deopts,
// builds each output frame by kind, and updates tiering state.
1492#if V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK
1497#if V8_ENABLE_WEBASSEMBLY
1500 DoComputeOutputFramesWasmImpl();
1539 auto translations = input_data->FrameTranslation();
1540 unsigned translation_index =
1545 TraceDeoptBegin(input_data->OptimizationId().value(), bytecode_offset);
1554 &state_iterator, input_data->ProtectedLiteralArray(),
// For a deoptimizing throw, scan from the top for the frame holding the
// catch handler and drop everything above it.
1571 size_t catch_handler_frame_index =
count;
1572 for (
size_t i =
count;
i-- > 0;) {
1576 catch_handler_frame_index =
i;
1581 count = catch_handler_frame_index + 1;
// Build each output frame; dispatch on the translated frame's kind.
1589 int frame_index = 0;
1590 size_t total_output_frame_size = 0;
1591 for (
size_t i = 0;
i <
count; ++
i, ++frame_index) {
1594 switch (translated_frame->
kind()) {
1609#if V8_ENABLE_WEBASSEMBLY
1610 case TranslatedFrame::kJSToWasmBuiltinContinuation:
1621 translated_frame, frame_index,
1626#if V8_ENABLE_WEBASSEMBLY
// Pure-wasm frame kinds are illegal in a JS deopt.
1627 case TranslatedFrame::kWasmInlinedIntoJS:
1628 FATAL(
"inlined wasm frames may not appear in JS deopts");
1629 case TranslatedFrame::kLiftoffFunction:
1630 FATAL(
"wasm liftoff frames may not appear in JS deopts");
1633 FATAL(
"invalid frame");
1640 isolate()->isolate_root());
1641#ifdef V8_COMPRESS_POINTERS
1643 isolate()->cage_base());
// CET: collect the pcs of all output frames (skipping those without a
// caller pc) into the shadow-stack array.
1646#ifdef V8_ENABLE_CET_SHADOW_STACK
1649 shadow_stack_ =
new intptr_t[
count + 1];
1656 shadow_stack_[shadow_stack_count_++] =
1662 shadow_stack_[shadow_stack_count_++] =
1668 for (
int i =
static_cast<int>(
count) - 1;
i > 0;
i--) {
1669 if (!
output_[
i]->HasCallerPc())
continue;
1670 shadow_stack_[shadow_stack_count_++] =
// Profile-guided tiering bookkeeping after the deopt.
1681 DeoptimizeReason::kOSREarlyExit;
1688 : (!osr_early_exit &&
1692 if (
v8_flags.profile_guided_optimization &&
1693 function_->shared()->cached_tiering_decision() !=
1696 function_->shared()->set_cached_tiering_decision(
1699 function_->shared()->set_cached_tiering_decision(
1710 CodeKind::INTERPRETED_FUNCTION);
1711 function_->feedback_vector()->set_was_once_deoptimized();
// Fragment: walks the bytecode to decide whether the deopt exit offset lies
// inside the loop being OSR'd. Returns true when the exit offset is reached
// before leaving the loop; outermost (nesting level 0) JumpLoop ends the
// search.
1743 function->shared()->GetBytecodeArray(isolate), isolate);
1745 bytecode_array, deopt_exit_offset.
ToInt()));
1748 CHECK(it.CurrentBytecodeIsValidOSREntry());
1750 for (; !it.done(); it.Advance()) {
1751 const int current_offset = it.current_offset();
1755 if (current_offset == deopt_exit_offset.
ToInt())
return true;
// Only JumpLoop bytecodes delimit loops.
1757 if (it.current_bytecode() != interpreter::Bytecode::kJumpLoop)
continue;
1765 const int loop_nesting_level = it.GetImmediateOperand(1);
1766 if (loop_nesting_level == 0)
return false;
// Selects the builtin an unoptimized output frame returns into: restart
// trampoline for debugger frame restarts, otherwise enter the interpreter
// at the next bytecode (lazy deopt) or the current one (eager deopt).
1774Builtin DispatchBuiltinFor(
bool advance_bc,
bool is_restart_frame) {
1775 if (is_restart_frame)
return Builtin::kRestartFrameTrampoline;
1777 return advance_bc ? Builtin::kInterpreterEnterAtNextBytecode
1778 : Builtin::kInterpreterEnterAtBytecode;
// Builds an interpreter (unoptimized) output frame (heavily elided
// fragment): computes the layout from the bytecode array, writes arguments,
// fixed frame slots, locals, and the accumulator, then sets pc/fp/context
// registers for the topmost frame.
1785 bool goto_catch_handler) {
1788 const bool is_bottommost = (0 == frame_index);
1792 const int bytecode_offset =
1795 const int parameters_count = bytecode_array->parameter_count();
1801 bool should_pad_arguments =
1805 const int locals_count = translated_frame->
height();
1807 parameters_count, locals_count, is_topmost, should_pad_arguments);
// Under active debugging, use the debug copy of the bytecode.
1812 std::optional<Tagged<DebugInfo>> debug_info =
1814 if (debug_info.has_value() && debug_info.value()->HasBreakInfo()) {
1816 bytecode_array = debug_info.value()->DebugBytecodeArray(
isolate());
1826 output_[frame_index] = output_frame;
// Lazy deopts resume after the current bytecode; see DispatchBuiltinFor.
1837 const bool advance_bc =
1839 !goto_catch_handler;
1842 builtins->code(DispatchBuiltinFor(advance_bc, restart_frame));
1846 std::unique_ptr<char[]> name =
1850 real_bytecode_offset);
1853 goto_catch_handler ?
" (throw)" :
"");
1858 const intptr_t top_address =
1861 output_frame->
SetTop(top_address);
1865 if (should_pad_arguments) {
1867 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
1874 " -- %d extra argument(s) already in the stack --\n",
// Caller pc/fp/constant-pool come from the input frame for the bottommost
// frame, otherwise from the previously built output frame.
1894 if (is_bottommost) {
1904 const intptr_t caller_fp =
1908 const intptr_t fp_value = top_address + frame_writer.
top_offset();
1909 output_frame->
SetFp(fp_value);
1919 const intptr_t caller_cp =
1932 if (goto_catch_handler) {
1947 if (is_bottommost) {
// Fixed frame slots: argc, bytecode array, bytecode offset (as Smi).
1956 frame_writer.
PushRawValue(argc,
"actual argument count\n");
1959 frame_writer.
PushRawObject(bytecode_array,
"bytecode array\n");
1962 const int raw_bytecode_offset =
1965 frame_writer.
PushRawObject(smi_bytecode_offset,
"bytecode offset\n");
// Locals; for the topmost frame, registers holding return values are
// overwritten with the actual return value(s).
1977 const int return_value_first_reg =
1980 for (
int i = 0;
i < locals_count; ++
i, ++value_iterator) {
1983 if (is_topmost && !goto_catch_handler &&
1985 i < return_value_first_reg + return_value_count) {
1986 const int return_index =
i - return_value_first_reg;
1987 if (return_index == 0) {
1989 "return value 0\n");
1994 CHECK_LE(return_value_first_reg + return_value_count, locals_count);
1998 "return value 1\n");
// Stack-alignment padding after the register file.
2006 uint32_t register_slots_written =
static_cast<uint32_t
>(locals_count);
2011 register_slots_written++;
2012 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2018 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
// When jumping to a catch handler the accumulator holds the exception.
2023 if (goto_catch_handler) {
2026 intptr_t accumulator_value =
2038 "return value 0\n");
2049 CHECK_EQ(translated_frame->
end(), value_iterator);
// Topmost frame resumes in the dispatch builtin past the shadow-stack
// adaptation prologue.
2053 static_cast<intptr_t
>(dispatch_builtin->instruction_start()) +
2054 isolate()->
heap()->deopt_pc_offset_after_adapt_shadow_stack().value();
2060 output_frame->
SetPc(top_most_pc);
2067 intptr_t constant_pool_value =
2068 static_cast<intptr_t
>(dispatch_builtin->constant_pool());
2073 output_frame->
SetRegister(constant_pool_reg.
code(), constant_pool_value);
// Context register is cleared to Smi::zero() here (elided code presumably
// fills the real context elsewhere — confirm against full source).
2082 intptr_t context_value =
static_cast<intptr_t
>(
Smi::zero().
ptr());
2088 static_cast<intptr_t
>(
continuation->instruction_start()));
// Builds the variable-sized frame holding extra (over-application) arguments
// of an inlined call (fragment). Only arguments beyond the formal parameter
// count are materialized; pc/fp mirror the previously built frame.
2105 const int argument_count_without_receiver = translated_frame->
height() - 1;
2106 const int formal_parameter_count_without_receiver =
// Sandbox check: formal count originates from sandboxed memory.
2108 SBXCHECK_GE(formal_parameter_count_without_receiver, 0);
2109 const int extra_argument_count =
2110 argument_count_without_receiver - formal_parameter_count_without_receiver;
2115 formal_parameter_count_without_receiver) +
2117 const int output_frame_size =
2121 " translating inlined arguments frame => variable_size=%d\n",
2131 const intptr_t top_address =
2133 output_frame->
SetTop(top_address);
// This pseudo-frame reuses the previous output frame's pc and fp.
2135 output_frame->
SetPc(
output_[frame_index - 1]->GetPc());
2136 output_frame->
SetFp(
output_[frame_index - 1]->GetFp());
2137 output_[frame_index] = output_frame;
2142 for (
int i = 0;
i < padding; ++
i) {
2143 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
// Skip over the formal parameters; only extras are pushed.
2146 if (extra_argument_count > 0) {
2153 for (
int i = 0;
i < formal_parameter_count_without_receiver;
i++)
// Builds a construct-create stub output frame (fragment): lays out caller
// pc/fp, the construct-stub sentinel context marker, padding and arguments,
// then resumes inside JSConstructStubGeneric at the recorded deopt pc.
2169 const int parameters_count = translated_frame->
height();
2177 " translating construct create stub => variable_frame_size=%d, "
2188 output_[frame_index] = output_frame;
2192 const intptr_t top_address =
2194 output_frame->
SetTop(top_address);
2198 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
// Caller pc/fp come from the previously built output frame.
2213 const intptr_t caller_pc =
output_[frame_index - 1]->
GetPc();
2217 const intptr_t caller_fp =
output_[frame_index - 1]->
GetFp();
2220 const intptr_t fp_value = top_address + frame_writer.
top_offset();
2221 output_frame->
SetFp(fp_value);
2235 frame_writer.
PushRawValue(marker,
"context (construct stub sentinel)\n");
2240 const uint32_t argc = parameters_count;
2250 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2255 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2263 CHECK_EQ(translated_frame->
end(), value_iterator);
// Resume pc = stub start + recorded create-deopt pc offset.
2271 isolate_->
heap()->construct_stub_create_deopt_pc_offset().value();
2279 output_frame->
SetPc(pc_value);
2284 intptr_t constant_pool_value =
2285 static_cast<intptr_t
>(construct_stub->constant_pool());
2290 output_frame->
SetRegister(constant_pool_reg.
code(), constant_pool_value);
2299 intptr_t context_value =
static_cast<intptr_t
>(
Smi::zero().
ptr());
2308 static_cast<intptr_t
>(
continuation->instruction_start()));
// Builds a fast-construct (invoke) stub output frame (fragment). Mirrors the
// create-stub variant above, but uses the fast-construct sentinel and
// resumes inside InterpreterPushArgsThenFastConstructFunction at the
// recorded invoke-deopt pc.
2328 " translating construct invoke stub => variable_frame_size=%d, "
2339 output_[frame_index] = output_frame;
2343 const intptr_t top_address =
2345 output_frame->
SetTop(top_address);
// Caller pc/fp come from the previously built output frame.
2354 const intptr_t caller_pc =
output_[frame_index - 1]->
GetPc();
2358 const intptr_t caller_fp =
output_[frame_index - 1]->
GetFp();
2361 const intptr_t fp_value = top_address + frame_writer.
top_offset();
2362 output_frame->
SetFp(fp_value);
2374 frame_writer.
PushRawValue(marker,
"fast construct stub sentinel\n");
2381 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2386 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2394 CHECK_EQ(translated_frame->
end(), value_iterator);
2399 Builtin::kInterpreterPushArgsThenFastConstructFunction);
// Resume pc = stub start + recorded invoke-deopt pc offset.
2402 isolate_->
heap()->construct_stub_invoke_deopt_pc_offset().value();
2410 output_frame->
SetPc(pc_value);
2415 intptr_t constant_pool_value =
2416 static_cast<intptr_t
>(construct_stub->constant_pool());
2421 output_frame->
SetRegister(constant_pool_reg.
code(), constant_pool_value);
2430 intptr_t context_value =
static_cast<intptr_t
>(
Smi::zero().
ptr());
2439 static_cast<intptr_t
>(
continuation->instruction_start()));
// Fragments of two helpers: mapping a builtin-continuation mode to its
// StackFrame type...
2461 return StackFrame::BUILTIN_CONTINUATION;
2463 return StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION;
2465 return StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
2467 return StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH;
// ...and choosing the continuation trampoline builtin (code-stub vs.
// JavaScript flavor, with or without a result to handle).
2478 return must_handle_result ? Builtin::kContinueToCodeStubBuiltinWithResult
2479 : Builtin::kContinueToCodeStubBuiltin;
2483 return must_handle_result
2484 ? Builtin::kContinueToJavaScriptBuiltinWithResult
2485 : Builtin::kContinueToJavaScriptBuiltin;
// Wasm-only fragment: converts a JS-to-wasm call's return value (if any,
// per the optional return kind) into a translated value; with no return
// kind, undefined is used.
2490#if V8_ENABLE_WEBASSEMBLY
2492 std::optional<wasm::ValueKind> wasm_call_return_kind) {
2493 if (wasm_call_return_kind) {
2494 switch (wasm_call_return_kind.value()) {
// f32 case reinterprets the raw register bits as a float.
2506 Float32(*
reinterpret_cast<float*
>(
2513 case wasm::kRefNull:
2523 ReadOnlyRoots(
isolate()).undefined_value());
// Builds a builtin-continuation output frame (heavily elided fragment):
// stack parameters, lazy-deopt result/exception placeholders, register
// parameters captured for the continuation trampoline, and the frame's
// sentinel context marker.
2590 bool is_js_to_wasm_builtin_continuation =
false;
2591#if V8_ENABLE_WEBASSEMBLY
2592 is_js_to_wasm_builtin_continuation =
2593 translated_frame->
kind() == TranslatedFrame::kJSToWasmBuiltinContinuation;
2594 if (is_js_to_wasm_builtin_continuation) {
2601 translated_frame->wasm_call_return_kind());
2615 const bool is_bottommost = (0 == frame_index);
2618 const int parameters_count = translated_frame->
height();
2621 continuation_descriptor, config,
2625 const unsigned output_frame_size_above_fp =
// Detect whether one of the register parameters is the argc register.
2630 bool has_argc =
false;
2631 const int register_parameter_count =
2633 for (
int i = 0;
i < register_parameter_count; ++
i) {
2647 CHECK_EQ(BuiltinContinuationModeIsJavaScript(mode), has_argc);
2651 " translating BuiltinContinuation to %s,"
2652 " => register_param_count=%d,"
2653 " stack_param_count=%d, frame_size=%d\n",
2660 output_[frame_index] = output_frame;
2665 const intptr_t top_address =
2668 output_frame->
SetTop(top_address);
2673 const intptr_t maybe_function = value_iterator->GetRawValue().ptr();
2678 for (
int i = 0;
i < padding; ++
i) {
2679 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2686 ++
i, ++value_iterator) {
// Lazy-deopt slots: the real wasm return result, or hole placeholders for
// results/exception to be filled on lazy deopt.
2690 if (is_js_to_wasm_builtin_continuation) {
2692 "return result on lazy deopt\n");
2696 roots.the_hole_value(),
2697 "placeholder for return result on lazy deopt\n");
2704 roots.the_hole_value(),
2705 "placeholder for return result on lazy deopt\n");
2714 "placeholder for exception on lazy deopt\n");
2717 intptr_t accumulator_value =
2720 "exception (from accumulator)\n");
// Capture register parameter values, indexed by register code.
2730 std::vector<TranslatedFrame::iterator> register_values;
2732 register_values.resize(total_registers, {value_iterator});
2734 for (
int i = 0;
i < register_parameter_count; ++
i, ++value_iterator) {
2736 register_values[
code] = value_iterator;
2745 const intptr_t value = context.ptr();
// Caller fp/constant-pool from input frame (bottommost) or previous output.
2751 if (is_bottommost) {
2758 const intptr_t caller_fp =
2762 const intptr_t fp_value = top_address + frame_writer.
top_offset();
2763 output_frame->
SetFp(fp_value);
2769 const intptr_t caller_cp =
2776 const intptr_t marker =
2779 "context (builtin continuation sentinel)\n");
// JavaScript-mode continuations also record the JSFunction, frame height,
// and the builtin's context.
2781 if (BuiltinContinuationModeIsJavaScript(mode)) {
2782 frame_writer.
PushRawValue(maybe_function,
"JSFunction\n");
2790 "frame height at deoptimization\n");
2795 "builtin JavaScript context\n");
// Spill allocatable registers; argc is kept tagged for the continuation to
// untag.
2801 const int allocatable_register_count =
2803 for (
int i = 0;
i < allocatable_register_count; ++
i) {
2807 if (BuiltinContinuationModeIsJavaScript(mode) &&
2811 "tagged argument count %s (will be untagged by continuation)\n",
2814 SNPrintF(str,
"builtin register argument %s\n",
2824 const int padding_slot_count =
2826 allocatable_register_count);
2827 for (
int i = 0;
i < padding_slot_count; ++
i) {
2828 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2833 frame_writer.
PushRawObject(roots.the_hole_value(),
"padding\n");
2841 !is_js_to_wasm_builtin_continuation) {
2844 "callback result\n");
2846 frame_writer.
PushRawObject(roots.undefined_value(),
"callback result\n");
2850 CHECK_EQ(result_iterator, value_iterator);
2858 intptr_t context_value =
static_cast<intptr_t
>(
Smi::zero().
ptr());
2873 !is_js_to_wasm_builtin_continuation));
2875 static_cast<intptr_t
>(continue_to_builtin->instruction_start()) +
2876 isolate()->
heap()->deopt_pc_offset_after_adapt_shadow_stack().value();
2882 output_frame->
SetPc(top_most_pc);
2890 static_cast<intptr_t
>(
continuation->instruction_start()));
2895 if (
v8_flags.deopt_every_n_times > 0) {
2907 static_cast<intptr_t
>(materialization.output_slot_address_),
2913 *(
reinterpret_cast<Address*
>(materialization.output_slot_address_)) =
2919 DCHECK(IsJSFunction(*closure));
2922 CHECK(IsFeedbackVector(feedback_vector));
2923 *(
reinterpret_cast<Address*
>(fbv_materialization.output_slot_address_)) =
2924 feedback_vector.
ptr();
2933 PrintF(file,
"Feedback updated from deoptimization at ");
2976 code->instruction_start() + deopt_data->DeoptExitStart().value();
2977 int eager_deopt_count = deopt_data->EagerDeoptCount().value();
2984 "lazy deopts are expected to be emitted last");
2985 if (return_pc <= lazy_deopt_start) {
3011 if (
v8_flags.enable_slow_asserts) {
3022 unsigned outgoing_size = 0;
3032 int parameter_slots = code->parameter_count();
3041 uint32_t last_node_id = 0;
3050 if (info->pc() >=
pc)
break;
3052 int script_offset =
static_cast<int>(info->data());
3055 int inlining_id =
static_cast<int>(it.rinfo()->
data());
3058 last_deopt_id =
static_cast<int>(info->data());
3062 last_node_id =
static_cast<uint32_t
>(info->data());
3065 return DeoptInfo(last_position, last_reason, last_node_id, last_deopt_id);
#define DISALLOW_GARBAGE_COLLECTION(name)
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
#define SBXCHECK_GE(lhs, rhs)
constexpr T * begin() const
static int PaddingSlotCount(int register_count)
uint32_t frame_size_in_bytes_above_fp() const
bool frame_has_result_stack_slot() const
uint32_t frame_size_in_bytes() const
uint32_t translated_stack_parameter_count() const
static BuiltinContinuationFrameInfo Precise(int translation_height, const CallInterfaceDescriptor &continuation_descriptor, const RegisterConfiguration *register_config, bool is_topmost, DeoptimizeKind deopt_kind, BuiltinContinuationMode continuation_mode)
uint32_t stack_parameter_count() const
V8_EXPORT_PRIVATE Tagged< Code > code(Builtin builtin)
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static Builtin GetBuiltinFromBytecodeOffset(BytecodeOffset)
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
constexpr bool IsNone() const
constexpr int ToInt() const
Register GetRegisterParameter(int index) const
StackArgumentOrder GetStackArgumentOrder() const
int GetRegisterParameterCount() const
MachineType GetParameterType(int index) const
static constexpr int kConstantPoolOffset
static constexpr int kCallerPCOffset
static constexpr int kFixedFrameSizeAboveFp
uint32_t frame_size_in_bytes() const
static ConstructStubFrameInfo Precise(int translation_height, bool is_topmost)
uint32_t frame_size_in_bytes_without_fixed() const
unsigned ComputeInputFrameSize() const
unsigned deopt_exit_index_
bool is_restart_frame() const
intptr_t caller_constant_pool_
void DoComputeInlinedExtraArguments(TranslatedFrame *translated_frame, int frame_index)
void DoComputeConstructInvokeStubFrame(TranslatedFrame *translated_frame, int frame_index)
void DoComputeUnoptimizedFrame(TranslatedFrame *translated_frame, int frame_index, bool goto_catch_handler)
DirectHandle< JSFunction > function() const
void QueueFeedbackVectorForMaterialization(Address output_address, const TranslatedFrame::iterator &iterator)
static int output_offset()
friend class FrameDescription
std::vector< ValueToMaterialize > feedback_vector_to_materialize_
static V8_EXPORT_PRIVATE const int kEagerDeoptExitSize
static void TraceDeoptAll(Isolate *isolate)
void TraceDeoptBegin(int optimization_id, BytecodeOffset bytecode_offset)
DirectHandle< Code > compiled_code() const
Tagged< Code > compiled_code_
static V8_EXPORT_PRIVATE const int kAdaptShadowStackOffsetToSubtract
bool tracing_enabled() const
void TraceDeoptEnd(double deopt_duration)
static bool DeoptExitIsInsideOsrLoop(Isolate *isolate, Tagged< JSFunction > function, BytecodeOffset deopt_exit_offset, BytecodeOffset osr_offset)
void DoComputeOutputFrames()
unsigned ComputeInputFrameAboveFpFixedSize() const
static Builtin TrampolineForBuiltinContinuation(BuiltinContinuationMode mode, bool must_handle_result)
CodeTracer::Scope * verbose_trace_scope() const
CodeTracer::Scope *const trace_scope_
static V8_EXPORT_PRIVATE Builtin GetDeoptimizationEntry(DeoptimizeKind kind)
void DeleteFrameDescriptions()
static unsigned ComputeIncomingArgumentSize(Tagged< Code > code)
static void TraceEvictFromOptimizedCodeCache(Isolate *isolate, Tagged< SharedFunctionInfo > sfi, const char *reason)
static constexpr unsigned kFixedExitSizeMarker
static V8_EXPORT_PRIVATE const int kLazyDeoptExitSize
Isolate * isolate() const
TranslatedState translated_state_
int actual_argument_count_
void MaterializeHeapObjects()
static void TraceMarkForDeoptimization(Isolate *isolate, Tagged< Code > code, LazyDeoptimizeReason reason)
DeoptimizeKind deopt_kind_
CodeTracer::Scope * trace_scope() const
void DoComputeConstructCreateStubFrame(TranslatedFrame *translated_frame, int frame_index)
FrameDescription ** output_
int catch_handler_pc_offset_
FrameDescription * input_
bool verbose_tracing_enabled() const
BytecodeOffset bytecode_offset_in_outermost_frame_
void DoComputeBuiltinContinuation(TranslatedFrame *translated_frame, int frame_index, BuiltinContinuationMode mode)
DeoptInfo GetDeoptInfo() const
void QueueValueForMaterialization(Address output_address, Tagged< Object > obj, const TranslatedFrame::iterator &iterator)
std::vector< ValueToMaterialize > values_to_materialize_
static const char * MessageFor(DeoptimizeKind kind)
Tagged< JSFunction > function_
intptr_t caller_frame_top_
uint32_t frame_size_in_bytes() const
uint32_t frame_size_in_bytes_without_fixed() const
static FastConstructStubFrameInfo Precise(bool is_topmost)
uint64_t * get_bits_address()
unsigned GetLastArgumentSlotOffset(bool pad_arguments=true)
intptr_t GetCallerPc() const
void SetFp(intptr_t frame_pointer)
uint32_t GetFrameSize() const
void SetTop(intptr_t top)
Address GetFramePointerAddress()
intptr_t GetConstantPool() const
intptr_t GetContinuation() const
void SetConstantPool(intptr_t constant_pool)
Float64 GetDoubleRegister(unsigned n) const
static FrameDescription * Create(uint32_t frame_size, int parameter_count, Isolate *isolate)
intptr_t GetFrameSlot(unsigned offset)
void SetRegister(unsigned n, intptr_t value)
intptr_t GetRegister(unsigned n) const
RegisterValues * GetRegisterValues()
void SetContinuation(intptr_t pc)
void PushBottommostCallerPc(intptr_t pc)
void PushValue(intptr_t value)
void PushRawValue(intptr_t value, const char *debug_hint)
void PushStackJSArguments(TranslatedFrame::iterator &iterator, int parameters_count)
void PushCallerConstantPool(intptr_t cp)
void DebugPrintOutputPc(intptr_t value, const char *debug_hint="")
void PushFeedbackVectorForMaterialization(const TranslatedFrame::iterator &iterator)
void PushApprovedCallerPc(intptr_t pc)
Deoptimizer * deoptimizer_
void PushRawObject(Tagged< Object > obj, const char *debug_hint)
FrameWriter(Deoptimizer *deoptimizer, FrameDescription *frame, CodeTracer::Scope *trace_scope)
void PushTranslatedValue(const TranslatedFrame::iterator &iterator, const char *debug_hint="")
Address output_address(unsigned output_offset)
unsigned top_offset() const
FrameDescription * frame_
void DebugPrintOutputObject(Tagged< Object > obj, unsigned output_offset, const char *debug_hint="")
void DebugPrintOutputValue(intptr_t value, const char *debug_hint="")
void PushCallerFp(intptr_t fp)
FrameDescription * frame()
CodeTracer::Scope *const trace_scope_
static const int kNoHandlerFound
static constexpr int kHeaderSize
V8_EXPORT_PRIVATE Tagged< Code > FindCodeForInnerPointer(Address inner_pointer)
V8_EXPORT_PRIVATE void CollectAllGarbage(GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
MaterializedObjectStore * materialized_object_store() const
StackGuard * stack_guard()
Tagged< JSFunction > function() const override
static Register fp_register()
static Register constant_pool_pointer_register()
static Register context_register()
static constexpr MachineType Int32()
uint8_t num_extra_spill_slots() const
static V8_INLINE Address SignAndCheckPC(Isolate *isolate, Address pc, Address sp)
static V8_INLINE Address StripPAC(Address pc)
static V8_INLINE Address MoveSignedPC(Isolate *isolate, Address pc, Address new_sp, Address old_sp)
constexpr int8_t code() const
static const RegisterConfiguration * Default()
int num_general_registers() const
int num_allocatable_general_registers() const
int GetAllocatableGeneralCode(int index) const
void SetRegister(unsigned n, intptr_t value)
static constexpr Register from_code(int code)
static constexpr int ModeMask(Mode mode)
bool has_deoptimization_index() const
static BlockAccessScope MaybeBlockAccess()
static constexpr Tagged< Smi > FromInt(int value)
static constexpr Tagged< Smi > zero()
static SourcePosition Unknown()
static constexpr int32_t TypeToMarker(Type type)
bool is_optimized_js() const
static constexpr int kArgCOffset
V8_INLINE constexpr StorageType ptr() const
V8_INLINE constexpr bool is_null() const
BytecodeOffset bytecode_offset() const
int return_value_offset() const
void Add(const TranslatedValue &value)
Tagged< BytecodeArray > raw_bytecode_array() const
int formal_parameter_count() const
@ kJavaScriptBuiltinContinuationWithCatch
@ kJavaScriptBuiltinContinuation
int return_value_count() const
Tagged< SharedFunctionInfo > raw_shared_info() const
std::vector< TranslatedFrame > & frames()
void Init(Isolate *isolate, Address input_frame_pointer, Address stack_frame_pointer, DeoptTranslationIterator *iterator, Tagged< ProtectedDeoptimizationLiteralArray > protected_literal_array, const DeoptimizationLiteralProvider &literal_array, RegisterValues *registers, FILE *trace_file, int parameter_count, int actual_argument_count)
void VerifyMaterializedObjects()
std::vector< TranslatedFrame >::iterator iterator
void Prepare(Address stack_frame_pointer)
static TranslatedValue NewDouble(TranslatedState *container, Float64 value)
static TranslatedValue NewInt64ToBigInt(TranslatedState *container, int64_t value)
static TranslatedValue NewInt32(TranslatedState *container, int32_t value)
static TranslatedValue NewFloat(TranslatedState *container, Float32 value)
static TranslatedValue NewTagged(TranslatedState *container, Tagged< Object > literal)
static constexpr int kFrameTypeOffset
uint32_t register_stack_slot_count() const
static UnoptimizedFrameInfo Precise(int parameters_count_with_receiver, int translation_height, bool is_topmost, bool pad_arguments)
uint32_t frame_size_in_bytes() const
uint32_t frame_size_in_bytes_without_fixed() const
static constexpr int kInstanceDataOffset
static constexpr int kFeedbackVectorOffset
static bool IsValidOffset(Handle< BytecodeArray > bytecode_array, int offset)
std::vector< WasmCode * > PublishCode(base::Vector< UnpublishedWasmCode > unpublished_code)
CompilationState * compilation_state() const
base::Vector< const uint8_t > wire_bytes() const
V8_WARN_UNUSED_RESULT UnpublishedWasmCode AddCompiledCode(WasmCompilationResult &)
WasmCode * LookupCode(Isolate *isolate, Address pc) const
const WasmDeoptData & GetDeoptData() const
#define PROFILE(the_isolate, Call)
#define V8_EMBEDDED_CONSTANT_POOL_BOOL
const JSFunctionRef function_
Handle< SharedFunctionInfo > info
#define DEOPTIMIZATION_HELPER_BUILTINS(V)
enum v8::internal::@1270::DeoptimizableCodeIterator::@67 state_
#define CHECK_BUILTIN(builtin, offset)
std::unique_ptr< SafepointScope > safepoint_scope_
std::unique_ptr< ObjectIterator > object_iterator_
ZoneVector< RpoNumber > & result
MovableLabel continuation
Comparator::Output * output_
base::SmallVector< int32_t, 1 > stack_slots
static V ReadUnalignedValue(Address p)
constexpr bool IsInRange(T value, U lower_limit, U higher_limit)
constexpr Vector< T > VectorOf(T *start, size_t size)
LockGuard< Mutex > MutexGuard
FloatWithBits< 32 > Float32
WordWithBits< 128 > Simd128
constexpr DoubleRegister kFpReturnRegisters[]
WasmCodeManager * GetWasmCodeManager()
const wasm::FunctionSig * GetI32Sig(Zone *zone, const wasm::FunctionSig *sig)
WasmEngine * GetWasmEngine()
int declared_function_index(const WasmModule *module, int func_index)
void IterateSignatureImpl(const SigType *sig, bool extra_callable_param, ResultCollector &locations, int *untagged_parameter_slots, int *total_parameter_slots, int *untagged_return_slots, int *total_return_slots)
Signature< ValueType > FunctionSig
constexpr Register no_reg
constexpr Register kRootRegister
PerThreadAssertScopeDebugOnly< false, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > DisallowGarbageCollection
DwVfpRegister DoubleRegister
void PrintF(const char *format,...)
constexpr int kPCOnStackSize
Tagged(T object) -> Tagged< T >
char const * DeoptimizeReasonToString(DeoptimizeReason reason)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
@ JAVASCRIPT_HANDLE_EXCEPTION
constexpr Register kJavaScriptCallArgCountRegister
constexpr Register kInterpreterAccumulatorRegister
kStaticElementsTemplateOffset kInstancePropertiesTemplateOffset Tagged< FixedArray >
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
constexpr int kFPOnStackSize
constexpr bool IsAnyTagged(MachineRepresentation rep)
constexpr int kJSArgcReceiverSlots
constexpr int kSystemPointerSize
constexpr Register kReturnRegister1
constexpr int kStackLimitSlackForDeoptimizationInBytes
constexpr uint32_t kZapValue
constexpr Register kReturnRegister0
void ShortPrint(Tagged< Object > obj, FILE *out)
constexpr DeoptimizeKind kLastDeoptimizeKind
constexpr Register kContextRegister
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int JSParameterCount(int param_count_without_receiver)
constexpr Register kPtrComprCageBaseRegister
constexpr bool kAllCodeObjectsLiveInTrustedSpace
constexpr int ArgumentPaddingSlots(int argument_count)
static constexpr Address kNullAddress
JSArrayBuffer::IsDetachableBit is_shared
constexpr int kNoDeoptimizationId
constexpr Register kJavaScriptCallDispatchHandleRegister
constexpr bool CodeKindCanDeoptimize(CodeKind kind)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
#define DCHECK_LE(v1, v2)
#define CHECK_GE(lhs, rhs)
#define CHECK_GT(lhs, rhs)
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define CHECK_WITH_MSG(condition, message)
#define DCHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
const DeoptimizeReason deopt_reason
static CompilationEnv ForModule(const NativeModule *native_module)
const WasmModule *const module
int deopt_exit_start_offset
uint32_t translation_array_size
#define TRACE_EVENT0(category_group, name)