33#if V8_ENABLE_WEBASSEMBLY
45#define __ ACCESS_MASM(masm)
48 int formal_parameter_count,
Address address) {
61enum class ArgumentsElementType {
66void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
68 ArgumentsElementType element_type) {
76 if (element_type == ArgumentsElementType::kHandle) {
86void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
99 FrameScope scope(masm, StackFrame::CONSTRUCT);
116 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kRaw);
118 __ PushRoot(RootIndex::kTheHoleValue);
133 __ DropArguments(rbx, rcx);
137 __ bind(&stack_overflow);
139 FrameScope scope(masm, StackFrame::INTERNAL);
140 __ CallRuntime(Runtime::kThrowStackOverflow);
156void Generate_CallToAdaptShadowStackForDeopt(MacroAssembler* masm,
158#ifdef V8_ENABLE_CET_SHADOW_STACK
160 Label post_adapt_shadow_stack;
162 const auto saved_pc_offset = masm->pc_offset();
164 Builtin::kAdaptShadowStackForDeopt)));
166 masm->pc_offset() - saved_pc_offset);
167 if (add_jump)
__ bind(&post_adapt_shadow_stack);
172void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
183 __ EnterFrame(StackFrame::CONSTRUCT);
184 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
190 __ PushRoot(RootIndex::kTheHoleValue);
201 const TaggedRegister shared_function_info(rbx);
202 __ LoadTaggedField(shared_function_info,
203 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
205 FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset));
206 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(rbx);
213 __ CallBuiltin(Builtin::kFastNewObject);
217 __ bind(¬_create_implicit_receiver);
218 __ LoadRoot(rax, RootIndex::kTheHoleValue);
228 __ bind(&post_instantiation_deopt_entry);
253 Label stack_overflow;
254 __ StackOverflowCheck(rax, &stack_overflow);
264 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kRaw);
275 Label use_receiver, do_throw, leave_and_return, check_result;
280 __ JumpIfNotRoot(rax, RootIndex::kUndefinedValue, &check_result,
285 __ bind(&use_receiver);
287 __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw,
Label::kNear);
289 __ bind(&leave_and_return);
292 __ LeaveFrame(StackFrame::CONSTRUCT);
294 __ DropArguments(rbx, rcx);
298 __ bind(&check_result);
302 __ JumpIfJSAnyIsNotPrimitive(rax, rcx, &leave_and_return,
Label::kNear);
303 __ jmp(&use_receiver);
308 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
312 __ bind(&stack_overflow);
315 __ CallRuntime(Runtime::kThrowStackOverflow);
322 Generate_CallToAdaptShadowStackForDeopt(masm,
false);
324 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
329void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
330 Generate_JSBuiltinsConstructStubHelper(masm);
333void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
334 FrameScope scope(masm, StackFrame::INTERNAL);
336 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
351 Label
invoke, handler_entry, exit;
352 Label not_outermost_js, not_outermost_js_2;
355 NoRootArrayScope uninitialized_root_register(masm);
377#ifdef V8_TARGET_OS_WIN
383#ifdef V8_TARGET_OS_WIN
385 __ AllocateStackSpace(EntryFrameConstants::kXMMRegistersBlockSize);
396 static_assert(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
397 static_assert(EntryFrameConstants::kXMMRegistersBlockSize ==
399 EntryFrameConstants::kCalleeSaveXMMRegisters);
406#ifdef V8_COMPRESS_POINTERS
409 IsolateData::cage_base_offset());
415 IsolateAddressId::kCEntryFPAddress, masm->isolate());
420#ifdef V8_TARGET_OS_WIN
423 EntryFrameConstants::kXMMRegistersBlockSize);
428 Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
429 __ Push(c_entry_fp_operand);
436 __ Move(c_entry_fp_operand, 0);
438 Operand fast_c_call_fp_operand =
439 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP);
440 Operand fast_c_call_pc_operand =
441 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC);
442 __ Push(fast_c_call_fp_operand);
443 __ Move(fast_c_call_fp_operand, 0);
445 __ Push(fast_c_call_pc_operand);
446 __ Move(fast_c_call_pc_operand, 0);
451 IsolateAddressId::kContextAddress, masm->isolate());
458 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
459 __ Load(rax, js_entry_sp);
467 __ bind(¬_outermost_js);
474 __ BindExceptionHandler(&handler_entry);
478 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
483 IsolateAddressId::kExceptionAddress, masm->isolate());
485 __ LoadRoot(rax, RootIndex::kException);
490 __ PushStackHandler();
494 __ CallBuiltin(entry_trampoline);
497 __ PopStackHandler();
506 __ bind(¬_outermost_js_2);
510 Operand fast_c_call_pc_operand =
511 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC);
512 __ Pop(fast_c_call_pc_operand);
514 Operand fast_c_call_fp_operand =
515 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP);
516 __ Pop(fast_c_call_fp_operand);
518 Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
519 __ Pop(c_entry_fp_operand);
523#ifdef V8_TARGET_OS_WIN
535 __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
539#ifdef V8_TARGET_OS_WIN
557void Builtins::Generate_JSEntry(MacroAssembler* masm) {
558 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
561void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
562 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
563 Builtin::kJSConstructEntryTrampoline);
566void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
567 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
568 Builtin::kRunMicrotasksTrampoline);
615 FrameScope scope(masm, StackFrame::INTERNAL);
619 IsolateAddressId::kContextAddress, masm->isolate());
620 __ movq(rsi, masm->ExternalReferenceAsOperand(context_address));
625#ifdef V8_TARGET_OS_WIN
651 Label enough_stack_space, stack_overflow;
655 __ bind(&stack_overflow);
656 __ CallRuntime(Runtime::kThrowStackOverflow);
660 __ bind(&enough_stack_space);
667 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kHandle);
674 __ CallBuiltin(builtin);
684void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
688void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
692void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
695 __ TailCallBuiltin(Builtin::kRunMicrotasks);
698static void AssertCodeIsBaselineAllowClobber(MacroAssembler* masm,
699 Register code, Register scratch) {
702 __ DecodeField<Code::KindField>(scratch);
703 __ cmpl(scratch, Immediate(
static_cast<int>(CodeKind::BASELINE)));
710 return AssertCodeIsBaselineAllowClobber(masm, code, scratch);
713static void CheckSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
717 Label* is_bytecode) {
718#if V8_STATIC_ROOTS_BOOL
719 __ IsObjectTypeFast(data, CODE_TYPE, scratch);
721 __ CmpObjectType(data, CODE_TYPE, scratch);
728 __ bind(¬_baseline);
733#if V8_STATIC_ROOTS_BOOL
735 __ CompareInstanceTypeWithUniqueCompressedMap(scratch, INTERPRETER_DATA_TYPE);
738 __ CmpInstanceType(scratch, INTERPRETER_DATA_TYPE);
744 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
745 Label* is_baseline, Label* is_unavailable) {
750 __ LoadTrustedPointerField(
751 data,
FieldOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset),
755 __ IsObjectType(data, INTERPRETER_DATA_TYPE, scratch1);
758 CheckSharedFunctionInfoBytecodeOrBaseline(masm, data, scratch1, is_baseline,
762 __ LoadProtectedPointerField(
763 bytecode,
FieldOperand(data, InterpreterData::kBytecodeArrayOffset));
766 __ IsObjectType(bytecode, BYTECODE_ARRAY_TYPE, scratch1);
771void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
780 FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
782 __ Move(
object, rdx);
783 __ RecordWriteField(
object, JSGeneratorObject::kInputOrDebugPosOffset, rax,
787 __ AssertGeneratorObject(rdx);
790 __ LoadTaggedField(rdi,
792 __ LoadTaggedField(rsi,
FieldOperand(rdi, JSFunction::kContextOffset));
795 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
796 Label stepping_prepared;
797 ExternalReference debug_hook =
798 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
799 Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
800 __ cmpb(debug_hook_operand, Immediate(0));
804 ExternalReference debug_suspended_generator =
805 ExternalReference::debug_suspended_generator_address(masm->isolate());
806 Operand debug_suspended_generator_operand =
807 masm->ExternalReferenceAsOperand(debug_suspended_generator);
808 __ cmpq(rdx, debug_suspended_generator_operand);
809 __ j(
equal, &prepare_step_in_suspended_generator);
810 __ bind(&stepping_prepared);
814 Label stack_overflow;
830 __ PopReturnAddressTo(return_address);
834#if V8_ENABLE_LEAPTIERING
837 __ movl(r15,
FieldOperand(rdi, JSFunction::kDispatchHandleOffset));
838 __ LoadEntrypointAndParameterCountFromJSDispatchTable(rcx, argc, r15);
840 __ LoadTaggedField(argc,
841 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
843 argc, SharedFunctionInfo::kFormalParameterCountOffset));
848 Label push_arguments, done_loop, loop;
850#if V8_ENABLE_LEAPTIERING
864 __ bind(&push_arguments);
867 FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
870 __ leal(index, Operand(argc, -1));
882 __ PushTaggedField(
FieldOperand(rdx, JSGeneratorObject::kReceiverOffset),
888 Label is_baseline, is_unavailable, ok;
891 scratch,
FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
897 __ bind(&is_unavailable);
898 __ Abort(AbortReason::kMissingBytecodeArray);
900 __ bind(&is_baseline);
901 __ IsObjectType(scratch, CODE_TYPE, scratch);
909 __ PushReturnAddressFrom(return_address);
914#if V8_ENABLE_LEAPTIERING
919 __ JumpJSFunction(rdi);
923 __ bind(&prepare_step_in_if_stepping);
925 FrameScope scope(masm, StackFrame::INTERNAL);
929 __ PushRoot(RootIndex::kTheHoleValue);
930 __ CallRuntime(Runtime::kDebugOnFunctionCall);
932 __ LoadTaggedField(rdi,
935 __ jmp(&stepping_prepared);
937 __ bind(&prepare_step_in_suspended_generator);
939 FrameScope scope(masm, StackFrame::INTERNAL);
941 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
943 __ LoadTaggedField(rdi,
946 __ jmp(&stepping_prepared);
948 __ bind(&stack_overflow);
950 FrameScope scope(masm, StackFrame::INTERNAL);
951 __ CallRuntime(Runtime::kThrowStackOverflow);
963 __ movzxwl(params_size,
964 FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));
966 Register actual_params_size = scratch2;
968 __ movq(actual_params_size,
973 __ cmpq(params_size, actual_params_size);
974 __ cmovq(
kLessThan, params_size, actual_params_size);
980 __ DropArguments(params_size, scratch2);
990 Register bytecode_array,
991 Register bytecode_offset,
992 Register bytecode, Register scratch1,
993 Register scratch2, Label* if_return) {
995 Register bytecode_size_table = scratch1;
1001 Register original_bytecode_offset = scratch2;
1003 bytecode_size_table, original_bytecode_offset));
1005 __ movq(original_bytecode_offset, bytecode_offset);
1007 __ Move(bytecode_size_table,
1008 ExternalReference::bytecode_size_table_address());
1011 Label process_bytecode, extra_wide;
1012 static_assert(0 ==
static_cast<int>(interpreter::Bytecode::kWide));
1013 static_assert(1 ==
static_cast<int>(interpreter::Bytecode::kExtraWide));
1014 static_assert(2 ==
static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1016 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
1017 __ cmpb(bytecode, Immediate(0x3));
1022 __ incl(bytecode_offset);
1023 __ testb(bytecode, Immediate(0x1));
1024 __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset,
times_1, 0));
1028 __ addq(bytecode_size_table,
1032 __ bind(&extra_wide);
1034 __ addq(bytecode_size_table,
1037 __ bind(&process_bytecode);
1040#define JUMP_IF_EQUAL(NAME) \
1042 Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1043 __ j(equal, if_return, Label::kFar);
1049 Label
end, not_jump_loop;
1051 Immediate(
static_cast<int>(interpreter::Bytecode::kJumpLoop)));
1055 __ movq(bytecode_offset, original_bytecode_offset);
1058 __ bind(¬_jump_loop);
1061 Operand(bytecode_size_table, bytecode,
times_1, 0));
1069void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {
1070 __ movw(
FieldOperand(sfi, SharedFunctionInfo::kAgeOffset), Immediate(0));
1073void ResetJSFunctionAge(MacroAssembler* masm, Register js_function) {
1076 shared_function_info,
1077 FieldOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
1078 ResetSharedFunctionInfoAge(masm, shared_function_info);
1081void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
1082 Register feedback_vector, Register scratch) {
1084 FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
1085 __ andb(scratch, Immediate(~FeedbackVector::OsrUrgencyBits::kMask));
1086 __ movb(
FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
1107 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1114 shared_function_info,
1115 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1116 ResetSharedFunctionInfoAge(masm, shared_function_info);
1120 Label is_baseline, compile_lazy;
1125#ifdef V8_ENABLE_SANDBOX
1135 __ LoadParameterCountFromJSDispatchTable(r8, dispatch_handle);
1137 BytecodeArray::kParameterSizeOffset));
1138 __ SbxCheck(
equal, AbortReason::kJSSignatureMismatch);
1141 Label push_stack_frame;
1143 __ LoadFeedbackVector(feedback_vector, closure, &push_stack_frame,
1147#ifndef V8_ENABLE_LEAPTIERING
1150 Label flags_need_processing;
1151 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1152 feedback_vector, CodeKind::INTERPRETED_FUNCTION, &flags_need_processing);
1159 FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1171 __ bind(&push_stack_frame);
1189 __ Push(feedback_vector);
1192 Label stack_overflow;
1196 BytecodeArray::kFrameSizeOffset));
1209 __ bind(&loop_header);
1213 __ bind(&loop_check);
1220 Label no_incoming_new_target_or_generator_register;
1224 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1226 __ j(zero, &no_incoming_new_target_or_generator_register,
Label::kNear);
1228 __ bind(&no_incoming_new_target_or_generator_register);
1232 Label stack_check_interrupt, after_stack_check_interrupt;
1234 __ j(
below, &stack_check_interrupt);
1235 __ bind(&after_stack_check_interrupt);
1242 __ bind(&do_dispatch);
1245 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1257 __ RecordComment(
"--- InterpreterEntryPC point ---");
1259 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1266 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1277 __ SmiUntagUnsigned(
1288 __ jmp(&do_dispatch);
1290 __ bind(&do_return);
1295 __ bind(&stack_check_interrupt);
1301 __ CallRuntime(Runtime::kStackGuard);
1315 __ jmp(&after_stack_check_interrupt);
1317 __ bind(&compile_lazy);
1318 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1322#ifndef V8_ENABLE_LEAPTIERING
1323 __ bind(&flags_need_processing);
1324 __ OptimizeCodeOrTailCallOptimizedCodeSlot(feedback_vector, closure,
1328 __ bind(&is_baseline);
1330#ifndef V8_ENABLE_LEAPTIERING
1332 TaggedRegister feedback_cell(feedback_vector);
1333 __ LoadTaggedField(feedback_cell,
1334 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1335 __ LoadTaggedField(feedback_vector,
1336 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1338 Label install_baseline_code;
1341 __ IsObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, rcx);
1345 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1346 feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1356 __ ReplaceClosureCodeWithOptimizedCode(
1361 __ bind(&install_baseline_code);
1364 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1368 __ bind(&stack_overflow);
1369 __ CallRuntime(Runtime::kThrowStackOverflow);
1374 Register start_address,
1378 __ movq(scratch, num_args);
1380 __ leaq(start_address,
1384 __ PushArray(start_address, num_args, scratch,
1400 Label stack_overflow;
1413 __ StackOverflowCheck(rcx, &stack_overflow);
1423 __ PushRoot(RootIndex::kUndefinedValue);
1437 __ TailCallBuiltin(Builtin::kCallWithSpread);
1443 __ bind(&stack_overflow);
1445 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1464 Label stack_overflow;
1467 __ StackOverflowCheck(rax, &stack_overflow);
1483 __ Push(Immediate(0));
1492 __ AssertUndefinedOrAllocationSite(rbx);
1498 __ AssertFunction(rdi);
1500 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1503 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1507 __ TailCallBuiltin(Builtin::kConstruct);
1511 __ bind(&stack_overflow);
1513 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1521 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1527 Label stack_overflow;
1530 switch (which_frame) {
1543 __ StackOverflowCheck(rax, &stack_overflow);
1556 __ PushArray(rcx, argc_without_receiver, r8);
1559 __ Push(Immediate(0));
1564 __ TailCallBuiltin(Builtin::kConstruct);
1567 __ bind(&stack_overflow);
1569 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1577void NewImplicitReceiver(MacroAssembler* masm) {
1598 __ CallBuiltin(Builtin::kFastNewObject);
1600 __ movq(implicit_receiver, rax);
1605 __ SmiUntagUnsigned(rax);
1608 __ movq(Operand(rsp, 0 ), implicit_receiver);
1620void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1621 MacroAssembler* masm) {
1630 __ AssertFunction(rdi);
1633 Label non_constructor;
1636 Immediate(Map::Bits1::IsConstructorBit::kMask));
1637 __ j(zero, &non_constructor);
1640 Label stack_overflow;
1641 __ StackOverflowCheck(rax, &stack_overflow);
1645 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1648 __ PushRoot(RootIndex::kTheHoleValue);
1655 __ PushRoot(RootIndex::kTheHoleValue);
1660 __ LoadTaggedField(shared_function_info,
1661 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1662 __ testl(
FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset),
1663 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1667 Label not_create_implicit_receiver;
1669 FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset));
1676 NewImplicitReceiver(masm);
1677 __ bind(¬_create_implicit_receiver);
1693 __ bind(&deopt_entry);
1698 Label use_receiver, do_throw, leave_and_return, check_result;
1703 __ JumpIfNotRoot(rax, RootIndex::kUndefinedValue, &check_result,
1708 __ bind(&use_receiver);
1711 __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw,
Label::kNear);
1713 __ bind(&leave_and_return);
1714 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1718 __ bind(&check_result);
1722 __ JumpIfJSAnyIsNotPrimitive(rax, rcx, &leave_and_return,
Label::kNear);
1723 __ jmp(&use_receiver);
1727 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1731 __ bind(&builtin_call);
1734 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1739 __ bind(&non_constructor);
1740 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1743 __ bind(&stack_overflow);
1744 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1748 Generate_CallToAdaptShadowStackForDeopt(masm,
false);
1750 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1752 __ jmp(&deopt_entry);
1758 Label builtin_trampoline, trampoline_loaded;
1760 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1768 const Register shared_function_info(rbx);
1769 __ LoadTaggedField(shared_function_info,
1770 FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
1772 __ LoadTrustedPointerField(
1775 SharedFunctionInfo::kTrustedFunctionDataOffset),
1779 __ LoadProtectedPointerField(
1780 rbx,
FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
1784 __ bind(&builtin_trampoline);
1787 __ ExternalReferenceAsOperand(
1789 address_of_interpreter_entry_trampoline_instruction_start(
1793 __ bind(&trampoline_loaded);
1794 __ addq(rbx, Immediate(interpreter_entry_return_pc_offset.value()));
1800 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1813 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1817 __ SmiUntagUnsigned(
1842void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1843 Generate_CallToAdaptShadowStackForDeopt(masm,
true);
1844 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
1850 __ SmiUntagUnsigned(
1854 Label enter_bytecode, function_entry_bytecode;
1858 __ j(
equal, &function_entry_bytecode);
1870 __ bind(&enter_bytecode);
1878 __ bind(&function_entry_bytecode);
1885 __ jmp(&enter_bytecode);
1888 __ bind(&if_return);
1889 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1892void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
1893 Generate_CallToAdaptShadowStackForDeopt(masm,
true);
1894 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
1901void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1914 Register closure = descriptor.GetRegisterParameter(
1915 BaselineOutOfLinePrologueDescriptor::kClosure);
1917 __ LoadTaggedField(feedback_cell,
1918 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1919 __ LoadTaggedField(feedback_vector,
1920 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1923#ifndef V8_ENABLE_LEAPTIERING
1925 Label flags_need_processing;
1926 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1927 feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1934 FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1938 __ PopReturnAddressTo(return_address);
1942 __ EnterFrame(StackFrame::BASELINE);
1944 __ Push(descriptor.GetRegisterParameter(
1945 BaselineOutOfLinePrologueDescriptor::kCalleeContext));
1947 Register callee_js_function = descriptor.GetRegisterParameter(
1948 BaselineOutOfLinePrologueDescriptor::kClosure);
1951 ResetJSFunctionAge(masm, callee_js_function);
1952 __ Push(callee_js_function);
1953 __ Push(descriptor.GetRegisterParameter(
1954 BaselineOutOfLinePrologueDescriptor::
1955 kJavaScriptCallArgCount));
1959 Register bytecode_array = descriptor.GetRegisterParameter(
1960 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1961 __ Push(bytecode_array);
1962 __ Push(feedback_cell);
1963 __ Push(feedback_vector);
1967 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallNewTarget);
1969 Label call_stack_guard;
1970 Register frame_size = descriptor.GetRegisterParameter(
1971 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1987 __ j(
below, &call_stack_guard);
1991 __ PushReturnAddressFrom(return_address);
1996#ifndef V8_ENABLE_LEAPTIERING
1997 __ bind(&flags_need_processing);
2005 __ OptimizeCodeOrTailCallOptimizedCodeSlot(feedback_vector, closure,
2011 __ bind(&call_stack_guard);
2017 __ PushReturnAddressFrom(return_address);
2018 FrameScope inner_frame_scope(masm, StackFrame::INTERNAL);
2021#ifdef V8_ENABLE_LEAPTIERING
2023 static_assert(kJSDispatchHandleShift > 0);
2026 __ SmiTag(frame_size);
2027 __ Push(frame_size);
2028 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
2029#ifdef V8_ENABLE_LEAPTIERING
2042void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
2062 __ LeaveFrame(StackFrame::BASELINE);
2065 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
2069void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2070 bool javascript_builtin,
2072 Generate_CallToAdaptShadowStackForDeopt(masm,
true);
2073 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
2078 int allocatable_register_count = config->num_allocatable_general_registers();
2080 if (javascript_builtin) {
2087 Operand(rsp, config->num_allocatable_general_registers() *
2093 for (
int i = allocatable_register_count - 1;
i >= 0; --
i) {
2094 int code = config->GetAllocatableGeneralCode(
i);
2100 if (with_result && javascript_builtin) {
2112 const int offsetToPC =
2115 __ popq(Operand(rsp, offsetToPC));
2128void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2129 Generate_ContinueToBuiltinHelper(masm,
false,
false);
2132void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2133 MacroAssembler* masm) {
2134 Generate_ContinueToBuiltinHelper(masm,
false,
true);
2137void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2138 Generate_ContinueToBuiltinHelper(masm,
true,
false);
2141void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2142 MacroAssembler* masm) {
2143 Generate_ContinueToBuiltinHelper(masm,
true,
true);
2146void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2149 FrameScope scope(masm, StackFrame::INTERNAL);
2150 __ CallRuntime(Runtime::kNotifyDeoptimized);
2160void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2173 Label no_arg_array, no_this_arg;
2174 StackArgumentsAccessor
args(rax);
2175 __ LoadRoot(rdx, RootIndex::kUndefinedValue);
2185 __ bind(&no_arg_array);
2187 __ bind(&no_this_arg);
2188 __ DropArgumentsAndPushNewReceiver(rax, rdx, rcx);
2204 __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments,
Label::kNear);
2205 __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments,
Label::kNear);
2208 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2213 __ bind(&no_arguments);
2221void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2233 StackArgumentsAccessor
args(rax);
2234 __ movq(rdi,
args.GetReceiverOperand());
2238 __ PopReturnAddressTo(rbx);
2246 __ PushRoot(RootIndex::kUndefinedValue);
2253 __ PushReturnAddressFrom(rbx);
2262void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2277 StackArgumentsAccessor
args(rax);
2278 __ LoadRoot(rdi, RootIndex::kUndefinedValue);
2290 __ DropArgumentsAndPushNewReceiver(rax, rdx, rcx);
2305 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2308void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2324 StackArgumentsAccessor
args(rax);
2325 __ LoadRoot(rdi, RootIndex::kUndefinedValue);
2338 __ DropArgumentsAndPushNewReceiver(
2339 rax, masm->RootAsOperand(RootIndex::kUndefinedValue), rcx);
2359 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2368void Generate_AllocateSpaceAndShiftExistingArguments(
2369 MacroAssembler* masm, Register count, Register argc_in_out,
2370 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2375 Register old_rsp = pointer_to_new_space_out;
2377 __ movq(old_rsp, rsp);
2380 __ AllocateStackSpace(new_space);
2387 __ Move(current, 0);
2394 __ cmpq(current, copy_count);
2400 pointer_to_new_space_out,
2405 __ addl(argc_in_out, count);
2426 __ AssertNotSmi(rbx);
2428 __ LoadMap(map, rbx);
2429 __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
2431 __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
2437 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2442 Label stack_overflow;
2443 __ StackOverflowCheck(rcx, &stack_overflow,
2452 Generate_AllocateSpaceAndShiftExistingArguments(masm, rcx, rax, r8, r9, r12);
2456 Register src = rbx, dest = r8, num = rcx, current = r9;
2457 __ Move(current, 0);
2458 Label done,
push, loop;
2460 __ cmpl(current, num);
2465 __ CompareRoot(value, RootIndex::kTheHoleValue);
2467 __ LoadRoot(value, RootIndex::kUndefinedValue);
2476 __ TailCallBuiltin(target_builtin);
2478 __ bind(&stack_overflow);
2479 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2484 CallOrConstructMode mode,
2495 Label new_target_constructor, new_target_not_constructor;
2497 __ LoadMap(rbx, rdx);
2499 Immediate(Map::Bits1::IsConstructorBit::kMask));
2501 __ bind(&new_target_not_constructor);
2504 __ EnterFrame(StackFrame::INTERNAL);
2506 __ CallRuntime(Runtime::kThrowNotConstructor);
2508 __ bind(&new_target_constructor);
2511 Label stack_done, stack_overflow;
2535 Generate_AllocateSpaceAndShiftExistingArguments(masm, r8, rax, r9, r12,
2548 Register src = rcx, dest = r9, num = r8;
2559 __ bind(&stack_done);
2561 __ TailCallBuiltin(target_builtin);
2563 __ bind(&stack_overflow);
2564 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2575 StackArgumentsAccessor
args(rax);
2576 __ AssertCallableFunction(rdi);
2578 __ LoadTaggedField(rdx,
2579 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2589 __ LoadTaggedField(rsi,
FieldOperand(rdi, JSFunction::kContextOffset));
2593 Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2594 SharedFunctionInfo::IsStrictBit::kMask));
2606 __ LoadGlobalProxy(rcx);
2608 Label convert_to_object, convert_receiver;
2609 __ movq(rcx,
args.GetReceiverOperand());
2610 __ JumpIfSmi(rcx, &convert_to_object,
2612 __ JumpIfJSAnyIsNotPrimitive(rcx, rbx, &done_convert,
2615 Label convert_global_proxy;
2616 __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
2618 __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
2620 __ bind(&convert_global_proxy);
2623 __ LoadGlobalProxy(rcx);
2625 __ jmp(&convert_receiver);
2627 __ bind(&convert_to_object);
2632 FrameScope scope(masm, StackFrame::INTERNAL);
2638 __ CallBuiltin(Builtin::kToObject);
2643 __ SmiUntagUnsigned(rax);
2646 rdx,
FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2647 __ bind(&convert_receiver);
2649 __ movq(
args.GetReceiverOperand(), rcx);
2651 __ bind(&done_convert);
2660#ifdef V8_ENABLE_LEAPTIERING
2664 rbx,
FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2671void Generate_PushBoundArguments(MacroAssembler* masm) {
2679 Label no_bound_arguments;
2680 __ LoadTaggedField(rcx,
2681 FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2682 __ SmiUntagFieldUnsigned(rbx,
2685 __ j(zero, &no_bound_arguments);
2710 __ EnterFrame(StackFrame::INTERNAL);
2711 __ CallRuntime(Runtime::kThrowStackOverflow);
2724 rcx,
FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2725 __ SmiUntagFieldUnsigned(
2745 __ bind(&no_bound_arguments);
2756 __ AssertBoundFunction(rdi);
2759 StackArgumentsAccessor
args(rax);
2760 __ LoadTaggedField(rbx,
FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2761 __ movq(
args.GetReceiverOperand(), rbx);
2764 Generate_PushBoundArguments(masm);
2768 rdi,
FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2784 StackArgumentsAccessor
args(argc);
2786 Label non_callable, class_constructor;
2787 __ JumpIfSmi(target, &non_callable);
2788 __ LoadMap(map, target);
2793 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2794 __ TailCallBuiltin(Builtin::kCallBoundFunction,
equal);
2798 Immediate(Map::Bits1::IsCallableBit::kMask));
2802 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2803 __ TailCallBuiltin(Builtin::kCallProxy,
equal);
2807 __ cmpw(instance_type, Immediate(JS_WRAPPED_FUNCTION_TYPE));
2808 __ TailCallBuiltin(Builtin::kCallWrappedFunction,
equal);
2812 __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
2813 __ j(
equal, &class_constructor);
2819 __ movq(
args.GetReceiverOperand(), target);
2821 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2826 __ bind(&non_callable);
2828 FrameScope scope(masm, StackFrame::INTERNAL);
2830 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2835 __ bind(&class_constructor);
2837 FrameScope frame(masm, StackFrame::INTERNAL);
2839 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2845void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2851 __ AssertConstructor(rdi);
2852 __ AssertFunction(rdi);
2856 __ LoadRoot(rbx, RootIndex::kUndefinedValue);
2859 const TaggedRegister shared_function_info(rcx);
2860 __ LoadTaggedField(shared_function_info,
2861 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2862 __ testl(
FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset),
2863 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2864 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub,
not_zero);
2866 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2870void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2876 __ AssertConstructor(rdi);
2877 __ AssertBoundFunction(rdi);
2880 Generate_PushBoundArguments(masm);
2888 rdx,
FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2894 rdi,
FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2895 __ TailCallBuiltin(Builtin::kConstruct);
2899void Builtins::Generate_Construct(MacroAssembler* masm) {
2912 StackArgumentsAccessor
args(argc);
2915 Label non_constructor;
2916 __ JumpIfSmi(target, &non_constructor);
2919 __ LoadMap(map, target);
2921 Immediate(Map::Bits1::IsConstructorBit::kMask));
2922 __ j(zero, &non_constructor);
2925 __ CmpInstanceTypeRange(map, instance_type, FIRST_JS_FUNCTION_TYPE,
2926 LAST_JS_FUNCTION_TYPE);
2927 __ TailCallBuiltin(Builtin::kConstructFunction,
below_equal);
2931 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2932 __ TailCallBuiltin(Builtin::kConstructBoundFunction,
equal);
2935 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2936 __ TailCallBuiltin(Builtin::kConstructProxy,
equal);
2941 __ movq(
args.GetReceiverOperand(), target);
2943 __ LoadNativeContextSlot(target,
2944 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2950 __ bind(&non_constructor);
2951 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2956void Generate_OSREntry(MacroAssembler* masm, Register entry_address) {
2962 __ jmp(entry_address,
true);
2965enum class OsrSourceTier {
2971void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
2972 Register maybe_target_code,
2973 Register expected_param_count) {
2974 Label jump_to_optimized_code;
2980 __ testq(maybe_target_code, maybe_target_code);
2985 FrameScope scope(masm, StackFrame::INTERNAL);
2987 __ Push(expected_param_count);
2988 __ CallRuntime(Runtime::kCompileOptimizedOSR);
2990 __ Pop(expected_param_count);
2998 __ bind(&jump_to_optimized_code);
3000 if (source == OsrSourceTier::kMaglev) {
3015 __ ExternalReferenceAsOperand(
3021 FrameScope scope(masm, StackFrame::INTERNAL);
3023 __ Push(maybe_target_code);
3024 __ Push(expected_param_count);
3025 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
3026 __ Pop(expected_param_count);
3027 __ Pop(maybe_target_code);
3033 if (source == OsrSourceTier::kInterpreter) {
3041 __ movl(scratch,
FieldOperand(maybe_target_code, Code::kOsrOffsetOffset));
3043 __ SbxCheck(Condition::not_equal, AbortReason::kExpectedOsrCode);
3048 FieldOperand(maybe_target_code, Code::kParameterCountOffset));
3050 __ cmpq(scratch, expected_param_count);
3051 __ SbxCheck(Condition::equal, AbortReason::kOsrUnexpectedStackSize);
3053 __ LoadProtectedPointerField(
3055 Code::kDeoptimizationDataOrInterpreterDataOffset));
3063 __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code,
3067 __ addq(maybe_target_code, scratch);
3069 Generate_OSREntry(masm, maybe_target_code);
3074void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3075 using D = OnStackReplacementDescriptor;
3076 static_assert(D::kParameterCount == 2);
3077 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
3078 D::MaybeTargetCodeRegister(),
3079 D::ExpectedParameterCountRegister());
3082void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
3083 using D = OnStackReplacementDescriptor;
3084 static_assert(D::kParameterCount == 2);
3087 OnStackReplacement(masm, OsrSourceTier::kBaseline,
3088 D::MaybeTargetCodeRegister(),
3089 D::ExpectedParameterCountRegister());
3092#ifdef V8_ENABLE_MAGLEV
3096 bool save_new_target) {
3102 FrameScope scope(masm, StackFrame::INTERNAL);
3104 if (save_new_target) {
3106 __ AssertSmiOrHeapObjectInMainCompressionCage(
3112 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
3113 if (save_new_target) {
// Restores the allocatable SIMD128 (XMM) registers from a deoptimizer
// FrameDescription: for each allocatable simd128 register code, loads
// 16 bytes from frame_description + code * kSimd128Size +
// simd128_regs_offset via movdqu. Used on the deopt exit path.
// NOTE(review): excerpt incomplete — the lines computing xmm_reg,
// `config`, and `simd128_regs_offset` are elided.
3124void Generate_RestoreFrameDescriptionRegisters(MacroAssembler* masm,
3125 Register frame_description) {
3129 for (
int i = 0;
i < config->num_allocatable_simd128_registers(); ++
i) {
3130 int code = config->GetAllocatableSimd128Code(
i);
3132 int src_offset = code *
kSimd128Size + simd128_regs_offset;
3133 __ movdqu(xmm_reg, Operand(frame_description, src_offset));
// CET shadow-stack support (x64 with Intel CET enabled).
// Generate_AdaptShadowStackForDeopt rebuilds the hardware shadow stack
// after a deoptimization so subsequent returns do not fault:
//  - count_reg (r11 by convention, see kAdaptShadowStackCountRegister)
//    tracks how many return addresses still need to be pushed;
//  - ensure_valid_return_address is called via CallCFunction for each one
//    (this is the C++ half that actually pushes onto the shadow stack);
//  - when the count reaches 0 the frame-description registers are
//    restored and StackIsIterable is reset.
// kAdaptShadowStackDispatchFirstEntryOffset is CHECKed against pc_offset
// so that callers can jump to a fixed entry point inside this builtin.
// NOTE(review): excerpt incomplete — many lines elided between the
// embedded original line numbers.
3147#ifdef V8_ENABLE_CET_SHADOW_STACK
3184constexpr int kAdaptShadowStackDispatchFirstEntryOffset = 1;
3188constexpr Register kAdaptShadowStackCountRegister =
r11;
3190void Builtins::Generate_AdaptShadowStackForDeopt(MacroAssembler* masm) {
3191 Register count_reg = kAdaptShadowStackCountRegister;
3198 CHECK_EQ(masm->pc_offset(), kAdaptShadowStackDispatchFirstEntryOffset);
3203 __ pushq(count_reg);
3206 __ PrepareCallCFunction(2);
3208 AllowExternalCallThatCantCauseGC scope(masm);
3212 __ CallCFunction(ExternalReference::ensure_valid_return_address(), 2);
3217 __ cmpl(count_reg, Immediate(0));
3225 __ movb(
__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
3230 Generate_RestoreFrameDescriptionRegisters(masm, rbx);
// Saves the wasm parameter registers (GP and FP) to the stack and returns
// the total byte offset consumed, so RestoreWasmParams can undo it.
// The static_asserts (whose expressions are elided here) check the pushed
// sizes against frame-constant expectations ("frame size mismatch").
// NOTE(review): excerpt incomplete — the push sequence itself is elided.
3235#if V8_ENABLE_WEBASSEMBLY
3238int SaveWasmParams(MacroAssembler* masm) {
3244 "frame size mismatch");
3250 "frame size mismatch");
// Inverse of SaveWasmParams: reloads the wasm parameter registers from the
// stack given the offset SaveWasmParams returned. Body elided in this
// excerpt.
3262void RestoreWasmParams(MacroAssembler* masm,
int offset) {
// Builtin: Liftoff frame setup. Loads the feedback vector for the
// function at `func_index` from the trusted instance data's
// FeedbackVectors array; if it is still a Smi (uninitialized), falls into
// the &allocate_vector slow path which:
//  - marks the assembler as having a frame (set_has_frame(true)),
//  - saves the wasm parameter registers (SaveWasmParams),
//  - Smi-tags and pushes func_index,
//  - calls Runtime::kWasmAllocateFeedbackVector (3 args),
//  - restores the parameter registers.
// The calling PC is pushed on both the fast and slow paths, presumably to
// keep the frame layout consistent — TODO confirm against the full source.
// NOTE(review): excerpt incomplete — lines elided throughout.
3282void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3283 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3293 WasmTrustedInstanceData::kFeedbackVectorsOffset));
3296 Label allocate_vector, done;
3297 __ JumpIfSmi(vector, &allocate_vector);
3301 __ Push(calling_pc);
3304 __ bind(&allocate_vector);
3319 __ set_has_frame(
true);
3320 __ Push(calling_pc);
3321 int offset = SaveWasmParams(masm);
3325 __ SmiTag(func_index);
3326 __ Push(func_index);
3331 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3334 RestoreWasmParams(masm,
offset);
// Builtin: lazy compilation trampoline for wasm functions. Inside an
// INTERNAL frame it saves the wasm parameter registers, calls
// Runtime::kWasmCompileLazy (2 args), restores the parameters, and then
// (per the visible kJumpTableStartOffset load) jumps to the compiled
// function through the instance's jump table. HardAbortScope makes any
// abort in here non-graceful (no runtime re-entry).
// NOTE(review): excerpt incomplete — argument setup and the final jump
// are elided.
3342void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3349 HardAbortScope hard_abort(masm);
3350 FrameScope scope(masm, StackFrame::INTERNAL);
3352 int offset = SaveWasmParams(masm);
3360 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3366 RestoreWasmParams(masm,
offset);
3372 WasmTrustedInstanceData::kJumpTableStartOffset)));
// Builtin: wasm debug break. Opens a WASM_DEBUG_BREAK frame, spills the
// wasm FP registers into kFpStackSize bytes of stack space (the spill
// loop is elided here), calls Runtime::kWasmDebugBreak, and releases the
// FP spill area again. GP register saving is presumably part of the
// frame/elided code — TODO confirm.
3379void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3380 HardAbortScope hard_abort(masm);
3382 FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3391 constexpr int kFpStackSize =
3393 __ AllocateStackSpace(kFpStackSize);
3394 int offset = kFpStackSize;
3404 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3411 __ addq(rsp, Immediate(kFpStackSize));
// Updates the bookkeeping state of a wasm stack object (e.g. active /
// suspended) during stack switching. Body elided; has a sandbox-specific
// variant under V8_ENABLE_SANDBOX.
3423void SwitchStackState(MacroAssembler* masm, Register stack,
3426#if V8_ENABLE_SANDBOX
// Records the current continuation (sp/fp and the given resume pc label)
// into the jump buffer of `stack`, so execution can later be resumed
// there. Body elided in this excerpt.
3436void FillJumpBuffer(MacroAssembler* masm, Register stack, Label*
pc) {
// Loads sp/fp (and optionally the pc, per `load_pc`) from the jump buffer
// of `stack`, switching execution onto that stack. Body elided.
3446void LoadJumpBuffer(MacroAssembler* masm, Register stack,
bool load_pc,
// Switches to `target_stack`'s jump buffer without loading a pc
// (LoadJumpBuffer is called with load_pc = false). Zeroes the
// GCScanSlotCount slot of the stack-switch frame first so the GC does not
// scan stale slots across the switch.
3457void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3460 MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3461 __ Move(GCScanSlotPlace, 0);
3463 LoadJumpBuffer(masm, target_stack,
false, expected_state);
// Notifies the runtime of a stack switch by calling either
// wasm_return_switch() (return_switch == true, i.e. switching back) or
// wasm_switch_stacks() via CallCFunction with 2 args. The registers in
// `keep` are pushed before the call (first loop) and popped in reverse
// order afterwards (rbegin/rend loop) to preserve them across the C call.
// NOTE(review): excerpt incomplete — push/pop statements inside the loops
// and the argument setup are elided.
3467void SwitchStacks(MacroAssembler* masm, Register old_stack,
bool return_switch,
3468 const std::initializer_list<Register> keep) {
3469 using ER = ExternalReference;
3470 for (
auto reg : keep) {
3478 __ PrepareCallCFunction(2);
3480 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3482 for (
auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
// Loads the active stack, finds its parent (load elided here), makes the
// parent the new active stack (StoreRootRelative), and switches to it via
// SwitchStacks(return_switch = true) while preserving the promise,
// return value, context and parent registers across the C call.
3487void ReloadParentStack(MacroAssembler* masm, Register promise,
3488 Register return_value, Register context, Register tmp1,
3491 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3499 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3501 SwitchStacks(masm, active_stack,
true,
3502 {promise, return_value,
context, parent});
// Replaces `data` (a wasm implicit argument: either a WasmImportData or a
// WasmTrustedInstanceData — the type dispatch is elided here) with its
// native context, loading from the matching kNativeContextOffset field.
3508void GetContextFromImplicitArg(MacroAssembler* masm, Register data) {
3515 __ LoadTaggedField(data,
3516 FieldOperand(data, WasmImportData::kNativeContextOffset));
3520 data,
FieldOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
// Pops one level of the suspender chain: loads the active suspender root,
// replaces it with its kParent field, and writes it back to the
// kActiveSuspender root slot. The CompareRoot against undefined guards
// something elided in this excerpt (presumably an assert or branch —
// TODO confirm).
3524void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3526 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3528 suspender,
FieldOperand(suspender, WasmSuspenderObject::kParentOffset);
3529 __ CompareRoot(suspender, RootIndex::kUndefinedValue);
3530 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
// Clears the ImplicitArg and ResultArray spill slots of a STACK_SWITCH
// frame (the stored value register is elided; presumably a zero/Smi-zero
// sentinel) so the GC never scans stale pointers in them.
3533void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3535 __ movq(
MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset),
3537 __ movq(
MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset),
// For the stack-switching (JSPI) JS-to-wasm path: records the current
// continuation in the parent stack's jump buffer, switches to the target
// (secondary) stack, opens a STACK_SWITCH frame there, and copies the
// wrapper buffer onto the new stack:
//  - parent_stack temporarily aliases new_wrapper_buffer;
//  - original_fp preserves the old rbp for reading args from the old stack;
//  - stack_space covers the wrapper-buffer copy (kWrapperBufferSize);
//  - the static_assert pins the RefReturnCount/ReturnCount field layout
//    relied on by the (elided) field-by-field copy.
// NOTE(review): excerpt incomplete — many copy/setup lines elided.
3541void SwitchToAllocatedStack(MacroAssembler* masm, Register wasm_instance,
3542 Register wrapper_buffer, Register original_fp,
3543 Register new_wrapper_buffer, Register scratch,
3545 ResetStackSwitchFrameStackSlots(masm);
3546 Register parent_stack = new_wrapper_buffer;
3547 __ LoadRootRelative(parent_stack, IsolateData::active_stack_offset());
3549 FillJumpBuffer(masm, parent_stack, suspend);
3550 SwitchStacks(masm, parent_stack,
false,
3554 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3557 __ movq(original_fp, rbp);
3566 __ EnterFrame(StackFrame::STACK_SWITCH);
3569 JSToWasmWrapperFrameConstants::kWrapperBufferSize;
3570 __ AllocateStackSpace(stack_space);
3571 __ movq(new_wrapper_buffer, rsp);
3575 static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
3576 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
3579 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3581 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount),
3587 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3591 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray),
// JSPI epilogue: after the wasm call on the secondary stack returns,
// fulfill the suspender's promise with the return value and switch back
// to the parent stack.
//  - promise/return_value use the FulfillPromise builtin's parameter
//    registers (descriptor indices 0 and 1);
//  - the promise is loaded from the active suspender's kPromise field;
//  - ReloadParentStack switches back to the caller's stack and
//    RestoreParentSuspender pops the suspender chain;
//  - GCScanSlotCount is set (value elided) before CallBuiltin so the GC
//    can scan the outgoing arguments.
// NOTE(review): excerpt incomplete — lines elided.
3595void SwitchBackAndReturnPromise(MacroAssembler* masm, Register tmp1,
3597 Label* return_promise) {
3601 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3603 Register promise = desc.GetRegisterParameter(0);
3604 Register return_value = desc.GetRegisterParameter(1);
3607 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3609 promise,
FieldOperand(promise, WasmSuspenderObject::kPromiseOffset);
3613 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3615 ReloadParentStack(masm, promise, return_value,
kContextRegister, tmp1, tmp2);
3616 RestoreParentSuspender(masm, tmp1);
3619 __ Move(
MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3622 __ CallBuiltin(Builtin::kFulfillPromise);
3626 __ bind(return_promise);
// JSPI exception path: the landing pad reached when the wasm call on the
// secondary stack throws. It:
//  - unwinds rsp to the last spill slot of the STACK_SWITCH frame,
//  - clears the thread_in_wasm flag (we re-enter JS),
//  - loads the suspender's promise and rejects it via the RejectPromise
//    builtin (promise/reason/debug_event from its descriptor; debug_event
//    = true),
//  - restores the parent suspender and jumps back to the shared
//    return_promise path.
// The final pc offset is registered as the JSPI prompt handler
// (SetJSPIPromptHandlerOffset).
// NOTE(review): excerpt incomplete — lines elided (e.g. where `reason`
// and `catch_handler` are produced).
3629void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3630 Label* return_promise) {
3636 __ leaq(rsp,
MemOperand(rbp, StackSwitchFrameConstants::kLastSpillOffset));
3639 Register thread_in_wasm_flag_addr = r8;
3641 thread_in_wasm_flag_addr,
3643 __ movl(
MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3644 thread_in_wasm_flag_addr =
no_reg;
3648 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3649 Register promise = desc.GetRegisterParameter(0);
3650 Register reason = desc.GetRegisterParameter(1);
3651 Register debug_event = desc.GetRegisterParameter(2);
3653 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3655 promise,
FieldOperand(promise, WasmSuspenderObject::kPromiseOffset);
3657 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3661 RestoreParentSuspender(masm, r8);
3663 __ Move(
MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3666 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3667 __ CallBuiltin(Builtin::kRejectPromise);
3672 __ jmp(return_promise);
3674 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
// Shared body of the JS-to-wasm wrappers (plain, promise-returning/JSPI,
// and stress-switch variants — `mode` selects; `stack_switch` is derived
// from it in elided code). High-level flow visible in this excerpt:
//  1. Enter a STACK_SWITCH or JS_TO_WASM frame and allocate the spill
//     slots; optionally switch to an allocated secondary stack
//     (SwitchToAllocatedStack) when stack switching.
//  2. Persist the wrapper buffer / implicit arg / result array into the
//     frame's spill slots.
//  3. Allocate the stack return buffer (subq rsp, result_size) and
//     record its start in the wrapper buffer.
//  4. Copy stack parameters from params_start..params_end into place
//     (loop between &loop_start and &finish_stack_params), then the
//     register parameters (next_offset walk, DCHECKed to end at
//     stack_params_offset).
//  5. Set the thread_in_wasm flag, set GCScanSlotCount, and call the
//     wasm code via CallWasmCodePointerNoSignatureCheck.
//  6. Clear the thread_in_wasm flag, write the GP/FP return registers
//     back into the wrapper buffer, reload context/implicit arg, and
//     call Builtin::kJSToWasmHandleReturns to convert results to JS.
//  7. For the promise modes, SwitchBackAndReturnPromise; the exception
//     landing pad is emitted by GenerateExceptionHandlingLandingPad.
// NOTE(review): excerpt incomplete — many lines elided between the
// embedded original line numbers; do not infer exact register/stack
// layout from this fragment alone.
3677void JSToWasmWrapperHelper(MacroAssembler* masm,
wasm::Promise mode) {
3679 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3680 : StackFrame::JS_TO_WASM);
3682 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3687 MemOperand(rbp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3691 Register original_fp = stack_switch ? r9 : rbp;
3692 Register new_wrapper_buffer = stack_switch ? rbx : wrapper_buffer;
3696 original_fp, new_wrapper_buffer, rax, &suspend);
3699 __ movq(
MemOperand(rbp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset),
3700 new_wrapper_buffer);
3702 __ movq(
MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset),
3705 __ movq(result_array,
3707 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3708 __ movq(
MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset),
3717 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferSize));
3719 __ subq(rsp, result_size);
3723 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart),
3728 __ movq(params_start,
3730 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3734 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3736 __ LoadWasmCodePointer(
3739 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3744 int stack_params_offset =
3748 __ leaq(last_stack_param,
MemOperand(params_start, stack_params_offset));
3751 __ bind(&loop_start);
3753 Label finish_stack_params;
3754 __ cmpq(last_stack_param, params_end);
3760 __ jmp(&loop_start);
3762 __ bind(&finish_stack_params);
3764 int next_offset = 0;
3778 DCHECK_EQ(next_offset, stack_params_offset);
3780 Register thread_in_wasm_flag_addr = r12;
3782 thread_in_wasm_flag_addr,
3784 __ movl(
MemOperand(thread_in_wasm_flag_addr, 0), Immediate(1));
3786 __ Move(
MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3795 __ CallWasmCodePointerNoSignatureCheck(call_target);
3798 thread_in_wasm_flag_addr,
3800 __ movl(
MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3801 thread_in_wasm_flag_addr =
no_reg;
3803 wrapper_buffer = rcx;
3808 __ movq(wrapper_buffer,
3809 MemOperand(rbp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3813 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1),
3817 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2),
3821 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1),
3825 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2),
3834 MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset));
3836 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3840 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3842 MemOperand(rbp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3844 GetContextFromImplicitArg(masm, rax);
3845 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3847 Label return_promise;
3849 SwitchBackAndReturnPromise(masm, r8, rdi, mode, &return_promise);
3854 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3855 : StackFrame::JS_TO_WASM);
3861 GenerateExceptionHandlingLandingPad(masm, &return_promise);
// Three thin entry points; each presumably forwards to
// JSToWasmWrapperHelper with the matching wasm::Promise mode (bodies
// elided in this excerpt — TODO confirm).
3866void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3870void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3874void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
// Builtin: wasm-to-JS wrapper. Reserves stack space (the register-save
// area — exact contents elided) and tail-calls the CSA implementation
// Builtin::kWasmToJsWrapperCSA.
3878void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3885 __ subq(rsp, Immediate(required_stack_space));
3899 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
// Builtin: landing pad entered by the OOB trap handler. Adjusts the
// faulting return address by kProtectedInstructionReturnAddressOffset,
// and under CET shadow stacks performs a dummy call/bind so the shadow
// stack has a matching entry (returns would otherwise fault), then
// tail-calls the trap-throwing builtin.
3902void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3905 Immediate(WasmFrameConstants::kProtectedInstructionReturnAddressOffset));
3907#ifdef V8_ENABLE_CET_SHADOW_STACK
3914 Label push_dummy_on_shadow_stack;
3915 __ call(&push_dummy_on_shadow_stack);
3917 __ bind(&push_dummy_on_shadow_stack);
3921 __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap);
// Builtin: wasm suspend (JSPI). Opens a STACK_SWITCH frame, records the
// current continuation in the active stack's jump buffer (&resume is the
// resumption point), then:
//  - sanity-compares the suspender's stack against the active stack
//    (cmpq; the check/branch is elided),
//  - makes the caller stack active and pops the suspender chain
//    (kActiveSuspender := suspender->parent),
//  - switches to the caller stack via SwitchStacks, keeping caller and
//    suspender live across the C call,
//  - returns the suspender's promise (load visible at 3984) after
//    zeroing GCScanSlotCount and leaving the frame.
// NOTE(review): excerpt incomplete — lines elided throughout.
3924void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3926 __ EnterFrame(StackFrame::STACK_SWITCH);
3930 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3933 ResetStackSwitchFrameStackSlots(masm);
3940 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3941 FillJumpBuffer(masm, stack, &resume);
3947 __ LoadExternalPointerField(
3949 FieldOperand(suspender, WasmSuspenderObject::kStackOffset),
3958 __ cmpq(suspender_stack, stack);
3970 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3973 parent,
FieldOperand(suspender, WasmSuspenderObject::kParentOffset);
3974 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), parent);
3981 SwitchStacks(masm, stack,
false, {caller, suspender});
3984 FieldOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3986 MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3987 __ Move(GCScanSlotPlace, 0);
3992 __ LeaveFrame(StackFrame::STACK_SWITCH);
// Shared body of WasmResume and WasmReject (`on_resume` selects whether
// the suspended computation is resumed normally or with a thrown value).
// Visible flow:
//  - open a STACK_SWITCH frame, drop the receiver from param_count
//    (decq), allocate spill slots, clear GC-scannable slots;
//  - dig the suspender out of the resume function:
//    closure -> SharedFunctionInfo -> WasmResumeData -> kSuspender;
//  - record the current continuation in the active stack's jump buffer;
//  - splice the suspender back onto the chain: suspender->parent :=
//    active suspender (with RecordWriteField write barrier; slot_address
//    must be rbx or r8 per the DCHECK), then kActiveSuspender :=
//    suspender;
//  - make the suspender's stack (target_stack, an external pointer
//    field) the active stack and SwitchStacks over to it;
//  - on kThrow (per on_resume, dispatch elided): CallRuntime(kThrow)
//    after leaving the frame; otherwise leave the frame and resume.
// NOTE(review): excerpt incomplete — lines elided throughout.
4001void Generate_WasmResumeHelper(MacroAssembler* masm,
wasm::OnResume on_resume) {
4002 __ EnterFrame(StackFrame::STACK_SWITCH);
4005 __ decq(param_count);
4008 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
4011 ResetStackSwitchFrameStackSlots(masm);
4027 FieldOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
4032 suspender,
FieldOperand(resume_data, WasmResumeData::kSuspenderOffset);
4041 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
4042 FillJumpBuffer(masm, active_stack, &suspend);
4052 DCHECK(slot_address == rbx || slot_address == r8);
4053 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
4054 __ StoreTaggedField(
4055 FieldOperand(suspender, WasmSuspenderObject::kParentOffset),
4057 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
4059 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
4062 __ LoadExternalPointerField(
4063 target_stack,
FieldOperand(suspender, WasmSuspenderObject::kStackOffset),
4066 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
4068 SwitchStacks(masm, active_stack,
false, {target_stack});
4075 __ Move(
MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
4082 __ LeaveFrame(StackFrame::STACK_SWITCH);
4086 __ CallRuntime(Runtime::kThrow);
4094 __ LeaveFrame(StackFrame::STACK_SWITCH);
// WasmResume / WasmReject presumably forward to Generate_WasmResumeHelper
// with the matching wasm::OnResume value (bodies elided — TODO confirm).
// WasmOnStackReplace zeroes the OSR target slot at rbp - kOSRTargetOffset
// (what happens before/after the Move is elided).
4100void Builtins::Generate_WasmResume(MacroAssembler* masm) {
4104void Builtins::Generate_WasmReject(MacroAssembler* masm) {
4108void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
4109 MemOperand OSRTargetSlot(rbp, -wasm::kOSRTargetOffset);
4111 __ Move(OSRTargetSlot, 0);
// CEntry helper: when a wasm (secondary) stack calls into the runtime,
// temporarily switch to the isolate's central stack.
//  - kOldSPRegister (r12) is zeroed first; non-zero afterwards means "we
//    switched" and is the cue for SwitchFromTheCentralStackIfNeeded.
//  - If the IsOnCentralStackFlag is already set, skip the switch
//    (&do_not_need_to_switch).
//  - Otherwise save rsp into kOldSPRegister, preserve rax (argc) across
//    a 2-arg C call to wasm_switch_to_the_central_stack(), then install
//    the returned central-stack sp (minus a return-address slot), align
//    it, and (on Windows, elided) handle the home space.
// NOTE(review): excerpt incomplete — lines elided, including the use of
// r12_stack_slot_index.
4116static constexpr Register kOldSPRegister = r12;
4118void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm,
4119 int r12_stack_slot_index) {
4120 using ER = ExternalReference;
4128 __ Move(kOldSPRegister, 0);
4135 ER on_central_stack_flag = ER::Create(
4136 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4138 Label do_not_need_to_switch;
4139 __ cmpb(
__ ExternalReferenceAsOperand(on_central_stack_flag), Immediate(0));
4144 __ movq(kOldSPRegister, rsp);
4146 static constexpr Register argc_input = rax;
4151 __ pushq(argc_input);
4155 __ PrepareCallCFunction(2);
4156 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4160 __ popq(argc_input);
4164 __ subq(central_stack_sp, Immediate(kReturnAddressSlotOffset));
4165 __ movq(rsp, central_stack_sp);
4168 __ AlignStackPointer();
4170#ifdef V8_TARGET_OS_WIN
4181 __ bind(&do_not_need_to_switch);
// Inverse of SwitchToTheCentralStackIfNeeded: if kOldSPRegister is
// non-zero (we switched on entry), restore rsp from it and notify the
// runtime via a 1-arg C call to wasm_switch_from_the_central_stack().
// A zero kOldSPRegister means no switch happened — skip everything.
4184void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm,
4185 int r12_stack_slot_index) {
4186 using ER = ExternalReference;
4188 Label no_stack_change;
4189 __ cmpq(kOldSPRegister, Immediate(0));
4190 __ j(
equal, &no_stack_change);
4191 __ movq(rsp, kOldSPRegister);
4199 __ PrepareCallCFunction(1);
4200 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4207 __ bind(&no_stack_change);
// Fragment of Builtins::Generate_CEntry — the central JS-to-C++ runtime
// call trampoline (the function's opening signature line is elided; the
// visible parameters are argv_mode, builtin_exit_frame,
// switch_to_central_stack). Visible flow:
//  - result_size must be 1 or 2 (CHECK); reserved stack slots are
//    computed per-OS (Windows needs extra slots when the result does not
//    fit in registers; elsewhere up to 2 register results are allowed);
//  - enter an EXIT or BUILTIN_EXIT frame; kArgvRegister (r15) holds argv;
//  - optionally switch to the central stack around the C call (wasm);
//  - after the call, a root-compare against the exception sentinel
//    (elided) branches to &exception_returned;
//  - normal path: leave the exit frame and drop argv plus the receiver;
//  - exception path: call Runtime::kUnwindAndFindExceptionHandler via a
//    3-arg C call, then install the pending handler's context/sp/fp,
//    clear the C entry FP, and jump to the pending handler entrypoint.
//    Under CET shadow stacks the frames above the handler are counted
//    (NumFramesAbovePendingHandler) so the shadow stack can be unwound.
// NOTE(review): excerpt incomplete — many lines elided; the exact call
// sequence and register conventions are not fully visible here.
 ArgvMode argv_mode,
bool builtin_exit_frame,
4220 bool switch_to_central_stack) {
4221 CHECK(result_size == 1 || result_size == 2);
4223 using ER = ExternalReference;
4234 const int kSwitchToTheCentralStackSlots = switch_to_central_stack ? 1 : 0;
4235#ifdef V8_TARGET_OS_WIN
4239 static constexpr int kMaxRegisterResultSize = 1;
4240 const int kReservedStackSlots = kSwitchToTheCentralStackSlots +
4241 (result_size <= kMaxRegisterResultSize ? 0 : result_size);
4245 static constexpr int kMaxRegisterResultSize = 2;
4246 const int kReservedStackSlots = kSwitchToTheCentralStackSlots;
4247 CHECK_LE(result_size, kMaxRegisterResultSize);
4249#if V8_ENABLE_WEBASSEMBLY
4250 const int kR12SpillSlot = kReservedStackSlots - 1;
4254 kReservedStackSlots,
4255 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT, rbx);
4260 static constexpr Register kArgvRegister = r15;
4264 __ leaq(kArgvRegister,
4274#if V8_ENABLE_WEBASSEMBLY
4275 if (switch_to_central_stack) {
4276 SwitchToTheCentralStackIfNeeded(masm, kR12SpillSlot);
4282 __ CheckStackAlignment();
4287 if (result_size <= kMaxRegisterResultSize) {
4294#ifdef V8_TARGET_OS_WIN
4308#ifdef V8_TARGET_OS_WIN
4309 if (result_size > kMaxRegisterResultSize) {
4322 Label exception_returned;
4326 __ j(
equal, &exception_returned);
4328#if V8_ENABLE_WEBASSEMBLY
4329 if (switch_to_central_stack) {
4330 SwitchFromTheCentralStackIfNeeded(masm, kR12SpillSlot);
4339 ER exception_address =
4340 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4342 masm->ExternalReferenceAsOperand(exception_address));
4348 __ LeaveExitFrame();
4351 __ PopReturnAddressTo(rcx);
4352 __ leaq(rsp, Operand(kArgvRegister, kReceiverOnStackSize));
4353 __ PushReturnAddressFrom(rcx);
4358 __ bind(&exception_returned);
4360 ER pending_handler_context_address = ER::Create(
4361 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4362 ER pending_handler_entrypoint_address = ER::Create(
4363 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4364 ER pending_handler_fp_address =
4365 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4366 ER pending_handler_sp_address =
4367 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4371 ER find_handler = ER::Create(Runtime::kUnwindAndFindExceptionHandler);
4377 __ PrepareCallCFunction(3);
4381#ifdef V8_ENABLE_CET_SHADOW_STACK
4383 ER num_frames_above_pending_handler_address = ER::Create(
4384 IsolateAddressId::kNumFramesAbovePendingHandlerAddress, masm->isolate());
4385 __ movq(rcx, masm->ExternalReferenceAsOperand(
4386 num_frames_above_pending_handler_address));
4392 masm->ExternalReferenceAsOperand(pending_handler_context_address));
4393 __ movq(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
4394 __ movq(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
4405 ER c_entry_fp_address =
4406 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4407 Operand c_entry_fp_operand =
4408 masm->ExternalReferenceAsOperand(c_entry_fp_address);
4409 __ movq(c_entry_fp_operand, Immediate(0));
4413 masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
// Builtin: wasm stack-overflow handler. First tries to grow the current
// (secondary) stack via a 5-arg C call to wasm_grow_stack() inside an
// INTERNAL frame (old_fp saved from rbp; frame_base from the
// descriptor). If growing fails (&call_runtime), it loads the native
// context from the instance data and calls Runtime::kWasmStackGuard
// inside an INTERNAL frame instead (which may throw RangeError).
// NOTE(review): excerpt incomplete — the success path and the argument
// setup for the C call are elided.
4418void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4419 using ER = ExternalReference;
4420 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4429#ifdef V8_TARGET_OS_WIN
4433 __ movq(old_fp, rbp);
4437 FrameScope scope(masm, StackFrame::INTERNAL);
4439 __ PrepareCallCFunction(5);
4442#ifdef V8_TARGET_OS_WIN
4446 __ CallCFunction(ER::wasm_grow_stack(), 5);
4467 __ bind(&call_runtime);
4469 MemOperand(rbp, WasmFrameConstants::kWasmInstanceDataOffset));
4473 WasmTrustedInstanceData::kNativeContextOffset));
4475 __ EnterFrame(StackFrame::INTERNAL);
4478 __ CallRuntime(Runtime::kWasmStackGuard);
4479 __ LeaveFrame(StackFrame::INTERNAL);
// Builtin: truncating double -> int32 conversion slow path (used when
// cvttsd2si overflows). Operates on the double's raw mantissa/exponent
// words read from the stack:
//  - small exponents: shift the mantissa left by (exponent - delta) to
//    produce the low 32 bits (shll_cl path; cmpl 31 bounds the shift);
//  - large exponents go to &process_64_bits (body elided);
//  - &check_negative negates the result and uses cmovl on the sign of
//    the exponent word to pick the positive or negated value;
//  - the result is stored back through return_operand (aliases
//    mantissa_operand).
// NOTE(review): excerpt incomplete — register setup and the 64-bit path
// are elided.
4485void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4486 Label check_negative, process_64_bits, done;
4496 MemOperand return_operand = mantissa_operand;
4511 __ movl(scratch1, mantissa_operand);
4513 __ movl(rcx, exponent_operand);
4524 __ subl(rcx, Immediate(delta));
4525 __ xorl(result_reg, result_reg);
4526 __ cmpl(rcx, Immediate(31));
4528 __ shll_cl(scratch1);
4531 __ bind(&process_64_bits);
4536 __ bind(&check_negative);
4537 __ movl(result_reg, scratch1);
4538 __ negl(result_reg);
4539 __ cmpl(exponent_operand, Immediate(0));
4540 __ cmovl(
greater, result_reg, scratch1);
4544 __ movl(return_operand, result_reg);
// Fragment of Builtins::Generate_CallApiCallbackImpl (the opening of the
// function, incl. the mode switch selecting registers, is elided; the
// visible lines show both the Generic and Optimized descriptor register
// assignments). It builds a v8::FunctionCallbackInfo on the stack and
// calls the C++ API callback:
//  - the static_asserts pin the FunctionCallbackArguments (FCA) layout
//    the pushes below depend on (new_target, target, return value,
//    context, isolate, unused — 6 slots);
//  - the topmost-script-having-context is persisted root-relative;
//  - the implicit args (func_templ, isolate address, …) are pushed under
//    the return address (PopReturnAddressTo/PushReturnAddressFrom);
//  - for the generic mode, api_function_address is loaded from the
//    function template's kMaybeRedirectedCallback external pointer;
//  - an API_CALLBACK_EXIT frame is entered; argc and the implicit-args /
//    values pointers are stored into the FunctionCallbackInfo slots;
//  - finally CallApiFunctionAndReturn (call elided) invokes the callback
//    through the mode-specific thunk, dropping kSlotsToDropOnReturn and
//    reading the return value from FC::kReturnValueOffset.
// NOTE(review): excerpt incomplete — lines elided throughout.
4587 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4599 api_function_address =
4600 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4606 DCHECK(!
AreAliased(api_function_address, topmost_script_having_context, argc,
4609 using FCA = FunctionCallbackArguments;
4610 using ER = ExternalReference;
4611 using FC = ApiCallbackExitFrameConstants;
4613 static_assert(FCA::kArgsLength == 6);
4614 static_assert(FCA::kNewTargetIndex == 5);
4615 static_assert(FCA::kTargetIndex == 4);
4616 static_assert(FCA::kReturnValueIndex == 3);
4617 static_assert(FCA::kContextIndex == 2);
4618 static_assert(FCA::kIsolateIndex == 1);
4619 static_assert(FCA::kUnusedIndex == 0);
4637 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4638 topmost_script_having_context);
4644 __ PopReturnAddressTo(scratch);
4647 __ Push(func_templ);
4650 __ PushAddress(ER::isolate_address());
4658 __ LoadExternalPointerField(
4659 api_function_address,
4661 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
4665 __ PushReturnAddressFrom(scratch);
4666 __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4667 StackFrame::API_CALLBACK_EXIT, api_function_address);
4669 Operand argc_operand = Operand(rbp, FC::kFCIArgcOffset);
4675 __ movq(argc_operand, argc);
4678 __ leaq(scratch, Operand(rbp, FC::kImplicitArgsArrayOffset));
4679 __ movq(Operand(rbp, FC::kFCIImplicitArgsOffset), scratch);
4682 __ leaq(scratch, Operand(rbp, FC::kFirstArgumentOffset));
4683 __ movq(Operand(rbp, FC::kFCIValuesOffset), scratch);
4686 __ RecordComment(
"v8::FunctionCallback's argument.");
4687 __ leaq(function_callback_info_arg,
4688 Operand(rbp, FC::kFunctionCallbackInfoOffset));
4692 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4695 Operand return_value_operand = Operand(rbp, FC::kReturnValueOffset);
4696 static constexpr int kSlotsToDropOnReturn =
4699 const bool with_profiling =
4702 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4703 &argc_operand, return_value_operand);
// Builtin: accessor-getter trampoline. Builds a v8::PropertyCallbackInfo
// on the stack and calls the C++ getter:
//  - the static_asserts pin the PropertyCallbackArguments (PCA) layout
//    (8 slots: key, should-throw, holder, isolate, holderV2, return
//    value, data, this) that the (elided) pushes rely on;
//  - the isolate address and the property name are placed among the
//    implicit args under the return address;
//  - api_function_address is loaded via an external pointer field;
//  - an API_ACCESSOR_EXIT frame is entered, and
//    property_callback_info_arg points at the args array;
//  - with V8_ENABLE_DIRECT_HANDLE, name_arg is passed as a direct
//    pointer to the key slot (kPropertyKeyIndex == 0 makes the info
//    pointer double as the name handle);
//  - CallApiFunctionAndReturn (call elided) goes through the
//    invoke_accessor_getter_callback thunk, with profiling enabled,
//    dropping kPropertyCallbackInfoArgsLength slots on return.
// NOTE(review): excerpt incomplete — lines elided throughout.
4706void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4718 Register api_function_address = r8;
4729 using PCA = PropertyCallbackArguments;
4730 using ER = ExternalReference;
4731 using FC = ApiAccessorExitFrameConstants;
4733 static_assert(PCA::kPropertyKeyIndex == 0);
4734 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4735 static_assert(PCA::kHolderIndex == 2);
4736 static_assert(PCA::kIsolateIndex == 3);
4737 static_assert(PCA::kHolderV2Index == 4);
4738 static_assert(PCA::kReturnValueIndex == 5);
4739 static_assert(PCA::kDataIndex == 6);
4740 static_assert(PCA::kThisIndex == 7);
4741 static_assert(PCA::kArgsLength == 8);
4758 __ PopReturnAddressTo(scratch);
4765 __ PushAddress(ER::isolate_address());
4770 __ LoadTaggedField(name_arg,
4774 __ RecordComment(
"Load api_function_address");
4775 __ LoadExternalPointerField(
4776 api_function_address,
4780 __ PushReturnAddressFrom(scratch);
4781 __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4782 StackFrame::API_ACCESSOR_EXIT, api_function_address);
4784 __ RecordComment(
"Create v8::PropertyCallbackInfo object on the stack.");
4789 __ leaq(property_callback_info_arg, Operand(rbp, FC::kArgsArrayOffset));
4791 DCHECK(!
AreAliased(api_function_address, property_callback_info_arg, name_arg,
4794#ifdef V8_ENABLE_DIRECT_HANDLE
4800 static_assert(PCA::kPropertyKeyIndex == 0);
4801 __ movq(name_arg, property_callback_info_arg);
4804 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4809 Operand return_value_operand = Operand(rbp, FC::kReturnValueOffset);
4810 static constexpr int kSlotsToDropOnReturn =
4811 FC::kPropertyCallbackInfoArgsLength;
4812 Operand*
const kUseStackSpaceConstant =
nullptr;
4814 const bool with_profiling =
true;
4816 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4817 kUseStackSpaceConstant, return_value_operand);
// Builtin: DirectCEntry — body elided in this excerpt (on x64 this is
// typically unused/unreachable — TODO confirm against the full source).
4820void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
// Shared body of the eager/lazy deoptimization entry points
// (`deopt_kind` selects). Visible flow:
//  1. Spill all allocatable SIMD128 (XMM) registers into kXmmRegsSize
//     bytes of stack (the DCHECKs assert simd128 and double allocatable
//     counts match, so one loop covers both).
//  2. Call new_deoptimizer_function() (5 C args: the deopt kind, the
//     return address, the parent sp, the function unless it is a Smi —
//     see &context_check — plus one elided arg) to create the
//     Deoptimizer.
//  3. Copy the input frame into the FrameDescription (pop loop at
//     &pop_loop_header popping into Operand(rdx, 0)), release the XMM
//     spill area, and clear StackIsIterable.
//  4. Call compute_output_frames_function() (2 args).
//  5. Materialize the output frames: outer loop over the output frame
//     array (rax), inner loop pushing each frame's contents downward
//     (subq rcx, sizeof(intptr_t)).
//  6. Restore XMM registers from the FrameDescription, set
//     StackIsIterable back, and continue in the target frame.
//  7. Under CET shadow stacks (&shadow_stack_push): load the recorded
//     shadow-stack contents (count + array from the Deoptimizer) and
//     jump into AdaptShadowStackForDeopt at its fixed first-entry
//     offset to rebuild the hardware shadow stack.
// NOTE(review): excerpt incomplete — large parts elided between the
// embedded original line numbers; register roles (rax/rbx/rcx/rdx/r8)
// are inferred from the visible uses only.
4826void Generate_DeoptimizationEntry(MacroAssembler* masm,
4828 Isolate* isolate = masm->isolate();
4833 __ AllocateStackSpace(kXmmRegsSize);
4837 config->num_allocatable_simd128_registers());
4838 DCHECK_EQ(config->num_allocatable_simd128_registers(),
4839 config->num_allocatable_double_registers());
4840 for (
int i = 0;
i < config->num_allocatable_simd128_registers(); ++
i) {
4841 int code = config->GetAllocatableSimd128Code(
i);
4844 __ movdqu(Operand(rsp,
offset), xmm_reg);
4854 static constexpr int kSavedRegistersAreaSize =
4856 static constexpr int kCurrentOffsetToReturnAddress = kSavedRegistersAreaSize;
4857 static constexpr int kCurrentOffsetToParentSP =
4866 __ movq(
kCArgRegs[2], Operand(rsp, kCurrentOffsetToReturnAddress));
4868 __ leaq(
kCArgRegs[3], Operand(rsp, kCurrentOffsetToParentSP));
4873 __ PrepareCallCFunction(5);
4875 Label context_check;
4877 __ JumpIfSmi(rdi, &context_check);
4879 __ bind(&context_check);
4881 __ Move(
kCArgRegs[1],
static_cast<int>(deopt_kind));
4886#ifdef V8_TARGET_OS_WIN
4896 AllowExternalCallThatCantCauseGC scope(masm);
4897 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4917 __ addq(rsp, Immediate(kXmmRegsSize));
4921 __ movb(
__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
4936 Label pop_loop_header;
4937 __ jmp(&pop_loop_header);
4940 __ Pop(Operand(rdx, 0));
4941 __ addq(rdx, Immediate(
sizeof(intptr_t)));
4942 __ bind(&pop_loop_header);
4948 __ PrepareCallCFunction(2);
4952 AllowExternalCallThatCantCauseGC scope(masm);
4953 __ CallCFunction(ExternalReference::compute_output_frames_function(), 2);
4956#ifdef V8_ENABLE_CET_SHADOW_STACK
4963 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4969 __ jmp(&outer_loop_header);
4970 __ bind(&outer_push_loop);
4972 __ movq(rbx, Operand(rax, 0));
4974 __ jmp(&inner_loop_header);
4975 __ bind(&inner_push_loop);
4976 __ subq(rcx, Immediate(
sizeof(intptr_t)));
4978 __ bind(&inner_loop_header);
4982 __ bind(&outer_loop_header);
4984 __ j(
below, &outer_push_loop);
4991 Label push_registers;
4993 __ j(zero, &push_registers);
4995 __ bind(&push_registers);
5006#ifdef V8_ENABLE_CET_SHADOW_STACK
5008 Label shadow_stack_push;
5009 __ cmpb(
__ ExternalReferenceAsOperand(
5010 ExternalReference::address_of_cet_compatible_flag(),
5016 Generate_RestoreFrameDescriptionRegisters(masm, rbx);
5018 __ movb(
__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5024#ifdef V8_ENABLE_CET_SHADOW_STACK
5026 __ bind(&shadow_stack_push);
5033 __ movl(kAdaptShadowStackCountRegister,
5034 Operand(r8, Deoptimizer::shadow_stack_count_offset()));
5035 __ movq(rax, Operand(r8, Deoptimizer::shadow_stack_offset()));
5037 Label check_more_pushes, next_push;
5040 __ bind(&next_push);
5044 __ bind(&check_more_pushes);
5060 Builtin::kAdaptShadowStackForDeopt)));
5066 Immediate(kAdaptShadowStackDispatchFirstEntryOffset));
// Eager and lazy deopt entry points; each presumably forwards to
// Generate_DeoptimizationEntry with the matching DeoptimizeKind (bodies
// elided — TODO confirm).
5075void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5079void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
// Builtin: tier up an interpreter frame to baseline (Sparkplug) code via
// OSR. Visible flow:
//  - load the SFI from the closure, reset its age, and load the baseline
//    code via the trusted function-data pointer (code_obj);
//  - load the feedback cell and vector from the closure; if the vector
//    is not valid, fall through to &install_baseline_code which calls
//    Runtime::kInstallBaselineCode inside an INTERNAL frame;
//  - compute the baseline pc for the current bytecode offset through a
//    3-arg C call to baseline_pc_for_next_executed_bytecode();
//  - finally Generate_OSREntry jumps into the baseline code.
// NOTE(review): excerpt incomplete — lines elided (e.g. the vector
// validity check and the bytecode offset handling).
5086void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
5087 MacroAssembler* masm) {
5097 Register shared_function_info(code_obj);
5099 shared_function_info,
5100 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
5102 ResetSharedFunctionInfoAge(masm, shared_function_info);
5104 __ LoadTrustedPointerField(
5107 SharedFunctionInfo::kTrustedFunctionDataOffset),
5120 __ LoadTaggedField(feedback_cell,
5121 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
5122 __ LoadTaggedField(feedback_vector,
5123 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
5125 Label install_baseline_code;
5132 __ SmiUntagUnsigned(
5146 feedback_vector =
no_reg;
5150 __ LoadAddress(get_baseline_pc,
5151 ExternalReference::baseline_pc_for_next_executed_bytecode());
5161 FrameScope scope(masm, StackFrame::INTERNAL);
5162 __ PrepareCallCFunction(3);
5166 __ CallCFunction(get_baseline_pc, 3);
5172 Generate_OSREntry(masm, code_obj);
5175 __ bind(&install_baseline_code);
5177 FrameScope scope(masm, StackFrame::INTERNAL);
5180 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
// Builtin: restart-frame trampoline (debugger "restart frame"). Emits
// the shadow-stack adaptation call first (with a jump over it for the
// normal path), records the post-adaptation pc offset for the
// deoptimizer, tears down the INTERPRETED frame, and (in elided code)
// re-invokes the function.
5187void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5188 Generate_CallToAdaptShadowStackForDeopt(masm,
true);
5189 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
5200 __ LeaveFrame(StackFrame::INTERPRETED);
5204#ifdef V8_ENABLE_LEAPTIERING
#define Assert(condition)
#define JUMP_IF_EQUAL(NAME)
interpreter::Bytecode bytecode
#define RETURN_BYTECODE_LIST(V)
static constexpr int kPhysicalSignificandSize
static constexpr Register HolderRegister()
static constexpr Register CallbackRegister()
static constexpr int kFeedbackCellFromFp
static void Generate_InterpreterPushArgsThenConstructImpl(MacroAssembler *masm, InterpreterPushArgsMode mode)
static void Generate_CallOrConstructForwardVarargs(MacroAssembler *masm, CallOrConstructMode mode, Builtin target_builtin)
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static void Generate_InterpreterEntryTrampoline(MacroAssembler *masm, InterpreterEntryTrampolineMode mode)
static void Generate_Adaptor(MacroAssembler *masm, int formal_parameter_count, Address builtin_address)
static void Generate_CEntry(MacroAssembler *masm, int result_size, ArgvMode argv_mode, bool builtin_exit_frame, bool switch_to_central_stack)
static constexpr Builtin CallFunction(ConvertReceiverMode=ConvertReceiverMode::kAny)
static constexpr Builtin AdaptorWithBuiltinExitFrame(int formal_parameter_count)
static void Generate_MaglevFunctionEntryStackCheck(MacroAssembler *masm, bool save_new_target)
static void Generate_Call(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallFunction(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallOrConstructVarargs(MacroAssembler *masm, Builtin target_builtin)
static void Generate_CallApiCallbackImpl(MacroAssembler *masm, CallApiCallbackMode mode)
static constexpr Builtin Call(ConvertReceiverMode=ConvertReceiverMode::kAny)
static void Generate_CallBoundFunctionImpl(MacroAssembler *masm)
static void Generate_ConstructForwardAllArgsImpl(MacroAssembler *masm, ForwardWhichFrame which_frame)
static void Generate_InterpreterPushArgsThenCallImpl(MacroAssembler *masm, ConvertReceiverMode receiver_mode, InterpreterPushArgsMode mode)
static constexpr BytecodeOffset None()
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kActualArgumentsCount, kTopmostScriptHavingContext, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register TopmostScriptHavingContextRegister()
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kApiFunctionAddress, kActualArgumentsCount, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register ActualArgumentsCountRegister()
static constexpr int kContextOrFrameTypeOffset
static constexpr int kCallerFPOffset
static constexpr int kFixedSlotCountAboveFp
static constexpr int kFixedFrameSizeAboveFp
static constexpr int kConstructorOffset
static constexpr int kLengthOffset
static constexpr int kContextOffset
static const int kOsrPcOffsetIndex
static int caller_frame_top_offset()
static int output_offset()
static int input_offset()
static V8_EXPORT_PRIVATE const int kAdaptShadowStackOffsetToSubtract
static int output_count_offset()
static constexpr int kXMMRegisterSize
static constexpr int kNextExitFrameFPOffset
static constexpr int kArgvOffset
static constexpr int kArgcOffset
static constexpr int kSPOffset
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr int kImplicitReceiverOffset
static constexpr int kContextOffset
static constexpr int simd128_registers_offset()
static int frame_size_offset()
static int continuation_offset()
static int frame_content_offset()
static int registers_offset()
static const int kMantissaBits
static const uint32_t kExponentMask
static const int kExponentBias
static const int kExponentShift
static constexpr int kHeaderSize
static constexpr int kMapOffset
static constexpr int kBytecodeOffsetFromFp
static constexpr int BuiltinEntrySlotOffset(Builtin id)
static constexpr uint32_t thread_in_wasm_flag_address_offset()
static constexpr int8_t kNumRegisters
static constexpr XMMRegister from_code(int8_t code)
constexpr int8_t code() const
static const RegisterConfiguration * Default()
static constexpr Register from_code(int code)
static constexpr Register MicrotaskQueueRegister()
static constexpr Tagged< Smi > FromInt(int value)
static constexpr Tagged< Smi > zero()
static constexpr int32_t TypeToMarker(Type type)
@ OUTERMOST_JSENTRY_FRAME
static constexpr int kContextOffset
static constexpr int kArgCOffset
static constexpr int kFunctionOffset
static constexpr auto registers()
static constexpr int OffsetOfElementAt(int index)
static constexpr int kFixedFrameSize
static constexpr int kFixedFrameSizeFromFp
static constexpr int kFrameTypeOffset
static constexpr int kFeedbackVectorFromFp
static constexpr int kBytecodeArrayFromFp
static constexpr int kNumPushedFpRegisters
static constexpr RegList kPushedGpRegs
static constexpr DoubleRegList kPushedFpRegs
static constexpr Register GapRegister()
static constexpr Register WrapperBufferRegister()
static constexpr int kNumberOfSavedGpParamRegs
static constexpr int kNumberOfSavedFpParamRegs
static constexpr Register ObjectRegister()
static constexpr Register SlotAddressRegister()
static const int kBytecodeCount
static constexpr int SharedFunctionInfoOffsetInTaggedJSFunction()
static constexpr int ToTagged(int offset)
#define ASM_CODE_COMMENT_STRING(asm,...)
#define ASM_CODE_COMMENT(asm)
#define COMPRESS_POINTERS_BOOL
#define V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE_BOOL
base::Vector< const DirectHandle< Object > > args
DirectHandle< Object > new_target
ApiCallbackExitFrameConstants FC
FunctionCallbackArguments FCA
int invoke(const char *params)
void Store(LiftoffAssembler *assm, LiftoffRegister src, MemOperand dst, ValueKind kind)
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr int kStackStateOffset
constexpr DoubleRegister kFpReturnRegisters[]
constexpr int kStackSpOffset
constexpr int kStackFpOffset
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
constexpr int kStackParentOffset
constexpr Register kGpReturnRegisters[]
constexpr int kStackLimitOffset
constexpr int kStackPcOffset
constexpr Register no_reg
constexpr Register kRootRegister
constexpr int kFunctionEntryBytecodeOffset
constexpr int kTaggedSize
constexpr int kSimd128Size
DwVfpRegister DoubleRegister
constexpr DoubleRegister kScratchDoubleReg
@ kUnknownIndirectPointerTag
static void Generate_InterpreterEnterBytecode(MacroAssembler *masm)
constexpr Register kJavaScriptCallTargetRegister
constexpr int kPCOnStackSize
constexpr int kNumberOfRegisters
Operand FieldOperand(Register object, int offset)
constexpr uint16_t kDontAdaptArgumentsSentinel
constexpr Register kJavaScriptCallArgCountRegister
constexpr Register kInterpreterAccumulatorRegister
constexpr int kSystemPointerSizeLog2
constexpr int kJSArgcReceiverSlots
static void GenerateInterpreterPushArgs(MacroAssembler *masm, Register num_args, Register start_address, Register scratch)
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler *masm, Register bytecode_array, Register bytecode_offset, Register bytecode, Register scratch1, Register scratch2, Register scratch3, Label *if_return)
constexpr int kSystemPointerSize
static void LeaveInterpreterFrame(MacroAssembler *masm, Register scratch1, Register scratch2)
constexpr Register kReturnRegister1
constexpr Register kReturnRegister0
constexpr Register kScratchRegister
@ LAST_CALLABLE_JS_FUNCTION_TYPE
@ FIRST_CALLABLE_JS_FUNCTION_TYPE
constexpr Register kWasmImplicitArgRegister
constexpr Register kContextRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister ®1, const CPURegister ®2, const CPURegister ®3=NoReg, const CPURegister ®4=NoReg, const CPURegister ®5=NoReg, const CPURegister ®6=NoReg, const CPURegister ®7=NoReg, const CPURegister ®8=NoReg)
constexpr Register kInterpreterDispatchTableRegister
@ kFunctionTemplateInfoCallbackTag
constexpr Register kWasmTrapHandlerFaultAddressRegister
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr Register kJavaScriptCallExtraArg1Register
constexpr int JSParameterCount(int param_count_without_receiver)
constexpr Register kJavaScriptCallCodeStartRegister
constexpr Register kPtrComprCageBaseRegister
Register ReassignRegister(Register &source)
static void AssertCodeIsBaseline(MacroAssembler *masm, Register code, Register scratch)
static void Generate_JSEntryTrampolineHelper(MacroAssembler *masm, bool is_construct)
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
@ kDefaultDerivedConstructor
static constexpr Address kNullAddress
@ times_system_pointer_size
constexpr Register kCArgRegs[]
constexpr int kDoubleSize
constexpr Register kJavaScriptCallDispatchHandleRegister
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler *masm, Register sfi, Register bytecode, Register scratch1, Label *is_baseline, Label *is_unavailable)
constexpr Register kInterpreterBytecodeOffsetRegister
constexpr Register kJavaScriptCallNewTargetRegister
constexpr Register kJSFunctionRegister
MemOperand ExitFrameStackSlotOperand(int offset)
constexpr Register kInterpreterBytecodeArrayRegister
constexpr bool PointerCompressionIsEnabled()
i::Address Load(i::Address address)
#define DCHECK_LE(v1, v2)
#define CHECK_LE(lhs, rhs)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define OFFSET_OF_DATA_START(Type)