#if V8_ENABLE_WEBASSEMBLY

#define __ ACCESS_MASM(masm)
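// Throughout this file `__` is the usual V8 shorthand: every `__ foo(...)`
// emits instructions through the MacroAssembler passed in as `masm`.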
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                int formal_parameter_count, Address address) {

enum class ArgumentsElementType { kRaw, kHandle };
void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
                            Register scratch, Register scratch2,
                            ArgumentsElementType element_type) {
    if (element_type == ArgumentsElementType::kHandle) {
    __ AddWord(scratch, scratch, Operand(-1));
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
      UseScratchRegisterScope temps(masm);
      Generate_PushArguments(masm, t2, a0, temps.Acquire(), temps.Acquire(),
                             ArgumentsElementType::kRaw);
    __ PushRoot(RootIndex::kTheHoleValue);
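// Generate_JSConstructStubGeneric: the generic [[Construct]] stub. It
// allocates the implicit receiver via FastNewObject (skipped for derived
// class constructors), invokes the constructor, and then decides whether to
// return the constructor's result or the implicit receiver.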
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
  __ EnterFrame(StackFrame::CONSTRUCT);
  __ PushRoot(RootIndex::kUndefinedValue);
    UseScratchRegisterScope temps(masm);
    temps.Include(t1, t2);
    Register func_info = temps.Acquire();
    __ LoadTaggedField(
        func_info, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ Load32U(func_info,
               FieldMemOperand(func_info, SharedFunctionInfo::kFlagsOffset));
    __ DecodeField<SharedFunctionInfo::FunctionKindBits>(func_info);
    __ JumpIfIsInRange(
        func_info,
        static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
        static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
        &not_create_implicit_receiver);
    __ CallBuiltin(Builtin::kFastNewObject);
    __ BranchShort(&post_instantiation_deopt_entry);
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(a0, RootIndex::kTheHoleValue);
  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
      masm->pc_offset());
  __ bind(&post_instantiation_deopt_entry);
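  // The deoptimizer re-enters the construct stub here, after the receiver has
  // been instantiated (the PC offset recorded just above).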
  Label stack_overflow;
    UseScratchRegisterScope temps(masm);
    __ StackOverflowCheck(a0, temps.Acquire(), temps.Acquire(),
                          &stack_overflow);
    UseScratchRegisterScope temps(masm);
    Generate_PushArguments(masm, t2, a0, temps.Acquire(), temps.Acquire(),
                           ArgumentsElementType::kRaw);
  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(a0, RootIndex::kUndefinedValue, &check_receiver);

  __ bind(&use_receiver);
  __ JumpIfRoot(a0, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::CONSTRUCT);
  __ DropArguments(a1);

  __ bind(&check_receiver);
  __ JumpIfSmi(a0, &use_receiver);
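  // Only a JSReceiver is acceptable as an explicit constructor result; Smis
  // and other primitives fall back to the implicit receiver.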
    UseScratchRegisterScope temps(masm);
    temps.Include(t1, t2);
    Register map = temps.Acquire(), type = temps.Acquire();
    __ GetObjectType(a0, map, type);
    static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ Branch(&leave_and_return, greater_equal, type,
              Operand(FIRST_JS_RECEIVER_TYPE));
    __ Branch(&use_receiver);

  __ bind(&do_throw);
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
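// The fragments below belong to the helper that fetches a function's
// BytecodeArray or baseline Code from its trusted function data, branching
// to is_baseline / is_unavailable as appropriate.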
  __ DecodeField<Code::KindField>(scratch);
  __ Assert(eq, AbortReason::kExpectedBaselineData, scratch,
            Operand(static_cast<int64_t>(CodeKind::BASELINE)));
  __ LoadTrustedPointerField(
      data,
      FieldMemOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset),
      kUnknownIndirectPointerTag);
  __ GetObjectType(data, scratch1, scratch1);
  __ Branch(&not_baseline, ne, scratch1, Operand(CODE_TYPE));
  __ Branch(is_baseline);
  __ bind(&not_baseline);
  __ Branch(is_baseline, eq, scratch1, Operand(CODE_TYPE));
  __ Branch(&done, eq, scratch1, Operand(BYTECODE_ARRAY_TYPE));
  __ Branch(is_unavailable, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ LoadProtectedPointerField(
      bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset));
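// Generate_ResumeGeneratorTrampoline: resumes a suspended generator. a0
// carries the value to resume with and a1 the JSGeneratorObject; the stored
// parameters and registers are pushed back onto the stack before the
// generator function is re-entered, with extra paths for debugger stepping.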
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, a0,
                      kRAHasNotBeenSaved, SaveFPRegsMode::kIgnore);
  __ AssertGeneratorObject(a1);
  __ LoadTaggedField(a4,
                     FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(a5, debug_hook);
  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, debug_suspended_generator);
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);
  Label stack_overflow;
  __ LoadTaggedField(
      t1,
      FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
  Label done_loop, loop;
  __ SubWord(a3, a3, Operand(1));

  Label ok, is_baseline, is_unavailable;
  Register bytecode = a3;
                                          &is_baseline, &is_unavailable);
  __ bind(&is_unavailable);
  __ Abort(AbortReason::kMissingBytecodeArray);
  __ bind(&is_baseline);
  __ GetObjectType(bytecode, t5, t5);
  __ Assert(eq, AbortReason::kMissingBytecodeArray, t5, Operand(CODE_TYPE));
  __ Lhu(a0, FieldMemOperand(
                 a0, SharedFunctionInfo::kFormalParameterCountOffset));
  __ JumpJSFunction(a1);
  __ bind(&prepare_step_in_if_stepping);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
  __ LoadTaggedField(a4,
                     FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ Branch(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
  __ LoadTaggedField(a4,
                     FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ Branch(&stepping_prepared);

  __ bind(&stack_overflow);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
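// Generate_JSEntryVariant: builds the JS entry frame used whenever C++ calls
// into JavaScript. It saves the callee-saved registers, records the C entry
// FP and fast-C-call caller FP/PC slots, installs a stack handler around the
// call to the entry trampoline, and restores everything on the way out.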
  __ SubWord(scratch1, sp, scratch1);

  Label invoke, handler_entry, exit;
    NoRootArrayScope no_root_array(masm);
#ifdef V8_COMPRESS_POINTERS
                IsolateData::cage_base_offset());
  __ li(s1, Operand(-1));
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ li(s5, c_entry_fp);
  __ Push(s1, s2, s3, s4);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerPC);

  Label non_outermost_js;
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ li(s1, js_entry_sp);
  __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg),
            Label::Distance::kNear);
  __ bind(&non_outermost_js);

  __ BranchShort(&invoke);
  __ bind(&handler_entry);
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
  __ LoadRoot(a0, RootIndex::kException);
  __ BranchShort(&exit);

  __ bind(&invoke);
  __ PushStackHandler();
  __ CallBuiltin(entry_trampoline);
  __ PopStackHandler();

  Label non_outermost_js_2;
  __ Branch(&non_outermost_js_2, ne, a5,
            Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ li(a5, js_entry_sp);
  __ bind(&non_outermost_js_2);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerPC);
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtin::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtin::kRunMicrotasksTrampoline);
}
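// The JS entry trampoline: invoked from the entry stub with the function,
// receiver, argc and argv. It installs the context, pushes the arguments
// (dereferencing them, since they arrive as handles), and calls the function.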
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ li(cp, context_address);
    Generate_PushArguments(masm, a5, a4, temps.Acquire(), temps.Acquire(),
                           ArgumentsElementType::kHandle);
  __ LoadRoot(a4, RootIndex::kUndefinedValue);
#ifndef V8_COMPRESS_POINTERS
  __ CallBuiltin(builtin);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kRunMicrotasks);
}
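// LeaveInterpreterFrame: tears down the interpreter frame and drops the
// arguments, using whichever of the declared and actual parameter counts is
// larger.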
  __ LoadWord(params_size,
              MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  Register actual_params_size = scratch2;
  __ LoadWord(actual_params_size,
              MemOperand(fp, StandardFrameConstants::kArgCOffset));

  Label L1;
  __ Branch(&L1, le, actual_params_size, Operand(params_size),
            Label::Distance::kNear);
  __ Move(params_size, actual_params_size);
  __ bind(&L1);

  __ LeaveFrame(StackFrame::INTERPRETED);
  __ DropArguments(params_size);
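// AdvanceBytecodeOffsetOrReturn: advances the interpreter's bytecode offset
// by the size of the current bytecode, handling the kWide / kExtraWide
// prefix bytecodes by switching to the matching size table, restarting
// JumpLoop at its unscaled offset, and jumping to if_return for returns.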
  Register bytecode_size_table = scratch1;
  Register original_bytecode_offset = scratch3;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode,
                     bytecode_size_table, original_bytecode_offset));
  __ Move(original_bytecode_offset, bytecode_offset);
  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

  Label process_bytecode, extra_wide;
  static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
  static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  static_assert(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg),
            Label::Distance::kNear);
  __ AddWord(bytecode_offset, bytecode_offset, Operand(1));
  __ AddWord(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ AddWord(bytecode_size_table, bytecode_size_table,
             Operand(kByteSize * interpreter::Bytecodes::kBytecodeCount));
  __ BranchShort(&process_bytecode);

  __ bind(&extra_wide);
  __ AddWord(bytecode_offset, bytecode_offset, Operand(1));
  __ AddWord(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ AddWord(bytecode_size_table, bytecode_size_table,
             Operand(2 * kByteSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

#define JUMP_IF_EQUAL(NAME)                                                 \
  __ Branch(if_return, eq, bytecode,                                        \
            Operand(static_cast<int64_t>(interpreter::Bytecode::k##NAME)));

  Label not_jump_loop;
  __ Branch(&not_jump_loop, ne, bytecode,
            Operand(static_cast<int64_t>(interpreter::Bytecode::kJumpLoop)),
            Label::Distance::kNear);
  __ Move(bytecode_offset, original_bytecode_offset);
  __ BranchShort(&end);

  __ bind(&not_jump_loop);
  __ AddWord(scratch2, bytecode_size_table, bytecode);
  __ Lbu(scratch2, MemOperand(scratch2));
  __ AddWord(bytecode_offset, bytecode_offset, scratch2);
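// The next helpers reset per-function bookkeeping on entry: the
// SharedFunctionInfo age (consulted by bytecode flushing) and the feedback
// vector's OSR urgency bits.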
void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {

void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
                        Register scratch) {
  const Register shared_function_info(scratch);
  __ LoadTaggedField(
      shared_function_info,
      FieldMemOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, shared_function_info);
}

void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
                                   Register feedback_vector,
                                   Register scratch) {
  __ And(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
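// Generate_BaselineOutOfLinePrologue: the out-of-line prologue shared by all
// baseline (Sparkplug) code objects. It checks the tiering state, bumps the
// invocation count, builds the BASELINE frame, and performs the stack check.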
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
  Register closure = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kClosure);
  Register feedback_cell = temps.Acquire();
  Register feedback_vector = temps.Acquire();
  __ LoadTaggedField(feedback_cell,
                     FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
    UseScratchRegisterScope temp(masm);
    __ AssertFeedbackVector(feedback_vector, type);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
    UseScratchRegisterScope temps(masm);
    ResetFeedbackVectorOsrUrgency(masm, feedback_vector, temps.Acquire());
    UseScratchRegisterScope temps(masm);
    Register invocation_count = temps.Acquire();
    __ Lw(invocation_count,
          FieldMemOperand(feedback_vector,
                          FeedbackVector::kInvocationCountOffset));
    __ Add32(invocation_count, invocation_count, Operand(1));
    __ Sw(invocation_count,
          FieldMemOperand(feedback_vector,
                          FeedbackVector::kInvocationCountOffset));

    Register callee_context = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kCalleeContext);
    Register callee_js_function = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kClosure);
      UseScratchRegisterScope temps(masm);
      ResetJSFunctionAge(masm, callee_js_function, temps.Acquire());
    __ Push(callee_context, callee_js_function);

    Register argc = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
    Register bytecode_array = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
    __ Push(argc, bytecode_array, feedback_cell, feedback_vector);
      UseScratchRegisterScope temp(masm);
      __ AssertFeedbackVector(feedback_vector, type);

  Label call_stack_guard;
  Register frame_size = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
    UseScratchRegisterScope temps(masm);
    Register sp_minus_frame_size = temps.Acquire();
    __ SubWord(sp_minus_frame_size, sp, frame_size);
    Register interrupt_limit = temps.Acquire();
    __ Branch(&call_stack_guard, Uless, sp_minus_frame_size,
              Operand(interrupt_limit));

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);

  __ bind(&call_stack_guard);
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
#if defined(V8_ENABLE_LEAPTIERING) && defined(V8_TARGET_ARCH_RISCV64)
    static_assert(kJSDispatchHandleShift > 0);
    __ SmiTag(frame_size);
    __ Push(frame_size);
    __ CallRuntime(Runtime::kStackGuardWithGap);
#if defined(V8_ENABLE_LEAPTIERING) && defined(V8_TARGET_ARCH_RISCV64)
void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::BASELINE);
  __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
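// Generate_InterpreterEntryTrampoline: the interpreter's function entry. It
// locates the BytecodeArray (branching off for baseline code and lazy
// compilation), builds the interpreter frame, allocates and zeroes the
// register file, performs the stack check, and dispatches the first bytecode.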
void Builtins::Generate_InterpreterEntryTrampoline(
    MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
  __ LoadTaggedField(
      sfi, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, sfi);

  Label is_baseline, compile_lazy;
#ifdef V8_ENABLE_SANDBOX
  __ LoadParameterCountFromJSDispatchTable(a2, dispatch_handle, a6);
  __ Lhu(a6, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ SbxCheck(eq, AbortReason::kJSSignatureMismatch, a2, Operand(a6));

  Label push_stack_frame;
  __ LoadFeedbackVector(feedback_vector, closure, a6, &push_stack_frame);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
      &flags_need_processing);
  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, a4);
  __ Lw(a6, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));
  __ Add32(a6, a6, Operand(1));
  __ Sw(a6, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));

  __ bind(&push_stack_frame);
  __ PushStandardFrame(closure);

  Label stack_overflow;
  __ Lw(a6, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kFrameSizeOffset));
  __ SubWord(a5, sp, Operand(a6));
  __ Branch(&stack_overflow, Uless, a5, Operand(a2));

  Label loop_header, loop_check;
  __ BranchShort(&loop_check);
  __ bind(&loop_header);
  __ bind(&loop_check);
  __ Branch(&loop_header, ge, a6, Operand(zero_reg));

  Label no_incoming_new_target_or_generator_register;
  __ Lw(a5, FieldMemOperand(
                kInterpreterBytecodeArrayRegister,
                BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
            Operand(zero_reg));
  __ bind(&no_incoming_new_target_or_generator_register);

  Label stack_check_interrupt, after_stack_check_interrupt;
  __ Branch(&stack_check_interrupt, Uless, sp, Operand(a5),
            Label::Distance::kNear);
  __ bind(&after_stack_check_interrupt);

  __ bind(&do_dispatch);
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ RecordComment("--- InterpreterEntryReturnPC point ---");
  if (mode == InterpreterEntryTrampolineMode::kDefault) {
    masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
        masm->pc_offset());
  } else {
    CHECK_EQ(
        masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
        masm->pc_offset());
  }
  __ Branch(&do_dispatch);

  __ bind(&do_return);

  __ bind(&stack_check_interrupt);
  __ CallRuntime(Runtime::kStackGuard);
  __ Branch(&after_stack_check_interrupt);

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);

  __ bind(&is_baseline);
#ifndef V8_ENABLE_LEAPTIERING
  Label install_baseline_code;
  __ Branch(&install_baseline_code, ne, t0, Operand(FEEDBACK_VECTOR_TYPE));
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
  __ ReplaceClosureCodeWithOptimizedCode(a2, closure);

  __ bind(&install_baseline_code);
  __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);

  __ bind(&compile_lazy);
  __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
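// The InterpreterPushArgs* builtins push a run of arguments from the
// interpreter's register file onto the stack and invoke the target via
// Call/Construct (optionally with a spread), guarding against stack overflow.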
  __ SubWord(scratch, num_args, Operand(1));
  __ SubWord(start_address, start_address, scratch);
  __ PushArray(start_address, num_args,
               MacroAssembler::PushArrayOrder::kReverse);

  Label stack_overflow;
  __ StackOverflowCheck(a3, a4, t0, &stack_overflow);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ TailCallBuiltin(Builtin::kCallWithSpread);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  Label stack_overflow;
  __ StackOverflowCheck(a0, a5, t0, &stack_overflow);
  Register argc_without_receiver = a6;
  __ AssertUndefinedOrAllocationSite(a2, t0);
  __ AssertFunction(a1);
  __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
  __ TailCallBuiltin(Builtin::kConstructWithSpread);
  __ TailCallBuiltin(Builtin::kConstruct);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  Label stack_overflow;
  switch (which_frame) {
  __ StackOverflowCheck(a0, a5, t0, &stack_overflow);
  Register argc_without_receiver = a6;
  __ PushArray(a4, argc_without_receiver);
  __ TailCallBuiltin(Builtin::kConstruct);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  __ Push(a0, a1, a3);
  __ CallBuiltin(Builtin::kFastNewObject);
  __ mv(implicit_receiver, a0);
  __ StoreReceiver(implicit_receiver);
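// Generate_InterpreterPushArgsThenFastConstructFunction: a fast path for
// `new` on a plain JSFunction. Instead of the generic construct stub it
// builds a FAST_CONSTRUCT frame and creates the implicit receiver inline via
// NewImplicitReceiver (above) when the function requires one.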
void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
    MacroAssembler* masm) {
  __ AssertFunction(a1);

  Label non_constructor;
  __ And(a2, a2, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, a2, Operand(zero_reg));

  Label stack_overflow;
  __ StackOverflowCheck(a0, a2, a5, &stack_overflow);

  __ EnterFrame(StackFrame::FAST_CONSTRUCT);
  __ LoadRoot(a2, RootIndex::kTheHoleValue);
  Register argc_without_receiver = a7;

  Label builtin_call;
  __ And(a5, a2, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&builtin_call, ne, a5, Operand(zero_reg));

  Label not_create_implicit_receiver;
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(a2);
  __ JumpIfIsInRange(
      a2, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
      static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
      &not_create_implicit_receiver);
  NewImplicitReceiver(masm);
  __ bind(&not_create_implicit_receiver);

  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
      masm->pc_offset());

  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(a0, RootIndex::kUndefinedValue, &check_receiver);

  __ bind(&use_receiver);
  __ JumpIfRoot(a0, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ bind(&check_receiver);
  __ JumpIfSmi(a0, &use_receiver);
  __ JumpIfJSAnyIsNotPrimitive(a0, a4, &leave_and_return);
  __ Branch(&use_receiver);

  __ bind(&builtin_call);
  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ bind(&do_throw);
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
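// Generate_InterpreterEnterAtNextBytecode / AtBytecode: re-enter bytecode
// execution after a call or deopt. The code below locates the interpreter
// entry trampoline (or a custom trampoline from InterpreterData), restores
// the dispatch table and bytecode registers, and jumps to the handler.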
  Label builtin_trampoline, trampoline_loaded;
  Tagged<Smi> interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());

  __ LoadTrustedPointerField(
      t0, FieldMemOperand(t0, SharedFunctionInfo::kTrustedFunctionDataOffset),
      kUnknownIndirectPointerTag);
  __ LoadProtectedPointerField(
      t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
  __ BranchShort(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ li(t0,
        ExternalReference::
            address_of_interpreter_entry_trampoline_instruction_start(
                masm->isolate()));

  __ bind(&trampoline_loaded);
  __ AddWord(ra, t0, Operand(interpreter_entry_return_pc_offset.value()));

  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ Assert(ne,
            AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
            kScratchReg, Operand(zero_reg));
  __ Assert(eq,
            AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
            a1, Operand(BYTECODE_ARRAY_TYPE));

void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
  Label enter_bytecode, function_entry_bytecode;
  __ bind(&enter_bytecode);
  __ bind(&function_entry_bytecode);
  __ Branch(&enter_bytecode);

  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);

void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
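// Generate_ContinueToBuiltinHelper: used by deoptimization to resume a
// builtin continuation. It restores the allocatable registers spilled into
// the continuation frame, optionally plants the result value, and jumps to
// the builtin identified by the index left on the stack.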
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool javascript_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  UseScratchRegisterScope temp(masm);
  Register scratch = temp.Acquire();
  if (javascript_builtin) {
    __ Move(scratch, a0);
  }
      sp, config->num_allocatable_general_registers() *
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
  }
  if (with_result && javascript_builtin) {
    constexpr int return_value_offset =
    __ AddWord(a0, a0, Operand(return_value_offset));
    __ SubWord(a0, a0, Operand(return_value_offset));
  }
  __ LoadEntryFromBuiltinIndex(t6, t6);

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
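// On-stack replacement (OSR): Generate_OSREntry jumps into freshly compiled
// optimized code at its OSR entry point; OnStackReplacement fetches (or
// compiles) that code, optionally logging/tracing the transition first.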
void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
                       Operand offset = Operand(0)) {
  __ AddWord(ra, entry_address, offset);

enum class OsrSourceTier {

void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
                        Register maybe_target_code,
                        Register expected_param_count) {
  Label jump_to_optimized_code;
  __ CompareTaggedAndBranch(&jump_to_optimized_code, ne, maybe_target_code,
                            Operand(Smi::zero()));
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(expected_param_count);
    __ CallRuntime(Runtime::kCompileOptimizedOSR);
    __ Pop(expected_param_count);
  __ CompareTaggedAndBranch(&jump_to_optimized_code, ne, maybe_target_code,
                            Operand(Smi::zero()));

  __ bind(&jump_to_optimized_code);
  Label next;
  __ li(scratch, ExternalReference::address_of_log_or_trace_osr());
  __ Branch(&next, eq, scratch, Operand(zero_reg));
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(maybe_target_code, expected_param_count);
    __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
    __ Pop(maybe_target_code, expected_param_count);

  if (source == OsrSourceTier::kInterpreter) {
  __ LeaveFrame(StackFrame::STUB);
  __ SbxCheck(ne, AbortReason::kExpectedOsrCode, scratch,
              Operand(zero_reg));
  __ SbxCheck(eq, AbortReason::kOsrUnexpectedStackSize, scratch,
              Operand(expected_param_count));
  __ LoadProtectedPointerField(
      scratch,
      FieldMemOperand(maybe_target_code,
                      Code::kDeoptimizationDataOrInterpreterDataOffset));
  __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code,
                              kJSEntrypointTag);
  Generate_OSREntry(masm, maybe_target_code, Operand(scratch));

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kInterpreter,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}

void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kBaseline,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}
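// Maglev: the stack-guard-with-gap helper below is the slow path taken when
// a Maglev prologue's stack check fails; it preserves the (tagged) new
// target across the runtime call when requested.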
#ifdef V8_ENABLE_MAGLEV

void Builtins::Generate_MaglevFunctionEntryStackCheck(MacroAssembler* masm,
                                                      bool save_new_target) {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ AssertSmi(stack_size);
    if (save_new_target) {
      __ AssertSmiOrHeapObjectInMainCompressionCage(
          kJavaScriptCallNewTargetRegister);
      __ Push(kJavaScriptCallNewTargetRegister);
    }
    __ Push(stack_size);
    __ CallRuntime(Runtime::kStackGuardWithGap, 1);
    if (save_new_target) {
      __ Pop(kJavaScriptCallNewTargetRegister);
    }
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
    __ MoveIfZero(arg_array, undefined_value, scratch);
    __ MoveIfZero(this_arg, undefined_value, scratch);
    __ SubWord(scratch, scratch, Operand(1));
    __ MoveIfZero(arg_array, undefined_value, scratch);
  __ Move(arg_array, undefined_value);
  __ Move(arg_array, undefined_value);
  __ DropArgumentsAndPushNewReceiver(argc, this_arg);

  Label no_arguments;
  __ LoadRoot(scratch, RootIndex::kNullValue);
  __ CompareTaggedAndBranch(&no_arguments, eq, arg_array, Operand(scratch));
  __ CompareTaggedAndBranch(&no_arguments, eq, arg_array,
                            Operand(undefined_value));
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

  __ bind(&no_arguments);

void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  __ PushRoot(RootIndex::kUndefinedValue);
  __ AddWord(a0, a0, Operand(1));
  __ AddWord(a0, a0, -1);
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    __ MoveIfZero(arguments_list, undefined_value, scratch);
    __ MoveIfZero(this_argument, undefined_value, scratch);
    __ MoveIfZero(target, undefined_value, scratch);
    __ SubWord(scratch, scratch, Operand(1));
    __ MoveIfZero(arguments_list, undefined_value, scratch);
    __ MoveIfZero(this_argument, undefined_value, scratch);
    __ SubWord(scratch, scratch, Operand(1));
    __ MoveIfZero(arguments_list, undefined_value, scratch);

  Label done0, done1, done2;
  __ Move(arguments_list, undefined_value);
  __ Move(this_argument, undefined_value);
  __ Move(target, undefined_value);
  __ Move(arguments_list, undefined_value);
  __ Move(this_argument, undefined_value);
  __ Move(arguments_list, undefined_value);

  __ DropArgumentsAndPushNewReceiver(argc, this_argument);
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    __ MoveIfZero(arguments_list, undefined_value, scratch);
    __ MoveIfZero(target, undefined_value, scratch);
    __ SubWord(scratch, scratch, Operand(1));
    __ MoveIfZero(arguments_list, undefined_value, scratch);
    __ SubWord(scratch, scratch, Operand(1));

  Label done0, done1, done2;
  __ Move(arguments_list, undefined_value);
  __ Move(target, undefined_value);
  __ Move(arguments_list, undefined_value);

  __ DropArgumentsAndPushNewReceiver(argc, undefined_value);
  __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
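// Generate_AllocateSpaceAndShiftExistingArguments: opens a gap of `count`
// slots above the existing arguments by lowering sp and copying the current
// arguments (and receiver) down, so that spread/variadic arguments can be
// appended; argc_in_out is updated and pointer_to_new_space_out points at
// the freed gap.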
void Generate_AllocateSpaceAndShiftExistingArguments(
    MacroAssembler* masm, Register count, Register argc_in_out,
    Register pointer_to_new_space_out) {
  UseScratchRegisterScope temps(masm);
  Register scratch1 = temps.Acquire();
  Register scratch2 = temps.Acquire();
  Register scratch3 = temps.Acquire();
  __ SubWord(sp, sp, Operand(new_space));

  Register dest = pointer_to_new_space_out;
  __ Branch(&done, ge, old_sp, Operand(end));
  __ Branch(&loop, lt, old_sp, Operand(end));

  __ AddWord(argc_in_out, argc_in_out, count);
}

  __ AssertNotSmi(a2);
  __ Abort(AbortReason::kOperandIsNotAFixedArray);

  Label stack_overflow;
  __ StackOverflowCheck(len, kScratchReg, a5, &stack_overflow);
  Generate_AllocateSpaceAndShiftExistingArguments(masm, a4, a0, a7);

  Label done, push, loop;
  __ SubWord(scratch, sp, Operand(scratch));
#if !V8_STATIC_ROOTS_BOOL
  __ LoadTaggedRoot(hole_value, RootIndex::kTheHoleValue);
#if V8_STATIC_ROOTS_BOOL
  __ CompareRootAndBranch(a5, RootIndex::kTheHoleValue, ne, &push);
  __ CompareTaggedAndBranch(&push, ne, a5, Operand(hole_value));
  __ LoadRoot(a5, RootIndex::kUndefinedValue);
  __ TailCallBuiltin(target_builtin);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
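// CallOrConstructForwardVarargs: forwards rest arguments from the caller's
// frame. When constructing it first verifies that new.target is a
// constructor, then copies the leftover caller arguments into the gap opened
// by the helper above.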
  Label new_target_constructor, new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ And(scratch, scratch, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&new_target_constructor, ne, scratch, Operand(zero_reg),
            Label::Distance::kNear);

  __ bind(&new_target_not_constructor);
    __ EnterFrame(StackFrame::INTERNAL);
  __ bind(&new_target_constructor);

  Label stack_done, stack_overflow;
  __ SubWord(a7, a7, a2);
  __ Branch(&stack_done, le, a7, Operand(zero_reg));
  __ StackOverflowCheck(a7, a4, a5, &stack_overflow);
  Generate_AllocateSpaceAndShiftExistingArguments(masm, a7, a0, a2);

  __ bind(&stack_done);
  __ TailCallBuiltin(target_builtin);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
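// Generate_CallFunction: the [[Call]] path for JSFunctions. In sloppy mode
// the receiver is converted to an object (null/undefined become the global
// proxy) unless the function is native or strict; then the function's code
// is invoked.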
  __ AssertFunction(a1);
          Operand(SharedFunctionInfo::IsNativeBit::kMask |
                  SharedFunctionInfo::IsStrictBit::kMask));
    __ LoadGlobalProxy(a3);
    Label convert_to_object, convert_receiver;
    __ LoadReceiver(a3);
    __ JumpIfSmi(a3, &convert_to_object);
    __ JumpIfJSAnyIsNotPrimitive(a3, a4, &done_convert);
      Label convert_global_proxy;
      __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
      __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
      __ bind(&convert_global_proxy);
      __ LoadGlobalProxy(a3);
      __ Branch(&convert_receiver);
    __ bind(&convert_to_object);
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallBuiltin(Builtin::kToObject);
    __ bind(&convert_receiver);
    __ StoreReceiver(a3);
  __ bind(&done_convert);

#if defined(V8_ENABLE_LEAPTIERING) && defined(V8_TARGET_ARCH_RISCV64)
  __ Lhu(a2,
         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));

  temps.Include(t0, t1);
  Label no_bound_arguments;
  __ LoadTaggedField(
      bound_argv, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntagField(bound_argc,
                   FieldMemOperand(bound_argv, offsetof(FixedArray, length_)));
  __ Branch(&no_bound_arguments, eq, bound_argc, Operand(zero_reg));
    Register scratch = temps.Acquire();
    __ StackOverflowCheck(a4, temps.Acquire(), temps.Acquire(), nullptr,
    __ EnterFrame(StackFrame::INTERNAL);
    Label loop, done_loop;
    __ AddWord(a0, a0, Operand(a4));
    __ SubWord(a4, a4, Operand(1));
    __ bind(&done_loop);
  __ bind(&no_bound_arguments);
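// Generate_CallBoundFunctionImpl: patches the receiver to the bound `this`
// value, pushes the bound arguments (handled above), and tail-calls through
// to the bound target function.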
  __ AssertBoundFunction(a1);
  __ LoadTaggedField(scratch,
                     FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
  __ StoreReceiver(scratch);
  Generate_PushBoundArguments(masm);
  __ LoadTaggedField(
      a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
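// Generate_Call: the generic Call builtin. It dispatches on the target's
// instance type (JSFunction, bound function, proxy, wrapped function), falls
// back to the CALL_AS_FUNCTION delegate for other callables, and throws for
// non-callables and class constructors.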
  Label non_callable, class_constructor;
  __ JumpIfSmi(target, &non_callable);
  __ LoadMap(map, target);
  __ TailCallBuiltin(Builtin::kCallBoundFunction, eq, instance_type,
                     Operand(JS_BOUND_FUNCTION_TYPE));

  __ And(flags, flags, Operand(Map::Bits1::IsCallableBit::kMask));
  __ Branch(&non_callable, eq, flags, Operand(zero_reg));

  __ TailCallBuiltin(Builtin::kCallProxy, eq, instance_type,
                     Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq, instance_type,
                     Operand(JS_WRAPPED_FUNCTION_TYPE));
  __ Branch(&class_constructor, eq, instance_type,
            Operand(JS_CLASS_CONSTRUCTOR_TYPE));

  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);

  __ bind(&non_callable);
    FrameScope scope(masm, StackFrame::INTERNAL);

  __ bind(&class_constructor);
    FrameScope frame(masm, StackFrame::INTERNAL);
  __ AssertConstructor(a1);
  __ AssertFunction(a1);
  __ LoadRoot(a2, RootIndex::kUndefinedValue);

  Label call_generic_stub;
  __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg),
            Label::Distance::kNear);
  __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);

  __ bind(&call_generic_stub);
  __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);

void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  __ AssertBoundFunction(a1);
  Generate_PushBoundArguments(masm);

  Label skip;
  __ CompareTaggedAndBranch(&skip, ne, a1, Operand(a3));
  __ LoadTaggedField(
      a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);
  __ LoadTaggedField(
      a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ TailCallBuiltin(Builtin::kConstruct);
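// Generate_Construct: the generic Construct builtin; the [[Construct]]
// analogue of Generate_Call, with a proxy path and the
// CALL_AS_CONSTRUCTOR delegate for other constructors.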
void Builtins::Generate_Construct(MacroAssembler* masm) {
  Register target = a1;
  Register instance_type = t2;
  Register scratch = t6;

  Label non_constructor, non_proxy;
  __ JumpIfSmi(target, &non_constructor);

  Register flags = t3;
  __ And(flags, flags, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, flags, Operand(zero_reg));

  __ GetInstanceTypeRange(map, instance_type, FIRST_JS_FUNCTION_TYPE, scratch);
  __ TailCallBuiltin(Builtin::kConstructFunction, Uless_equal, scratch,
                     Operand(LAST_JS_FUNCTION_TYPE - FIRST_JS_FUNCTION_TYPE));
  __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq, instance_type,
                     Operand(JS_BOUND_FUNCTION_TYPE));
  __ Branch(&non_proxy, ne, instance_type, Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kConstructProxy);

  __ bind(&non_proxy);
  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target,
                           Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
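// ----- WebAssembly builtins -----
// The kSavedGpRegs/kSavedFpRegs lists below mirror the Liftoff parameter
// registers, so that WasmLiftoffFrameSetup and WasmCompileLazy spill and
// restore exactly the registers that may hold Wasm call arguments.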
#if V8_ENABLE_WEBASSEMBLY

constexpr RegList kSavedGpRegs = ([]() constexpr {
                "frame size mismatch");
    saved_gp_regs.set(gp_param_reg);
                saved_gp_regs.Count());
  return saved_gp_regs;

                "frame size mismatch");
    saved_fp_regs.set(fp_param_reg);
                saved_fp_regs.Count());
  return saved_fp_regs;
void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
  Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
  Label allocate_vector, done;
                      WasmTrustedInstanceData::kFeedbackVectorsOffset));
  __ LoadTaggedField(vector,
  __ JumpIfSmi(vector, &allocate_vector);

  __ bind(&allocate_vector);
  __ MultiPush(kSavedGpRegs);
  __ MultiPushFPU(kSavedFpRegs);
  __ SmiTag(func_index);
  __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
  __ MultiPopFPU(kSavedFpRegs);
  __ MultiPop(kSavedGpRegs);
              MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
    HardAbortScope hard_abort(masm);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ MultiPush(kSavedGpRegs);
    __ MultiPushFPU(kSavedFpRegs);
    __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    CHECK(!kSavedGpRegs.has(s1));
    __ MultiPopFPU(kSavedFpRegs);
    __ MultiPop(kSavedGpRegs);
                WasmTrustedInstanceData::kJumpTableStartOffset));

void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);
    FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
    __ CallRuntime(Runtime::kWasmDebugBreak, 0);
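// Central-stack switching: when Wasm code running on a secondary stack needs
// to call into C++ (e.g. through CEntry), these helpers move sp over to the
// isolate's central stack and back, keeping the simulator's stack limit in
// sync when running under the simulator.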
void SwitchSimulatorStackLimit(MacroAssembler* masm) {
  if (masm->options().enable_simulator_code) {
    UseScratchRegisterScope temps(masm);
    __ RecordComment("-- Set simulator stack limit --");
  }
}

static constexpr Register kOldSPRegister = s9;
static constexpr Register kSwitchFlagRegister = s10;

void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
                                     Register target_input,
                                     Register argv_input) {
  using ER = ExternalReference;

  __ li(kSwitchFlagRegister, 0);
  __ mv(kOldSPRegister, sp);

  ER on_central_stack_flag_loc = ER::Create(
      IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
  const Register& on_central_stack_flag = a2;
  __ li(on_central_stack_flag, on_central_stack_flag_loc);
  __ Lb(on_central_stack_flag, MemOperand(on_central_stack_flag));

  Label do_not_need_to_switch;
  __ Branch(&do_not_need_to_switch, ne, on_central_stack_flag,
            Operand(zero_reg));

  static constexpr Register central_stack_sp = a4;
  DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
  __ Push(argc_input, target_input, argv_input);
  __ PrepareCallCFunction(2, argc_input);
  __ li(kCArgRegs[0], ER::isolate_address(masm->isolate()));
  __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
                   SetIsolateDataSlots::kNo);
  __ Pop(argc_input, target_input, argv_input);

  SwitchSimulatorStackLimit(masm);
  __ SubWord(sp, central_stack_sp, kReturnAddressSlotOffset + kPadding);

#ifdef V8_TARGET_ARCH_RISCV32
  __ EnforceStackAlignment();
#endif
  __ li(kSwitchFlagRegister, 1);

  __ bind(&do_not_need_to_switch);
}

void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
  using ER = ExternalReference;

  Label no_stack_change;
  __ Branch(&no_stack_change, eq, kSwitchFlagRegister, Operand(zero_reg));
  __ li(kCArgRegs[0], ER::isolate_address(masm->isolate()));
  __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
                   SetIsolateDataSlots::kNo);
  SwitchSimulatorStackLimit(masm);
  __ mv(sp, kOldSPRegister);

  __ bind(&no_stack_change);
}
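// Generate_CEntry: calls a C++ runtime function. a0 holds argc, a1 the
// target, s1 the argv pointer; the result comes back in a0/a1. If the callee
// returns the exception sentinel, the pending handler is looked up and
// control transfers to its entrypoint.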
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               ArgvMode argv_mode, bool builtin_exit_frame,
                               bool switch_to_central_stack) {
  using ER = ExternalReference;

  static constexpr Register argc_input = a0;
  static constexpr Register target_input = a1;
  static constexpr Register argv_input = s1;
  static constexpr Register argc_sav = s3;
  static constexpr Register scratch = a3;
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  __ Move(argc_sav, argc_input);
  __ Move(s2, target_input);
  __ AssertStackIsAligned();

#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchToTheCentralStackIfNeeded(masm, argc_input, target_input, argv_input);
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  __ li(a2, ER::isolate_address(masm->isolate()));
  __ StoreReturnAddressAndCall(s2);

  Label exception_returned;
  __ CompareRootAndBranch(a0, RootIndex::kException, eq, &exception_returned,
                          ComparisonMode::kFullPointer);

#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchFromTheCentralStackIfNeeded(masm);
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  __ LeaveExitFrame(scratch);
  __ DropArguments(argc_sav);

  __ bind(&exception_returned);

  ER pending_handler_context_address = ER::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ER pending_handler_entrypoint_address = ER::Create(
      IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ER pending_handler_fp_address =
      ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ER pending_handler_sp_address =
      ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  ER find_handler = ER::Create(Runtime::kUnwindAndFindExceptionHandler);
    __ PrepareCallCFunction(3, 0, a0);
    __ Move(a0, zero_reg);
    __ Move(a1, zero_reg);
    __ li(a2, ER::isolate_address());

  __ li(cp, pending_handler_context_address);
  __ li(sp, pending_handler_sp_address);
  __ li(fp, pending_handler_fp_address);

  ER c_entry_fp_address =
      ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ StoreWord(zero_reg,
               __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));

  __ LoadWord(scratch,
              __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
                                            no_reg));
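// Generate_WasmHandleStackOverflow: tries to grow the Wasm stack via the C
// function wasm_grow_stack; on success it switches fp/sp onto the new
// segment, otherwise it falls back to Runtime::kWasmStackGuard.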
#if V8_ENABLE_WEBASSEMBLY
void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
  Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallCFunction(ER::wasm_grow_stack(), 5);
    UseScratchRegisterScope temps(masm);
    Register new_fp = temps.Acquire();
    __ SubWord(new_fp, fp, sp);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    __ StoreWord(scratch,

  __ bind(&call_runtime);
              MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
                        WasmTrustedInstanceData::kNativeContextOffset));
  __ EnterFrame(StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kWasmStackGuard);
  __ LeaveFrame(StackFrame::INTERNAL);
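// Generate_DoubleToI: converts a double on the stack into a 32-bit integer
// with JavaScript ToInt32 semantics (modulo 2^32). Exponents too large for a
// simple truncating conversion are handled by decomposing the IEEE-754 bits
// and shifting mantissa words manually.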
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  __ Push(result_reg);
  __ Push(scratch, scratch2, scratch3);
  __ LoadDouble(double_scratch, MemOperand(sp, kArgumentOffset));
  __ Trunc_w_d(scratch3, double_scratch, scratch);
  __ Move(result_reg, scratch3);

  Label normal_exponent;
  __ LoadZeroIfConditionZero(
  __ Branch(&done, eq, scratch, Operand(zero_reg));
  __ Sub32(result_reg, result_reg,
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg),
            Label::Distance::kNear);
  __ Move(result_reg, zero_reg);

  __ bind(&normal_exponent);
  __ Add32(scratch, result_reg,

  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32),
            Label::Distance::kNear);
  __ Move(input_high, zero_reg);
  __ BranchShort(&high_shift_done);
  __ bind(&high_shift_needed);
  __ Or(input_high, input_high,
  __ Sll32(input_high, input_high, scratch);
  __ bind(&high_shift_done);

  Label pos_shift, shift_done, sign_negative;
  __ Sub32(scratch, zero_reg, scratch);
  __ Sll32(input_low, input_low, scratch);
  __ BranchShort(&shift_done);
  __ bind(&pos_shift);
  __ Srl32(input_low, input_low, scratch);
  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));

  __ Sub32(result_reg, zero_reg, input_high);
  __ Branch(&sign_negative, ne, scratch, Operand(zero_reg),
            Label::Distance::kNear);
  __ Move(result_reg, input_high);
  __ bind(&sign_negative);

  __ StoreWord(result_reg, MemOperand(sp, kArgumentOffset));
  __ Pop(scratch, scratch2, scratch3);
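// Generate_WasmToJsWrapperAsm: spills the Wasm argument registers to the
// stack in the layout expected by the CSA wrapper, then tail-calls
// Builtin::kWasmToJsWrapperCSA.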
void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
  __ SubWord(sp, sp, Operand(required_stack_space));
  __ SubWord(sp, sp, Operand(required_stack_space));
  __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);

void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap);
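// Stack switching (JS Promise Integration): a suspendable Wasm stack is
// described by a jump buffer (sp, fp, pc, stack limit, state). The helpers
// below fill and load jump buffers and flip the active stack, asserting
// valid state transitions when the sandbox is enabled.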
void SwitchStackState(MacroAssembler* masm, Register jmpbuf, Register tmp,
                      wasm::JumpBuffer::StackState old_state,
                      wasm::JumpBuffer::StackState new_state) {
#if V8_ENABLE_SANDBOX
  __ Lw(tmp, MemOperand(jmpbuf, wasm::kJmpBufStateOffset));
  Label ok;
  __ Branch(&ok, eq, tmp, Operand(old_state));
  __ Trap();
  __ bind(&ok);
#endif
  __ li(tmp, new_state);
  __ Sw(tmp, MemOperand(jmpbuf, wasm::kJmpBufStateOffset));
}

void SwitchStackPointerAndSimulatorStackLimit(MacroAssembler* masm,
                                              Register jmpbuf) {
  if (masm->options().enable_simulator_code) {
    UseScratchRegisterScope temps(masm);
    __ LoadWord(sp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
                MemOperand(jmpbuf, wasm::kJmpBufStackLimitOffset));
  } else {
    __ LoadWord(sp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
  }
}

void FillJumpBuffer(MacroAssembler* masm, Register jmpbuf, Label* pc,
                    Register tmp) {
  __ mv(tmp, sp);
  __ StoreWord(tmp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
  __ StoreWord(fp, MemOperand(jmpbuf, wasm::kJmpBufFpOffset));
  __ LoadStackLimit(tmp, StackLimitKind::kRealStackLimit);
  __ StoreWord(tmp, MemOperand(jmpbuf, wasm::kJmpBufStackLimitOffset));
  __ LoadAddress(tmp, pc);
  __ StoreWord(tmp, MemOperand(jmpbuf, wasm::kJmpBufPcOffset));
}

void LoadJumpBuffer(MacroAssembler* masm, Register jmpbuf, bool load_pc,
                    Register tmp,
                    wasm::JumpBuffer::StackState expected_state) {
  SwitchStackPointerAndSimulatorStackLimit(masm, jmpbuf);
  __ LoadWord(fp, MemOperand(jmpbuf, wasm::kJmpBufFpOffset));
  if (load_pc) {
    __ LoadWord(tmp, MemOperand(jmpbuf, wasm::kJmpBufPcOffset));
    __ Jump(tmp);
  }
}
void SwitchStacks(MacroAssembler* masm, Register old_continuation,
                  bool return_switch, Register tmp,
                  const std::initializer_list<Register> keep) {
  using ER = ExternalReference;
  for (auto reg : keep) {
    __ Push(reg);
  }
  __ PrepareCallCFunction(2, tmp);
  __ CallCFunction(
      return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
  for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
    __ Pop(*it);
  }
}

void ReloadParentContinuation(MacroAssembler* masm, Register return_reg,
                              Register return_value, Register context,
                              Register tmp1, Register tmp2, Register tmp3) {
  Register active_continuation = tmp1;
  __ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
      jmpbuf,
      FieldMemOperand(active_continuation,
                      WasmContinuationObject::kStackOffset),
      kWasmContinuationJmpbufTag);
  __ StoreWord(zero_reg, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();

  Register parent = tmp2;
  __ LoadTaggedField(parent,
                     FieldMemOperand(active_continuation,
                                     WasmContinuationObject::kParentOffset));

  int32_t active_continuation_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
      jmpbuf, FieldMemOperand(parent, WasmContinuationObject::kStackOffset),
      kWasmContinuationJmpbufTag);
  SwitchStacks(masm, active_continuation, true, tmp3,
               {return_reg, return_value, context, jmpbuf});
}

void RestoreParentSuspender(MacroAssembler* masm, Register tmp1,
                            Register tmp2) {
  Register suspender = tmp1;
  __ LoadRoot(suspender, RootIndex::kActiveSuspender);
  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);
}
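// RegisterAllocator: a small helper used by the stack-switching builtins to
// hand out the allocatable general registers by name, so the code below can
// pin, reserve, and free registers explicitly instead of hard-coding them.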
class RegisterAllocator {
 public:
  class Scoped {
   public:
    Scoped(RegisterAllocator* allocator, Register* reg)

  void Ask(Register* reg) {

  bool registerIsAvailable(const Register& reg) { return available_.has(reg); }

  void Pinned(const Register& requested, Register* reg) {
    if (!registerIsAvailable(requested)) {
      printf("%s register is occupied!", RegisterName(requested));
    }
    DCHECK(registerIsAvailable(requested));

  void Reserve(const Register& reg) {

  void Reserve(const Register& reg1, const Register& reg2,
               const Register& reg3 = no_reg, const Register& reg4 = no_reg,
               const Register& reg5 = no_reg, const Register& reg6 = no_reg) {

  bool IsUsed(const Register& reg) {

  void ResetExcept(const Register& reg1 = no_reg,
                   const Register& reg2 = no_reg,
                   const Register& reg3 = no_reg,
                   const Register& reg4 = no_reg,
                   const Register& reg5 = no_reg,
                   const Register& reg6 = no_reg) {
      if (registerIsAvailable(**it)) {

  static RegisterAllocator WithAllocatableGeneralRegisters() {
    RegList list;
    const RegisterConfiguration* config(RegisterConfiguration::Default());
    for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
      int code = config->GetAllocatableGeneralCode(i);
      Register candidate = Register::from_code(code);
      list.set(candidate);
    }
    return RegisterAllocator(list);
  }

#define DEFINE_REG(Name)  \
  Register Name = no_reg; \
  regs.Ask(&Name);

#define ASSIGN_REG(Name) regs.Ask(&Name);

#define DEFINE_PINNED(Name, Reg) \
  Register Name = no_reg;        \
  regs.Pinned(Reg, &Name);

#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);

#define DEFINE_SCOPED(Name) \
  DEFINE_REG(Name)          \
  RegisterAllocator::Scoped scope_##Name(&regs, &Name);

#define FREE_REG(Name) regs.Free(&Name);
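// STACK_SWITCH frames carry two tagged slots (result array and implicit arg)
// that the GC scans; they are zeroed on entry so stale values are never
// visited.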
void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
  __ StoreWord(zero_reg,
               MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
  __ StoreWord(zero_reg,
               MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
}

void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_continuation,
                          Register tmp,
                          wasm::JumpBuffer::StackState expected_state) {
  Register target_jmpbuf = target_continuation;
  __ LoadExternalPointerField(
      target_jmpbuf,
      FieldMemOperand(target_continuation,
                      WasmContinuationObject::kStackOffset),
  __ StoreWord(
      zero_reg,
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  LoadJumpBuffer(masm, target_jmpbuf, false, tmp, expected_state);
}

void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadExternalPointerField(
  FillJumpBuffer(masm, jmpbuf, &resume, scratch);
  __ LoadTaggedField(
      suspender_continuation,
      FieldMemOperand(suspender, WasmSuspenderObject::kContinuationOffset));
  __ LoadTaggedField(caller,
                     FieldMemOperand(suspender_continuation,
                                     WasmContinuationObject::kParentOffset));
  int32_t active_continuation_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveContinuation);
  __ LoadTaggedField(
      parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);
  SwitchStacks(masm, continuation, false, caller, {caller, suspender});
  __ LoadExternalPointerField(
      jmpbuf, FieldMemOperand(caller, WasmContinuationObject::kStackOffset),
  MemOperand GCScanSlotPlace =
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ StoreWord(zero_reg, GCScanSlotPlace);
  __ LeaveFrame(StackFrame::STACK_SWITCH);
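// Generate_WasmResumeHelper: shared body of WasmResume and WasmReject. It
// fills the current continuation's jump buffer, re-links the suspender
// chain, and switches to the suspended continuation, either delivering the
// value or throwing it depending on wasm::OnResume.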
void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  UseScratchRegisterScope temps(masm);
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  regs.ResetExcept(closure);
      FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
  regs.ResetExcept(suspender);

  __ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
      current_jmpbuf,
      FieldMemOperand(active_continuation,
                      WasmContinuationObject::kStackOffset),
  __ AddWord(current_jmpbuf, current_jmpbuf,
  FillJumpBuffer(masm, current_jmpbuf, &suspend, scratch);

  __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
  __ StoreTaggedField(
  __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);

  suspender = target_continuation;
  __ LoadTaggedField(
      target_continuation,
      FieldMemOperand(suspender, WasmSuspenderObject::kContinuationOffset));
  __ StoreTaggedField(active_continuation,
                      FieldMemOperand(target_continuation,
                                      WasmContinuationObject::kParentOffset));
  __ mv(old_continuation, active_continuation);
  __ RecordWriteField(
      target_continuation, WasmContinuationObject::kParentOffset,
  int32_t active_continuation_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveContinuation);
  __ StoreWord(target_continuation,

  SwitchStacks(masm, old_continuation, false, scratch, {target_continuation});
  regs.ResetExcept(target_continuation);

  __ LoadExternalPointerField(
      target_jmpbuf,
      FieldMemOperand(target_continuation,
                      WasmContinuationObject::kStackOffset),
  MemOperand GCScanSlotPlace =
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ StoreWord(zero_reg, GCScanSlotPlace);

  if (on_resume == wasm::OnResume::kThrow) {
    LoadJumpBuffer(masm, target_jmpbuf, false, scratch,
                   wasm::JumpBuffer::Suspended);
    __ LeaveFrame(StackFrame::STACK_SWITCH);
    __ CallRuntime(Runtime::kThrow);
  } else {
    LoadJumpBuffer(masm, target_jmpbuf, true, scratch,
                   wasm::JumpBuffer::Suspended);
  }

  __ bind(&suspend);
  __ LeaveFrame(StackFrame::STACK_SWITCH);
void Builtins::Generate_WasmResume(MacroAssembler* masm) {
  Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
}

void Builtins::Generate_WasmReject(MacroAssembler* masm) {
  Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
}

void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
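// JS-to-Wasm wrapper with stack switching: SwitchToAllocatedStack saves the
// parent continuation's state, switches to the target stack, and re-creates
// the wrapper buffer there; GetContextFromImplicitArg extracts the native
// context from either a WasmTrustedInstanceData or an import data object.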
void SaveState(MacroAssembler* masm, Register active_continuation,
               Register tmp, Label* suspend) {
  __ LoadExternalPointerField(
      jmpbuf,
      FieldMemOperand(active_continuation,
                      WasmContinuationObject::kStackOffset),
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  FillJumpBuffer(masm, jmpbuf, suspend, scratch);
}

void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
                            Register wasm_instance, Register wrapper_buffer,
                            Register& original_fp,
                            Register& new_wrapper_buffer, Label* suspend) {
  UseScratchRegisterScope temps(masm);
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadRoot(target_continuation, RootIndex::kActiveContinuation);
  __ LoadTaggedField(parent_continuation,
                     FieldMemOperand(target_continuation,
                                     WasmContinuationObject::kParentOffset));
  SaveState(masm, parent_continuation, scratch, suspend);
  SwitchStacks(masm, parent_continuation, false, scratch,
               {wasm_instance, wrapper_buffer});
  regs.Pinned(t4, &original_fp);
  __ mv(original_fp, fp);
  __ LoadRoot(target_continuation, RootIndex::kActiveContinuation);
  LoadTargetJumpBuffer(masm, target_continuation, scratch,
                       wasm::JumpBuffer::Suspended);

  __ EnterFrame(StackFrame::STACK_SWITCH);

  int stack_space =
      RoundUp(StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
                  JSToWasmWrapperFrameConstants::kWrapperBufferSize,
              16);
  __ SubWord(sp, sp, Operand(stack_space));
  __ mv(new_wrapper_buffer, sp);

  static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
                JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
                JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
}

void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
                               Register scratch) {
  __ GetInstanceTypeRange(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE,
                          scratch);
  Label instance;
  __ Branch(&instance, eq, scratch, Operand(zero_reg));
  __ LoadTaggedField(
      data,
      FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
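// SwitchBackAndReturnPromise / GenerateExceptionHandlingLandingPad: on
// returning from (or throwing out of) a suspendable export, switch back to
// the parent continuation and fulfill or reject the associated promise.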
void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
                                wasm::Promise mode, Label* return_promise) {
  UseScratchRegisterScope temps(masm);
  static const Builtin_FulfillPromise_InterfaceDescriptor desc;
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
  __ LoadTaggedField(
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
               MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentContinuation(masm, promise, return_value, kContextRegister, tmp,
                           tmp2, tmp3);
  RestoreParentSuspender(masm, tmp, tmp2);
  __ StoreWord(
      tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ CallBuiltin(Builtin::kFulfillPromise);

  __ bind(return_promise);
}

void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
                                         RegisterAllocator& regs,
                                         Label* return_promise) {
  static const Builtin_RejectPromise_InterfaceDescriptor desc;
  __ LoadWord(thread_in_wasm_flag_addr,
  __ StoreWord(zero_reg, MemOperand(thread_in_wasm_flag_addr, 0));
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
  __ LoadTaggedField(
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
               MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentContinuation(masm, promise, reason, kContextRegister, tmp, tmp2,
                           tmp3);
  RestoreParentSuspender(masm, tmp, tmp2);
  __ StoreWord(
      tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ LoadRoot(debug_event, RootIndex::kTrueValue);
  __ CallBuiltin(Builtin::kRejectPromise);
  __ jmp(return_promise);

  masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
}
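// JSToWasmWrapperHelper: the shared body of the JS-to-Wasm wrappers. It
// copies the signature-described parameters from the wrapper buffer into
// registers and onto the stack, calls the Wasm code pointer, stores the
// results back into the buffer, and hands off to
// Builtin::kJSToWasmHandleReturns.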
4334void JSToWasmWrapperHelper(MacroAssembler* masm,
wasm::Promise mode) {
4336 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
4338 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
4339 : StackFrame::JS_TO_WASM);
4349 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
4358 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
4359 original_fp, new_wrapper_buffer, &suspend);
4362 new_wrapper_buffer = wrapper_buffer;
4365 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
4366 new_wrapper_buffer);
4371 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
4375 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
4376 UseScratchRegisterScope temps(masm);
4377 Register scratch = temps.Acquire();
4381 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
4384 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
4391 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
4392 kWrapperBufferStackReturnBufferSize));
4394 __ SubWord(sp, sp, Operand(result_size));
4401 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
4412 int stack_params_offset =
4421 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
4428 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
4431 __ AddWord(last_stack_param, params_start, Operand(stack_params_offset));
4433 __ bind(&loop_start);
4435 Label finish_stack_params;
4436 __ Branch(&finish_stack_params,
ge, last_stack_param,
4437 Operand(params_end));
4446 __ Branch(&loop_start);
4448 __ bind(&finish_stack_params);
4466 DCHECK_EQ(next_offset, stack_params_offset);
4471 __ LoadWord(thread_in_wasm_flag_addr,
4476 __ Sw(scratch,
MemOperand(thread_in_wasm_flag_addr, 0));
4480 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
4483 __ LoadWasmCodePointer(
4486 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
4492 __ CallWasmCodePointerNoSignatureCheck(call_target);
  {
    // Clear the thread_in_wasm flag again now that the wasm call returned.
    UseScratchRegisterScope temps(masm);
    Register thread_in_wasm_flag_addr = temps.Acquire();
    __ LoadWord(thread_in_wasm_flag_addr,
                MemOperand(kRootRegister,
                           Isolate::thread_in_wasm_flag_address_offset()));
    __ Sw(zero_reg, MemOperand(thread_in_wasm_flag_addr, 0));
  }
  // Spill the return registers into the wrapper buffer.
  __ LoadWord(
      wrapper_buffer,
      MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
  __ StoreDouble(wasm::kFpReturnRegisters[0],
                 MemOperand(wrapper_buffer,
                            JSToWasmWrapperFrameConstants::
                                kWrapperBufferFPReturnRegister1));
  __ StoreDouble(wasm::kFpReturnRegisters[1],
                 MemOperand(wrapper_buffer,
                            JSToWasmWrapperFrameConstants::
                                kWrapperBufferFPReturnRegister2));
  __ StoreWord(wasm::kGpReturnRegisters[0],
               MemOperand(wrapper_buffer,
                          JSToWasmWrapperFrameConstants::
                              kWrapperBufferGPReturnRegister1));
  __ StoreWord(wasm::kGpReturnRegisters[1],
               MemOperand(wrapper_buffer,
                          JSToWasmWrapperFrameConstants::
                              kWrapperBufferGPReturnRegister2));
  // Set up the arguments for Builtin::kJSToWasmHandleReturns:
  // a0 = implicit arg (to locate the context), a1 = result array.
  if (stack_switch) {
    __ LoadWord(a1,
                MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
    __ LoadWord(a0,
                MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  } else {
    __ LoadWord(
        a1,
        MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
    __ LoadWord(
        a0, MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
  }
  {
    UseScratchRegisterScope temps(masm);
    GetContextFromImplicitArg(masm, a0, temps.Acquire());
  }
  __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
  Label return_promise;
  if (stack_switch) {
    SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
  }
  __ bind(&suspend);
  __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);
  // Drop the two stack arguments of the wrapper before returning to JS.
  constexpr int64_t stack_arguments_in = 2;
  __ DropArguments(stack_arguments_in);
  __ Ret();

  if (mode == wasm::kPromise) {
    GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
  }
}
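// The three JS-to-wasm entry points below share JSToWasmWrapperHelper and
// differ only in the wasm::Promise mode they pass.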
void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
  temps.Include(t1, t2);
  JSToWasmWrapperHelper(masm, wasm::kNoPromise);
}

void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
  temps.Include(t1, t2);
  JSToWasmWrapperHelper(masm, wasm::kPromise);
}

void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
  temps.Include(t1, t2);
  JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
}
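// Generate_CallApiCallbackImpl: calls a C++ v8::FunctionCallback. It builds
// the FunctionCallbackArguments array on the stack (layout asserted below),
// creates a v8::FunctionCallbackInfo inside an API_CALLBACK_EXIT frame, and
// hands control to CallApiFunctionAndReturn.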
void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
                                            CallApiCallbackMode mode) {
  // (Register declarations for argc, api_function_address, func_templ,
  // topmost_script_having_context, and scratch are elided in this excerpt.)
  switch (mode) {
    case CallApiCallbackMode::kGeneric:
      argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
      topmost_script_having_context = CallApiCallbackGenericDescriptor::
          TopmostScriptHavingContextRegister();
      func_templ =
          CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
      break;
    case CallApiCallbackMode::kOptimizedNoProfiling:
    case CallApiCallbackMode::kOptimized:
      // Caller context is always equal to current context because we don't
      // inline Api calls cross-context.
      topmost_script_having_context = kContextRegister;
      api_function_address =
          CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
      argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
      func_templ =
          CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
      break;
  }
  DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
                     func_templ, scratch));

  using FCA = FunctionCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiCallbackExitFrameConstants;

  static_assert(FCA::kArgsLength == 6);
  static_assert(FCA::kNewTargetIndex == 5);
  static_assert(FCA::kTargetIndex == 4);
  static_assert(FCA::kReturnValueIndex == 3);
  static_assert(FCA::kContextIndex == 2);
  static_assert(FCA::kIsolateIndex == 1);
  static_assert(FCA::kUnusedIndex == 0);

  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
                       topmost_script_having_context);

  // Reserve space on the stack for the implicit arguments array.
  static constexpr int kStackSize = FCA::kArgsLength;
  static_assert(kStackSize % 2 == 0);
  __ SubWord(sp, sp, Operand(kStackSize * kSystemPointerSize));

  // kIsolate.
  __ li(scratch, ER::isolate_address());
  __ StoreWord(scratch,
               MemOperand(sp, FCA::kIsolateIndex * kSystemPointerSize));
  // kContext.
  __ StoreWord(kContextRegister,
               MemOperand(sp, FCA::kContextIndex * kSystemPointerSize));
  // kReturnValue.
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ StoreWord(scratch,
               MemOperand(sp, FCA::kReturnValueIndex * kSystemPointerSize));
  // kTarget.
  __ StoreWord(func_templ,
               MemOperand(sp, FCA::kTargetIndex * kSystemPointerSize));
  // kNewTarget.
  __ StoreWord(scratch,
               MemOperand(sp, FCA::kNewTargetIndex * kSystemPointerSize));

  if (mode == CallApiCallbackMode::kGeneric) {
    __ LoadExternalPointerField(
        api_function_address,
        FieldMemOperand(func_templ,
                        FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
        kFunctionTemplateInfoCallbackTag);
  }

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_CALLBACK_EXIT);

  MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);
  {
    // FunctionCallbackInfo::length_.
    __ StoreWord(argc, argc_operand);
    // FunctionCallbackInfo::implicit_args_.
    __ AddWord(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
    __ StoreWord(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));
    // FunctionCallbackInfo::values_ (points at the first JS argument).
    __ AddWord(scratch, fp, Operand(FC::kFirstArgumentOffset));
    __ StoreWord(scratch, MemOperand(fp, FC::kFCIValuesOffset));
  }

  __ RecordComment("v8::FunctionCallback's argument");
  __ AddWord(function_callback_info_arg, fp,
             Operand(FC::kFunctionCallbackInfoOffset));

  ExternalReference thunk_ref = ER::invoke_function_callback(mode);
  Register no_thunk_arg = no_reg;

  MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
  static constexpr int kSlotsToDropOnReturn =
      FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;

  const bool with_profiling =
      mode != CallApiCallbackMode::kOptimizedNoProfiling;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
                           &argc_operand, return_value_operand);
}
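// Generate_CallApiGetter: calls a C++ AccessorInfo getter. The
// PropertyCallbackArguments array (layout asserted below) backs the
// v8::PropertyCallbackInfo that is passed to the callback inside an
// API_ACCESSOR_EXIT frame.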
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // (Register setup from the interface descriptor and the initial pushes of
  // receiver, data, and holder are elided in this excerpt.)
  using PCA = PropertyCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiAccessorExitFrameConstants;

  static_assert(PCA::kPropertyKeyIndex == 0);
  static_assert(PCA::kShouldThrowOnErrorIndex == 1);
  static_assert(PCA::kHolderIndex == 2);
  static_assert(PCA::kIsolateIndex == 3);
  static_assert(PCA::kHolderV2Index == 4);
  static_assert(PCA::kReturnValueIndex == 5);
  static_assert(PCA::kDataIndex == 6);
  static_assert(PCA::kThisIndex == 7);
  static_assert(PCA::kArgsLength == 8);

  // kData.
  __ LoadTaggedField(scratch,
                     FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ StoreWord(scratch, MemOperand(sp, PCA::kDataIndex * kSystemPointerSize));
  // kReturnValue.
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ StoreWord(scratch,
               MemOperand(sp, PCA::kReturnValueIndex * kSystemPointerSize));
  // kHolderV2.
  __ StoreWord(zero_reg,
               MemOperand(sp, PCA::kHolderV2Index * kSystemPointerSize));
  // kIsolate.
  __ li(scratch, ER::isolate_address());
  __ StoreWord(scratch,
               MemOperand(sp, PCA::kIsolateIndex * kSystemPointerSize));
  // kPropertyKey.
  __ LoadTaggedField(scratch,
                     FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ StoreWord(scratch,
               MemOperand(sp, PCA::kPropertyKeyIndex * kSystemPointerSize));

  __ RecordComment("Load api_function_address");
  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset),
      kAccessorInfoGetterTag);

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_ACCESSOR_EXIT);
  __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
  // property_callback_info_arg = v8::PropertyCallbackInfo&
  __ AddWord(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));

  DCHECK(!AreAliased(api_function_address, property_callback_info_arg,
                     name_arg, callback, scratch));

#ifdef V8_ENABLE_DIRECT_HANDLE
  // name_arg = Local<Name>(name): the name value was pushed onto the
  // GC-scanned stack space above, so name_arg already holds it.
#else
  // name_arg = Local<Name>(&name), i.e. &args_array[kPropertyKeyIndex].
  static_assert(PCA::kPropertyKeyIndex == 0);
  __ mv(name_arg, property_callback_info_arg);
#endif

  ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
  // Pass the AccessorInfo to the C function as the thunk argument.
  Register thunk_arg = callback;

  MemOperand return_value_operand = MemOperand(fp, FC::kReturnValueOffset);
  static constexpr int kSlotsToDropOnReturn =
      FC::kPropertyCallbackInfoArgsLength;
  MemOperand* const kUseStackSpaceConstant = nullptr;

  const bool with_profiling = true;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
                           kUseStackSpaceConstant, return_value_operand);
}
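// Generate_DirectCEntry: a trampoline for calling C functions from movable
// generated code. The return address is kept on the stack rather than in ra
// so that the calling Code object can move (GC) while the C function runs.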
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  // Make room for the C argument slots; callers drop them after the call.
  __ SubWord(sp, sp, Operand(kCArgsSlotsSize));
  __ StoreWord(ra, MemOperand(sp, kCArgsSlotsSize));  // Save return address.
  __ Call(t6);                                        // Call the C function.
  __ LoadWord(t6, MemOperand(sp, kCArgsSlotsSize));   // Reload return address.

  if (v8_flags.debug_code && v8_flags.enable_slow_asserts) {
    // In case of an error the return address may point to memory zapped by
    // the GC; dereference it and check for kZapValue.
    __ LoadWord(a4, MemOperand(t6, 0));
    __ Assert(ne, AbortReason::kReceivedInvalidReturnAddress, a4,
              Operand(kZapValue));
  }

  __ Jump(t6);
}
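// Generate_DeoptimizationEntry: saves the full register state, creates a
// Deoptimizer and its input FrameDescription, asks the deoptimizer to compute
// the output (unoptimized) frames, materializes them on the stack, and
// finally jumps to the continuation of the last output frame.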
void Generate_DeoptimizationEntry(MacroAssembler* masm,
                                  DeoptimizeKind deopt_kind) {
  Isolate* isolate = masm->isolate();

  // Registers restored from the output frame, plus sp and ra so that
  // FrameDescription::registers_ can be fully populated.
  RegList restored_regs = kJSCallerSaved | kCalleeSaved;
  RegList saved_regs = restored_regs | sp | ra;

  const RegisterConfiguration* config = RegisterConfiguration::Default();
  const int kDoubleRegsSize = kDoubleSize * DoubleRegister::kNumRegisters;

  // Save all allocatable double registers.
  __ SubWord(sp, sp, Operand(kDoubleRegsSize));
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    const DoubleRegister fpu_reg = DoubleRegister::from_code(code);
    __ StoreDouble(fpu_reg, MemOperand(sp, code * kDoubleSize));
  }

  // Save all general-purpose registers in saved_regs.
  __ SubWord(sp, sp, Operand(kNumberOfRegisters * kSystemPointerSize));
  for (int16_t i = kNumberOfRegisters - 1; i >= 0; i--) {
    if ((saved_regs.bits() & (1 << i)) != 0) {
      __ StoreWord(ToRegister(i), MemOperand(sp, kSystemPointerSize * i));
    }
  }

  const int kSavedRegistersAreaSize =
      (kNumberOfRegisters * kSystemPointerSize) + kDoubleRegsSize;

  // Compute the fp-to-sp delta: a3 = fp - (sp + kSavedRegistersAreaSize).
  __ AddWord(a3, sp, Operand(kSavedRegistersAreaSize));
  __ SubWord(a3, fp, a3);

  // Allocate a new deoptimizer object, passing zero instead of the function
  // for stub frames (which have no JSFunction on the frame).
  __ PrepareCallCFunction(5, a4);
  __ Move(a0, zero_reg);
  Label context_check;
  __ LoadWord(a1,
              MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ JumpIfSmi(a1, &context_check);
  __ LoadWord(a0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ bind(&context_check);
  __ li(a1, Operand(static_cast<int64_t>(deopt_kind)));
  {
    // Call Deoptimizer::New().
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
  }

  // a0 now holds the Deoptimizer; get the input FrameDescription into a1 and
  // copy the saved core registers into it.
  __ LoadWord(a1, MemOperand(a0, Deoptimizer::input_offset()));
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset =
        (i * kSystemPointerSize) + FrameDescription::registers_offset();
    if ((saved_regs.bits() & (1 << i)) != 0) {
      __ LoadWord(a2, MemOperand(sp, i * kSystemPointerSize));
      __ StoreWord(a2, MemOperand(a1, offset));
    }
  }

  // Copy the saved double registers into the input FrameDescription.
  int double_regs_offset = FrameDescription::double_registers_offset();
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int dst_offset = code * kDoubleSize + double_regs_offset;
    int src_offset =
        code * kDoubleSize + kNumberOfRegisters * kSystemPointerSize;
    __ LoadDouble(kScratchDoubleReg, MemOperand(sp, src_offset));
    __ StoreDouble(kScratchDoubleReg, MemOperand(a1, dst_offset));
  }

  // Remove the saved registers from the stack.
  __ AddWord(sp, sp, Operand(kSavedRegistersAreaSize));

  // Compute the unwinding limit in a2 (first slot not part of the input
  // frame) and copy the frame's contents into the input FrameDescription.
  __ LoadWord(a2, MemOperand(a1, FrameDescription::frame_size_offset()));
  __ AddWord(a2, a2, sp);
  __ AddWord(a3, a1, Operand(FrameDescription::frame_content_offset()));
  Label pop_loop, pop_loop_header;
  __ BranchShort(&pop_loop_header);
  __ bind(&pop_loop);
  __ Pop(a4);
  __ StoreWord(a4, MemOperand(a3, 0));
  __ AddWord(a3, a3, Operand(kSystemPointerSize));
  __ bind(&pop_loop_header);
  __ Branch(&pop_loop, ne, a2, Operand(sp));

  // Compute the output frames in the deoptimizer, preserving the deoptimizer
  // object in a0 across the call.
  __ Push(a0);
  __ PrepareCallCFunction(1, a1);
  {
    // Call Deoptimizer::ComputeOutputFrames().
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
  }
  __ Pop(a0);

  // Replace the current (input) frame with the output frames.
  Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
  // Outer loop state: a4 = current FrameDescription**, a1 = one past the
  // last FrameDescription**.
  __ LoadWord(a1, MemOperand(a0, Deoptimizer::output_count_offset()));
  __ LoadWord(a4, MemOperand(a0, Deoptimizer::output_offset()));
  __ CalcScaledAddress(a1, a4, a1, kSystemPointerSizeLog2);
  __ BranchShort(&outer_loop_header);
  __ bind(&outer_push_loop);
  // Inner loop state: a2 = current FrameDescription*, a3 = loop index.
  __ LoadWord(a2, MemOperand(a4, 0));
  __ LoadWord(a3, MemOperand(a2, FrameDescription::frame_size_offset()));
  __ BranchShort(&inner_loop_header);
  __ bind(&inner_push_loop);
  __ SubWord(a3, a3, Operand(sizeof(uint64_t)));
  __ AddWord(a6, a2, Operand(a3));
  __ LoadWord(a7, MemOperand(a6, FrameDescription::frame_content_offset()));
  __ Push(a7);
  __ bind(&inner_loop_header);
  __ Branch(&inner_push_loop, ne, a3, Operand(zero_reg));
  __ AddWord(a4, a4, Operand(kSystemPointerSize));
  __ bind(&outer_loop_header);
  __ Branch(&outer_push_loop, lt, a4, Operand(a1));

  // Restore the double registers from the input frame description.
  __ LoadWord(a1, MemOperand(a0, Deoptimizer::input_offset()));
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    const DoubleRegister fpu_reg = DoubleRegister::from_code(code);
    int src_offset = code * kDoubleSize + double_regs_offset;
    __ LoadDouble(fpu_reg, MemOperand(a1, src_offset));
  }

  // Push pc and continuation of the topmost output frame, then restore the
  // core registers from it. t3 keeps the FrameDescription pointer and must
  // therefore not itself be restored.
  __ LoadWord(a6, MemOperand(a2, FrameDescription::pc_offset()));
  __ Push(a6);
  __ LoadWord(a6, MemOperand(a2, FrameDescription::continuation_offset()));
  __ Push(a6);
  DCHECK(!(restored_regs.has(t3)));
  __ Move(t3, a2);
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset =
        (i * kSystemPointerSize) + FrameDescription::registers_offset();
    if ((restored_regs.bits() & (1 << i)) != 0) {
      __ LoadWord(ToRegister(i), MemOperand(t3, offset));
    }
  }

  // Jump to the continuation if there is one, otherwise return to the pc of
  // the last output frame.
  __ Pop(t6);  // Continuation.
  __ Pop(ra);  // Pc.
  Label end;
  __ Branch(&end, eq, t6, Operand(zero_reg));
  __ Jump(t6);
  __ bind(&end);
  __ Jump(ra);
}
void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}

void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
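// OSR from the interpreter to baseline (sparkplug) code: look up the baseline
// Code object via the SharedFunctionInfo, convert the interpreter frame into
// a baseline frame, compute the baseline PC corresponding to the current
// bytecode offset, and jump there.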
void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
    MacroAssembler* masm) {
  // (Loading of the JSFunction ("closure") from the frame and of its
  // SharedFunctionInfo into code_obj is elided in this excerpt.)
  ResetSharedFunctionInfoAge(masm, code_obj);
  __ LoadTrustedPointerField(
      code_obj,
      FieldMemOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset),
      kUnknownIndirectPointerTag);

  // For OSR entry it is a bug if we do not have baseline code.
  if (v8_flags.debug_code) {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    __ GetObjectType(code_obj, scratch, scratch);
    __ Assert(eq, AbortReason::kExpectedBaselineData, scratch,
              Operand(CODE_TYPE));
  }

  // Load the feedback cell and vector from the closure.
  __ LoadTaggedField(feedback_cell,
                     FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadTaggedField(
      feedback_vector,
      FieldMemOperand(feedback_cell, FeedbackCell::kValueOffset));

  Label install_baseline_code;
  // Check that the feedback vector is valid. If not, request installation of
  // baseline code via the runtime.
  {
    UseScratchRegisterScope temps(masm);
    Register type = temps.Acquire();
    __ GetObjectType(feedback_vector, type, type);
    __ Branch(&install_baseline_code, ne, type, Operand(FEEDBACK_VECTOR_TYPE));
  }

  // Replace the bytecode offset in the frame with the feedback cell, turning
  // the interpreter frame into a baseline frame.
  __ StoreWord(feedback_cell,
               MemOperand(fp, BaselineFrameConstants::kFeedbackCellFromFp));
  feedback_cell = no_reg;
  feedback_vector = no_reg;

  // Compute the baseline PC for the next bytecode to execute.
  __ li(get_baseline_pc,
        ExternalReference::baseline_pc_for_next_executed_bytecode());
  {
    // (Setup of the three C arguments, code object, bytecode offset, and
    // bytecode array, is elided in this excerpt.)
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(3, 0, a4);
    __ CallCFunction(get_baseline_pc, 3, 0);
  }
  // Add the returned PC offset to the code entry point and jump there.
  Generate_OSREntry(masm, code_obj);

  __ bind(&install_baseline_code);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(closure);
    __ CallRuntime(Runtime::kInstallBaselineCode, 1);
  }
  // Retry from the start after the runtime call (jump back elided).
}
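// Restart the current (interpreted) frame: used, e.g., by the debugger to
// re-enter a function after live-editing it.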
void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
  // Look up the function and argument count on the frame, tear the
  // interpreted frame down, and re-invoke the function.
  __ LoadWord(a1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadWord(a0, MemOperand(fp, StandardFrameConstants::kArgCOffset));
  __ LeaveFrame(StackFrame::INTERPRETED);
  // The arguments are already on the stack; do not adapt them again.
  __ li(a2, Operand(kDontAdaptArgumentsSentinel));
  __ InvokeFunction(a1, a2, a0, InvokeType::kJump);
}
#if defined(V8_ENABLE_LEAPTIERING) && defined(V8_TARGET_ARCH_RISCV64)