#if V8_TARGET_ARCH_LOONG64

#if V8_ENABLE_WEBASSEMBLY

#define __ ACCESS_MASM(masm)
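// Note: "__" expands to ACCESS_MASM(masm), so every "__ Foo(...)" statement
// below emits LoongArch64 instructions into the MacroAssembler at
// builtin-generation time; nothing here executes when this C++ itself runs.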
                                int formal_parameter_count, Address address) {

enum class ArgumentsElementType {

void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
                            Register scratch, Register scratch2,
                            ArgumentsElementType element_type) {
  if (element_type == ArgumentsElementType::kHandle) {
  __ Add_d(scratch, scratch, Operand(-1));

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::CONSTRUCT);
  Generate_PushArguments(masm, t2, a0, t3, t0, ArgumentsElementType::kRaw);
  __ PushRoot(RootIndex::kTheHoleValue);
  __ DropArguments(t3);
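// Generate_JSConstructStubGeneric (below) is the generic construct stub: it
// enters a CONSTRUCT frame, allocates the implicit receiver through
// Builtin::kFastNewObject when the function kind needs one, pushes the
// arguments, calls the constructor, and then applies the usual "an object
// returned from the constructor wins over the implicit receiver" rule.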
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
  __ EnterFrame(StackFrame::CONSTRUCT);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(t2);
                          &not_create_implicit_receiver);
  __ CallBuiltin(Builtin::kFastNewObject);
  __ Branch(&post_instantiation_deopt_entry);

  __ bind(&not_create_implicit_receiver);
  __ LoadRoot(a0, RootIndex::kTheHoleValue);
  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
  __ bind(&post_instantiation_deopt_entry);

  Label stack_overflow;
  __ StackOverflowCheck(a0, t0, t1, &stack_overflow);
  Generate_PushArguments(masm, t2, a0, t0, t1, ArgumentsElementType::kRaw);

  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(a0, RootIndex::kUndefinedValue, &check_receiver);

  __ bind(&use_receiver);
  __ JumpIfRoot(a0, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::CONSTRUCT);
  __ DropArguments(a1);

  __ bind(&check_receiver);
  __ JumpIfSmi(a0, &use_receiver);
  __ JumpIfJSAnyIsNotPrimitive(a0, t2, &leave_and_return);
  __ Branch(&use_receiver);

  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
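// The helper whose parameter list follows (its name is elided in this
// excerpt; upstream V8 calls it GetSharedFunctionInfoBytecodeOrBaseline)
// resolves a SharedFunctionInfo's trusted function data to either baseline
// Code (branching to is_baseline) or a BytecodeArray, branching to
// is_unavailable when neither is present.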
  __ DecodeField<Code::KindField>(scratch);
  __ Assert(eq, AbortReason::kExpectedBaselineData, scratch,
            Operand(static_cast<int>(CodeKind::BASELINE)));

    MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
    Label* is_baseline, Label* is_unavailable) {
  __ LoadTrustedPointerField(
  __ GetObjectType(data, scratch1, scratch1);
  __ Branch(&not_baseline, ne, scratch1, Operand(CODE_TYPE));
  __ Branch(is_baseline);
  __ bind(&not_baseline);
  __ Branch(is_baseline, eq, scratch1, Operand(CODE_TYPE));
  __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ LoadProtectedPointerField(
      bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset));
  __ GetObjectType(bytecode, scratch1, scratch1);
  __ Branch(is_unavailable, ne, scratch1, Operand(BYTECODE_ARRAY_TYPE));
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, a0,
  __ AssertGeneratorObject(a1);
  __ LoadTaggedField(a5,

  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(a6, debug_hook);
  __ Branch(&prepare_step_in_if_stepping, ne, a6, Operand(zero_reg));
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a6, debug_suspended_generator);
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a6));
  __ bind(&stepping_prepared);

  Label stack_overflow;
                    MacroAssembler::StackLimitKind::kRealStackLimit);

#if V8_ENABLE_LEAPTIERING
  __ Ld_w(dispatch_handle,
  __ LoadEntrypointAndParameterCountFromJSDispatchTable(
      code, argc, dispatch_handle, scratch);
                argc, SharedFunctionInfo::kFormalParameterCountOffset));

  Label done_loop, loop;
      FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
  __ Sub_d(a3, a3, Operand(1));
  __ Branch(&done_loop, lt, a3, Operand(zero_reg));

  Label ok, is_baseline, is_unavailable;
                                          &is_baseline, &is_unavailable);
  __ bind(&is_unavailable);
  __ Abort(AbortReason::kMissingBytecodeArray);

  __ bind(&is_baseline);
  __ GetObjectType(a3, a3, bytecode);
  __ Assert(eq, AbortReason::kMissingBytecodeArray, bytecode,

#if V8_ENABLE_LEAPTIERING
  __ JumpJSFunction(a1);

  __ bind(&prepare_step_in_if_stepping);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
  __ LoadTaggedField(a5,
  __ Branch(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
  __ LoadTaggedField(a5,
  __ Branch(&stepping_prepared);

  __ bind(&stack_overflow);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);

                                 Register scratch1, Register scratch2) {
  __ LoadStackLimit(scratch1, MacroAssembler::StackLimitKind::kRealStackLimit);
  __ sub_d(scratch1, sp, scratch1);
  __ Branch(&okay, gt, scratch1, Operand(scratch2));
  __ CallRuntime(Runtime::kThrowStackOverflow);
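// Stack-check sketch for the helper above (assuming scratch2 carries the
// space the incoming arguments will need, as in the other overflow checks in
// this file):
//   if (sp - real_stack_limit > needed_bytes) goto okay;
//   else CallRuntime(kThrowStackOverflow);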
  Label invoke, handler_entry, exit;

  NoRootArrayScope no_root_array(masm);

#ifdef V8_COMPRESS_POINTERS
                            IsolateData::cage_base_offset());
  __ li(s1, Operand(-1));
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ li(s5, c_entry_fp);
  __ Push(s1, s2, s3, s4);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(s1, IsolateFieldId::kFastCCallCallerPC);

  Label non_outermost_js;
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ li(s1, js_entry_sp);
  __ Branch(&non_outermost_js, ne, s2, Operand(zero_reg));
  __ bind(&non_outermost_js);

  __ bind(&handler_entry);
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
  __ LoadRoot(a0, RootIndex::kException);

  __ PushStackHandler();
  __ CallBuiltin(entry_trampoline);
  __ PopStackHandler();

  Label non_outermost_js_2;
  __ Branch(&non_outermost_js_2, ne, a5,
  __ li(a5, js_entry_sp);
  __ bind(&non_outermost_js_2);
  __ LoadIsolateField(a6, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(a6, IsolateFieldId::kFastCCallCallerPC);
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtin::kJSConstructEntryTrampoline);

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtin::kRunMicrotasksTrampoline);
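// All three entry points share Generate_JSEntryVariant; they differ only in
// the frame type (ENTRY vs. CONSTRUCT_ENTRY) and in which trampoline builtin
// is invoked once the JS entry frame has been set up.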
  FrameScope scope(masm, StackFrame::INTERNAL);
      IsolateAddressId::kContextAddress, masm->isolate());
  __ li(cp, context_address);
  Generate_PushArguments(masm, a5, a4, s1, s2, ArgumentsElementType::kHandle);
  __ LoadRoot(a4, RootIndex::kUndefinedValue);
#ifndef V8_COMPRESS_POINTERS
  __ CallBuiltin(builtin);

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kRunMicrotasks);

  __ Ld_hu(params_size,
  Register actual_params_size = scratch2;
  __ Ld_d(actual_params_size,
  __ slt(t2, params_size, actual_params_size);
  __ Movn(params_size, actual_params_size, t2);
  __ LeaveFrame(StackFrame::INTERPRETED);
  __ DropArguments(params_size);
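// The slt/Movn pair above computes max(params_size, actual_params_size):
// slt sets t2 when params_size < actual_params_size, and Movn overwrites
// params_size with actual_params_size only when t2 is non-zero. The larger
// count is then dropped, so over-application arguments are cleaned up too.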
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Register scratch3,
  Register bytecode_size_table = scratch1;
  Register original_bytecode_offset = scratch3;
                     bytecode_size_table, original_bytecode_offset));
  __ Move(original_bytecode_offset, bytecode_offset);
  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

  Label process_bytecode, extra_wide;
  static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
  static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  static_assert(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Branch(&process_bytecode, hi, bytecode, Operand(3));
  __ And(scratch2, bytecode, Operand(1));
  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));
  __ Add_d(bytecode_offset, bytecode_offset, Operand(1));
  __ Add_d(scratch2, bytecode_array, bytecode_offset);
  __ Add_d(bytecode_size_table, bytecode_size_table,
  __ jmp(&process_bytecode);

  __ bind(&extra_wide);
  __ Add_d(bytecode_offset, bytecode_offset, Operand(1));
  __ Add_d(scratch2, bytecode_array, bytecode_offset);
  __ Add_d(bytecode_size_table, bytecode_size_table,

  __ bind(&process_bytecode);
#define JUMP_IF_EQUAL(NAME)          \
  __ Branch(if_return, eq, bytecode, \
            Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));

  Label end, not_jump_loop;
  __ Branch(&not_jump_loop, ne, bytecode,
            Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ Move(bytecode_offset, original_bytecode_offset);
  __ bind(&not_jump_loop);
  __ Add_d(scratch2, bytecode_size_table, bytecode);
  __ Add_d(bytecode_offset, bytecode_offset, scratch2);
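// Advance sketch: encodings 0..3 are the kWide/kExtraWide prefixes and their
// DebugBreak twins. For a prefix, the offset is bumped past the prefix byte,
// the real bytecode is reloaded, and bytecode_size_table is rebased to the
// matching size column; the new offset is then roughly
//   bytecode_offset += size_table[bytecode]
// except for kJumpLoop, which restores original_bytecode_offset so the back
// edge is re-dispatched.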
void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {

void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
  ResetSharedFunctionInfoAge(masm, scratch);

void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
                                   Register feedback_vector, Register scratch) {
  __ And(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));

void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
  temps.Include({s1, s2, s3});
  Register closure = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kClosure);
  Register feedback_cell = temps.Acquire();
  Register feedback_vector = temps.Acquire();
  __ LoadTaggedField(feedback_cell,
    UseScratchRegisterScope temps(masm);
    __ AssertFeedbackVector(feedback_vector, scratch);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
    UseScratchRegisterScope temps(masm);
    flags = temps.Acquire();
    __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
        flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);

    UseScratchRegisterScope temps(masm);
    ResetFeedbackVectorOsrUrgency(masm, feedback_vector, temps.Acquire());

    UseScratchRegisterScope temps(masm);
    Register invocation_count = temps.Acquire();
    __ Ld_w(invocation_count,
                            FeedbackVector::kInvocationCountOffset));
    __ Add_w(invocation_count, invocation_count, Operand(1));
    __ St_w(invocation_count,
                            FeedbackVector::kInvocationCountOffset));

    Register callee_context = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kCalleeContext);
    Register callee_js_function = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kClosure);
      UseScratchRegisterScope temps(masm);
      ResetJSFunctionAge(masm, callee_js_function, temps.Acquire());
    __ Push(callee_context, callee_js_function);

    Register argc = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
    Register bytecode_array = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
    __ Push(argc, bytecode_array, feedback_cell, feedback_vector);
      UseScratchRegisterScope temps(masm);
      Register invocation_count = temps.Acquire();
      __ AssertFeedbackVector(feedback_vector, invocation_count);

  Label call_stack_guard;
  Register frame_size = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
    UseScratchRegisterScope temps(masm);
    Register sp_minus_frame_size = temps.Acquire();
    __ Sub_d(sp_minus_frame_size, sp, frame_size);
    Register interrupt_limit = temps.Acquire();
    __ LoadStackLimit(interrupt_limit,
                      MacroAssembler::StackLimitKind::kInterruptStackLimit);
    __ Branch(&call_stack_guard, Uless, sp_minus_frame_size,
              Operand(interrupt_limit));

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
    UseScratchRegisterScope temps(masm);
    temps.Exclude(flags);
    __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);

  __ bind(&call_stack_guard);
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
#ifdef V8_ENABLE_LEAPTIERING
    static_assert(kJSDispatchHandleShift > 0);
    __ SmiTag(frame_size);
    __ Push(frame_size);
    __ CallRuntime(Runtime::kStackGuardWithGap);
#ifdef V8_ENABLE_LEAPTIERING
  temps.Exclude({s1, s2, s3});
void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::BASELINE);
  __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
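// InterpreterEntryTrampoline (helper below): binds the interpreter frame
// (closure, bytecode array, offset, feedback slots), zero-fills the register
// file, runs stack and interrupt checks, and then dispatches to the first
// bytecode handler through the dispatch table; CompileLazy and
// InstallBaselineCode are reached via tail calls on the slow paths.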
    MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
      sfi, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, sfi);

  Label is_baseline, compile_lazy;
#ifdef V8_ENABLE_SANDBOX
  __ LoadParameterCountFromJSDispatchTable(a6, dispatch_handle, a7);
                              BytecodeArray::kParameterSizeOffset));
  __ SbxCheck(eq, AbortReason::kJSSignatureMismatch, a6, Operand(a7));

  Label push_stack_frame;
  __ LoadFeedbackVector(feedback_vector, closure, a5, &push_stack_frame);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
      &flags_need_processing);
  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, a5);
                              FeedbackVector::kInvocationCountOffset));
  __ Add_w(a5, a5, Operand(1));
                              FeedbackVector::kInvocationCountOffset));

  __ bind(&push_stack_frame);
  __ PushStandardFrame(closure);

  Label stack_overflow;
                              BytecodeArray::kFrameSizeOffset));
  __ Sub_d(a6, sp, Operand(a5));
  __ LoadStackLimit(a2, MacroAssembler::StackLimitKind::kRealStackLimit);
  __ Branch(&stack_overflow, lo, a6, Operand(a2));

  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ bind(&loop_check);
  __ Branch(&loop_header, ge, a5, Operand(zero_reg));

  Label no_incoming_new_target_or_generator_register;
          BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
  __ bind(&no_incoming_new_target_or_generator_register);

  Label stack_check_interrupt, after_stack_check_interrupt;
  __ LoadStackLimit(a5, MacroAssembler::StackLimitKind::kInterruptStackLimit);
  __ Branch(&stack_check_interrupt, lo, sp, Operand(a5));
  __ bind(&after_stack_check_interrupt);

  __ bind(&do_dispatch);
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ RecordComment("--- InterpreterEntryReturnPC point ---");
    masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
        masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
  __ jmp(&do_dispatch);

  __ bind(&do_return);

  __ bind(&stack_check_interrupt);
  __ CallRuntime(Runtime::kStackGuard);
  __ jmp(&after_stack_check_interrupt);

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);

  __ bind(&is_baseline);
#ifndef V8_ENABLE_LEAPTIERING
  Label install_baseline_code;
  __ Branch(&install_baseline_code, ne, t0, Operand(FEEDBACK_VECTOR_TYPE));
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
  __ ReplaceClosureCodeWithOptimizedCode(a2, closure);
  __ bind(&install_baseline_code);
  __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);

  __ bind(&compile_lazy);
  __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
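// Generate_InterpreterPushArgs: rewinds start_address to the last argument
// (num_args - 1 slots back) and lets PushArray copy the arguments onto the
// stack in the order the interpreter's Call/Construct builtins expect.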
                                         Register start_address,
                                         Register scratch, Register scratch2) {
  __ Sub_d(scratch, num_args, Operand(1));
  __ Sub_d(start_address, start_address, scratch);
  __ PushArray(start_address, num_args, scratch, scratch2,

  Label stack_overflow;
  __ Sub_d(a0, a0, Operand(1));
  __ StackOverflowCheck(a3, a4, t0, &stack_overflow);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ TailCallBuiltin(Builtin::kCallWithSpread);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  Label stack_overflow;
  __ StackOverflowCheck(a0, a5, t0, &stack_overflow);
  __ Sub_d(a0, a0, Operand(1));
  Register argc_without_receiver = a6;
  __ AssertUndefinedOrAllocationSite(a2, t0);
  __ AssertFunction(a1);
  __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
  __ TailCallBuiltin(Builtin::kConstructWithSpread);
  __ TailCallBuiltin(Builtin::kConstruct);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
    MacroAssembler* masm, ForwardWhichFrame which_frame) {
  Label stack_overflow;
  switch (which_frame) {
  __ StackOverflowCheck(a0, a5, t0, &stack_overflow);
  Register argc_without_receiver = a6;
  __ PushArray(a4, argc_without_receiver, a5, t0);
  __ TailCallBuiltin(Builtin::kConstruct);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
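// NewImplicitReceiver allocates the implicit receiver for the fast-construct
// path: it saves argc, target, and new.target around a kFastNewObject call,
// then patches the new object both into the receiver slot on the stack and
// into the FAST_CONSTRUCT frame's implicit-receiver spill slot.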
void NewImplicitReceiver(MacroAssembler* masm) {
  __ Push(a0, a1, a3);
  __ CallBuiltin(Builtin::kFastNewObject);
  __ Move(implicit_receiver, a0);
  __ StoreReceiver(implicit_receiver);
  __ St_d(implicit_receiver,

void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
    MacroAssembler* masm) {
  __ AssertFunction(a1);

  Label non_constructor;
  __ And(a2, a2, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, a2, Operand(zero_reg));

  Label stack_overflow;
  __ StackOverflowCheck(a0, a2, a5, &stack_overflow);

  __ EnterFrame(StackFrame::FAST_CONSTRUCT);
  __ LoadRoot(a2, RootIndex::kTheHoleValue);
  Register argc_without_receiver = a7;
  __ And(a5, a2, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&builtin_call, ne, a5, Operand(zero_reg));

  Label not_create_implicit_receiver;
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(a2);
      &not_create_implicit_receiver);
  NewImplicitReceiver(masm);
  __ bind(&not_create_implicit_receiver);
  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(

  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(a0, RootIndex::kUndefinedValue, &check_receiver);

  __ bind(&use_receiver);
  __ JumpIfRoot(a0, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ bind(&check_receiver);
  __ JumpIfSmi(a0, &use_receiver);
  __ JumpIfJSAnyIsNotPrimitive(a0, a4, &leave_and_return);
  __ Branch(&use_receiver);

  __ bind(&builtin_call);
  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
  Label builtin_trampoline, trampoline_loaded;
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  __ LoadTrustedPointerField(
      t0, FieldMemOperand(t0, SharedFunctionInfo::kTrustedFunctionDataOffset),
  __ JumpIfObjectType(&builtin_trampoline, ne, t0, INTERPRETER_DATA_TYPE,
  __ LoadProtectedPointerField(
      t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
  __ Branch(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ li(t0,
        ExternalReference::
            address_of_interpreter_entry_trampoline_instruction_start(

  __ bind(&trampoline_loaded);
  __ Add_d(ra, t0, Operand(interpreter_entry_return_pc_offset.value()));
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
            AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
            AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
            a1, Operand(BYTECODE_ARRAY_TYPE));

void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
  Label enter_bytecode, function_entry_bytecode;
  __ bind(&enter_bytecode);
  __ bind(&function_entry_bytecode);
  __ Branch(&enter_bytecode);

  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);

void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
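// Generate_ContinueToBuiltinHelper restores the register state captured in a
// deoptimization continuation frame, optionally stashes the return value in
// the right stack slot (for JavaScript builtins, relative to argc), and
// jumps to the target builtin resolved via LoadEntryFromBuiltinIndex.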
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool javascript_builtin,
  int allocatable_register_count = config->num_allocatable_general_registers();
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  if (javascript_builtin) {
    __ mov(scratch, a0);
      sp, config->num_allocatable_general_registers() *
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
  if (with_result && javascript_builtin) {
    constexpr int return_value_offset =
    __ Add_d(a0, a0, Operand(return_value_offset));
    __ Sub_d(a0, a0, Operand(return_value_offset));
  __ LoadEntryFromBuiltinIndex(t0, t0);

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kNotifyDeoptimized);
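// OSR entry trick used below: Generate_OSREntry materializes the target
// address (plus an optional offset) in ra and then returns, so the "return"
// lands at the on-stack-replacement entry point of the optimized code rather
// than at the caller.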
void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
                       Operand offset = Operand(zero_reg)) {
  __ Add_d(ra, entry_address, offset);

enum class OsrSourceTier {

void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
                        Register maybe_target_code,
                        Register expected_param_count) {
  Label jump_to_optimized_code;
  __ CompareTaggedAndBranch(&jump_to_optimized_code, ne, maybe_target_code,
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(expected_param_count);
    __ CallRuntime(Runtime::kCompileOptimizedOSR);
    __ Pop(expected_param_count);
  __ CompareTaggedAndBranch(&jump_to_optimized_code, ne, maybe_target_code,

  __ bind(&jump_to_optimized_code);
  __ li(scratch, ExternalReference::address_of_log_or_trace_osr());
  __ Branch(&next, eq, scratch, Operand(zero_reg));
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(maybe_target_code, expected_param_count);
    __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
    __ Pop(maybe_target_code, expected_param_count);

  if (source == OsrSourceTier::kInterpreter) {
    __ LeaveFrame(StackFrame::STUB);
  __ Check(Condition::kNotEqual, AbortReason::kExpectedOsrCode, scratch,
  __ SbxCheck(Condition::kEqual, AbortReason::kOsrUnexpectedStackSize, scratch,
              Operand(expected_param_count));
  __ LoadProtectedPointerField(
          Code::kDeoptimizationDataOrInterpreterDataOffset -
  __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code,
  Generate_OSREntry(masm, maybe_target_code, Operand(scratch));

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kInterpreter,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());

void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kBaseline,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
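// The apply/Reflect builtins below rely on the conditional-move idiom for
// optional arguments: Movz(dst, undefined, n) replaces dst with undefined
// exactly when n is zero, and n is decremented between the Movz runs, so
// each missing trailing argument collapses to undefined without branching.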
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
  __ Movz(arg_array, undefined_value, scratch);
  __ Sub_d(scratch, scratch, Operand(1));
  __ Movz(arg_array, undefined_value, scratch);
  __ DropArgumentsAndPushNewReceiver(argc, this_arg);

  __ LoadRoot(scratch, RootIndex::kNullValue);
  __ CompareTaggedAndBranch(&no_arguments, eq, arg_array, Operand(scratch));
  __ CompareTaggedAndBranch(&no_arguments, eq, arg_array,
                            Operand(undefined_value));
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

  __ bind(&no_arguments);

void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Add_d(a0, a0, Operand(1));
  __ addi_d(a0, a0, -1);

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
  __ Movz(arguments_list, undefined_value, scratch);
  __ Movz(this_argument, undefined_value, scratch);
  __ Movz(target, undefined_value, scratch);
  __ Sub_d(scratch, scratch, Operand(1));
  __ Movz(arguments_list, undefined_value, scratch);
  __ Movz(this_argument, undefined_value, scratch);
  __ Sub_d(scratch, scratch, Operand(1));
  __ Movz(arguments_list, undefined_value, scratch);
  __ DropArgumentsAndPushNewReceiver(argc, this_argument);
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
  __ Movz(arguments_list, undefined_value, scratch);
  __ Movz(target, undefined_value, scratch);
  __ Sub_d(scratch, scratch, Operand(1));
  __ Movz(arguments_list, undefined_value, scratch);
  __ Sub_d(scratch, scratch, Operand(1));
  __ DropArgumentsAndPushNewReceiver(argc, undefined_value);
  __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
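// Generate_AllocateSpaceAndShiftExistingArguments opens a count-slot gap
// above the existing arguments: it lowers sp, copies the old argument slots
// down to the new stack top, returns a pointer to the freshly opened space
// in pointer_to_new_space_out, and bumps argc_in_out by count.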
void Generate_AllocateSpaceAndShiftExistingArguments(
    MacroAssembler* masm, Register count, Register argc_in_out,
    Register pointer_to_new_space_out, Register scratch1, Register scratch2,
    Register scratch3) {
  __ Sub_d(sp, sp, Operand(new_space));
  Register dest = pointer_to_new_space_out;
  __ Branch(&done, ge, old_sp, Operand(end));
  __ Branch(&loop, lt, old_sp, Operand(end));
  __ Add_d(argc_in_out, argc_in_out, count);

  __ AssertNotSmi(a2);
  __ GetObjectType(a2, a5, a5);
  __ Branch(&ok, eq, a5, Operand(FIXED_ARRAY_TYPE));
  __ Branch(&fail, ne, a5, Operand(FIXED_DOUBLE_ARRAY_TYPE));
  __ Branch(&ok, eq, a4, Operand(zero_reg));
  __ Abort(AbortReason::kOperandIsNotAFixedArray);

  Label stack_overflow;
  __ StackOverflowCheck(len, kScratchReg, a5, &stack_overflow);
  Generate_AllocateSpaceAndShiftExistingArguments(masm, a4, a0, a7, a6, t0, t1);

  Label done, push, loop;
  __ Branch(&done, eq, len, Operand(zero_reg));
  __ Sub_d(scratch, sp, Operand(scratch));
#if !V8_STATIC_ROOTS_BOOL
  __ LoadTaggedRoot(t1, RootIndex::kTheHoleValue);
#if V8_STATIC_ROOTS_BOOL
  __ Branch(&push, ne, a5, RootIndex::kTheHoleValue);
  __ slli_w(t0, a5, 0);
  __ Branch(&push, ne, t0, Operand(t1));
  __ LoadRoot(a5, RootIndex::kUndefinedValue);
  __ Branch(&loop, ne, scratch, Operand(sp));
  __ TailCallBuiltin(target_builtin);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
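// Hole handling above: arguments read out of the FixedArray may be the_hole;
// those are replaced with undefined before being pushed. With pointer
// compression, slli_w(t0, a5, 0) sign-extends the low 32 bits so the
// compressed value can be compared against the tagged the_hole root in t1.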
                                               CallOrConstructMode mode,
  Label new_target_constructor, new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ And(t1, t1, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
  __ bind(&new_target_not_constructor);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotConstructor);
  __ bind(&new_target_constructor);

  Label stack_done, stack_overflow;
  __ Sub_d(a7, a7, a2);
  __ Branch(&stack_done, le, a7, Operand(zero_reg));
  __ StackOverflowCheck(a7, a4, a5, &stack_overflow);
  Generate_AllocateSpaceAndShiftExistingArguments(masm, a7, a0, a2, t0, t1,
  __ Sub_w(a7, a7, Operand(1));
  __ Branch(&loop, ne, a7, Operand(zero_reg));
  __ bind(&stack_done);
  __ TailCallBuiltin(target_builtin);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
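// In the varargs-forwarding path above, a7 ends up holding how many caller
// arguments exceed the starting offset in a2; the copy loop then forwards
// exactly that many slots before tail-calling the target builtin.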
  __ AssertFunction(a1);
          Operand(SharedFunctionInfo::IsNativeBit::kMask |
                  SharedFunctionInfo::IsStrictBit::kMask));
    __ LoadGlobalProxy(a3);
    Label convert_to_object, convert_receiver;
    __ LoadReceiver(a3);
    __ JumpIfSmi(a3, &convert_to_object);
    __ JumpIfJSAnyIsNotPrimitive(a3, a4, &done_convert);
      Label convert_global_proxy;
      __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
      __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
      __ bind(&convert_global_proxy);
      __ LoadGlobalProxy(a3);
      __ Branch(&convert_receiver);
    __ bind(&convert_to_object);
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallBuiltin(Builtin::kToObject);
    __ bind(&convert_receiver);
    __ StoreReceiver(a3);
  __ bind(&done_convert);
#ifdef V8_ENABLE_LEAPTIERING
      a2,
      FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ AssertBoundFunction(a1);
  __ LoadTaggedField(t0,
  __ StoreReceiver(t0);
  __ Sub_d(t0, sp, Operand(a5));
                    MacroAssembler::StackLimitKind::kRealStackLimit);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);

  Label loop, done_loop;
  __ Add_d(a0, a0, Operand(a4));
  __ Sub_d(a4, a4, Operand(1));
  __ Branch(&done_loop, lt, a4, Operand(zero_reg));
  __ bind(&done_loop);
      a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));

  Label non_callable, class_constructor;
  __ JumpIfSmi(target, &non_callable);
  __ LoadMap(map, target);
  __ TailCallBuiltin(Builtin::kCallBoundFunction, eq, instance_type,
                     Operand(JS_BOUND_FUNCTION_TYPE));
  __ And(flags, flags, Operand(Map::Bits1::IsCallableBit::kMask));
  __ Branch(&non_callable, eq, flags, Operand(zero_reg));
  __ TailCallBuiltin(Builtin::kCallProxy, eq, instance_type,
                     Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq, instance_type,
                     Operand(JS_WRAPPED_FUNCTION_TYPE));
  __ Branch(&class_constructor, eq, instance_type,
            Operand(JS_CLASS_CONSTRUCTOR_TYPE));
  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);

  __ bind(&non_callable);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);

  __ bind(&class_constructor);
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  __ AssertConstructor(a1);
  __ AssertFunction(a1);
  __ LoadRoot(a2, RootIndex::kUndefinedValue);

  Label call_generic_stub;
  __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg));
  __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);

  __ bind(&call_generic_stub);
  __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);

void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  __ AssertConstructor(a1);
  __ AssertBoundFunction(a1);
  __ Sub_d(t0, sp, Operand(a5));
                    MacroAssembler::StackLimitKind::kRealStackLimit);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);

  Label loop, done_loop;
  __ Add_d(a0, a0, Operand(a4));
  __ Sub_d(a4, a4, Operand(1));
  __ Branch(&done_loop, lt, a4, Operand(zero_reg));
  __ bind(&done_loop);
  __ CompareTaggedAndBranch(&skip_load, ne, a1, Operand(a3));
      a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip_load);
      a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ TailCallBuiltin(Builtin::kConstruct);

void Builtins::Generate_Construct(MacroAssembler* masm) {
  Label non_constructor, non_proxy;
  __ JumpIfSmi(target, &non_constructor);
  __ And(flags, flags, Operand(Map::Bits1::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, flags, Operand(zero_reg));
  __ GetInstanceTypeRange(map, instance_type, FIRST_JS_FUNCTION_TYPE, scratch);
  __ TailCallBuiltin(Builtin::kConstructFunction, ls, scratch,
                     Operand(LAST_JS_FUNCTION_TYPE - FIRST_JS_FUNCTION_TYPE));
  __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq, instance_type,
                     Operand(JS_BOUND_FUNCTION_TYPE));
  __ Branch(&non_proxy, ne, instance_type, Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kConstructProxy);

  __ bind(&non_proxy);
  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target,
                           Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
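// WebAssembly section. kSavedGpRegs/kSavedFpRegs (below) collect the wasm
// parameter registers so that Liftoff frame setup and lazy compilation can
// spill and restore them around their runtime calls.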
#if V8_ENABLE_WEBASSEMBLY

constexpr RegList kSavedGpRegs = ([]() constexpr {
    saved_gp_regs.set(gp_param_reg);
                saved_gp_regs.Count());
  return saved_gp_regs;
    saved_fp_regs.set(fp_param_reg);
                saved_fp_regs.Count());
  return saved_fp_regs;

void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
  Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
  Label allocate_vector, done;
                          WasmTrustedInstanceData::kFeedbackVectorsOffset));
  __ LoadTaggedField(vector,
  __ JumpIfSmi(vector, &allocate_vector);

  __ bind(&allocate_vector);
  __ MultiPush(kSavedGpRegs);
  __ MultiPushFPU(kSavedFpRegs);
  __ SmiTag(func_index);
  __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
  __ MultiPopFPU(kSavedFpRegs);
  __ MultiPop(kSavedGpRegs);
            MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ MultiPush(kSavedGpRegs);
  __ MultiPushFPU(kSavedFpRegs);
  __ CallRuntime(Runtime::kWasmCompileLazy, 2);
  static_assert(!kSavedGpRegs.has(t0));
  __ MultiPopFPU(kSavedFpRegs);
  __ MultiPop(kSavedGpRegs);
  static_assert(!kSavedGpRegs.has(t1));
                               WasmTrustedInstanceData::kJumpTableStartOffset));
  __ Add_d(t0, t1, Operand(t0));

void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);
  FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
  __ CallRuntime(Runtime::kWasmDebugBreak, 0);
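// Stack-switching (wasm continuations / JSPI) helpers. A jump buffer stores
// sp, fp, a pc to resume at, and the stack limit (the wasm::kJmpBuf*Offset
// slots); FillJumpBuffer parks the current stack, LoadJumpBuffer reactivates
// another one, and SwitchStacks informs the runtime which stack is active.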
void SwitchStackState(MacroAssembler* masm, Register jmpbuf, Register tmp,
#if V8_ENABLE_SANDBOX
  __ Ld_w(tmp, MemOperand(jmpbuf, wasm::kJmpBufStateOffset));
  __ JumpIfEqual(tmp, old_state, &ok);
  __ li(tmp, Operand(new_state));
  __ St_w(tmp, MemOperand(jmpbuf, wasm::kJmpBufStateOffset));

void SwitchStackPointer(MacroAssembler* masm, Register jmpbuf) {
  __ Ld_d(sp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));

void FillJumpBuffer(MacroAssembler* masm, Register jmpbuf, Label* target,
  __ St_d(tmp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
  __ St_d(fp, MemOperand(jmpbuf, wasm::kJmpBufFpOffset));
  __ St_d(tmp, MemOperand(jmpbuf, wasm::kJmpBufStackLimitOffset));
  __ LoadLabelRelative(tmp, target);
  __ St_d(tmp, MemOperand(jmpbuf, wasm::kJmpBufPcOffset));

void LoadJumpBuffer(MacroAssembler* masm, Register jmpbuf, bool load_pc,
  SwitchStackPointer(masm, jmpbuf);
  __ Ld_d(fp, MemOperand(jmpbuf, wasm::kJmpBufFpOffset));
    __ Ld_d(tmp, MemOperand(jmpbuf, wasm::kJmpBufPcOffset));

void SaveState(MacroAssembler* masm, Register active_continuation, Register tmp,
  __ LoadExternalPointerField(
                      WasmContinuationObject::kStackOffset),
  UseScratchRegisterScope temps(masm);
  FillJumpBuffer(masm, jmpbuf, suspend, temps.Acquire());

void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_continuation,
  Register target_jmpbuf = target_continuation;
  __ LoadExternalPointerField(
                      WasmContinuationObject::kStackOffset),
           MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  LoadJumpBuffer(masm, target_jmpbuf, false, tmp, expected_state);

void SwitchStacks(MacroAssembler* masm, Register old_continuation,
                  const std::initializer_list<Register> keep) {
  using ER = ExternalReference;
  for (auto reg : keep) {
  __ PrepareCallCFunction(2, a0);
      return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
  for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {

void ReloadParentContinuation(MacroAssembler* masm, Register return_reg,
                              Register return_value, Register context,
                              Register tmp1, Register tmp2, Register tmp3) {
  Register active_continuation = tmp1;
  __ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
                      WasmContinuationObject::kStackOffset),
  __ St_d(zero_reg, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ LoadTaggedField(parent,
                     WasmContinuationObject::kParentOffset));
  int32_t active_continuation_offset =
          RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
      jmpbuf, FieldMemOperand(parent, WasmContinuationObject::kStackOffset),
  SwitchStacks(masm, active_continuation, true,
               {return_reg, return_value, context, jmpbuf});

void RestoreParentSuspender(MacroAssembler* masm, Register tmp1,
  __ LoadRoot(suspender, RootIndex::kActiveSuspender);
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);

void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
           MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
           MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
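// RegisterAllocator (below) is generation-time bookkeeping, not runtime
// code: it hands out the architecture's allocatable registers to the wasm
// wrapper generators. The DEFINE_REG / ASSIGN_REG / DEFINE_PINNED macros
// that follow are thin wrappers over it.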
class RegisterAllocator {
    Scoped(RegisterAllocator* allocator, Register* reg)

  void Ask(Register* reg) {

  bool registerIsAvailable(const Register& reg) { return available_.has(reg); }

  void Pinned(const Register& requested, Register* reg) {
    if (!registerIsAvailable(requested)) {
      printf("%s register is occupied!", RegisterName(requested));
    DCHECK(registerIsAvailable(requested));

  void Reserve(const Register& reg) {

  void Reserve(const Register& reg1, const Register& reg2,
               const Register& reg3 = no_reg, const Register& reg4 = no_reg,
               const Register& reg5 = no_reg, const Register& reg6 = no_reg) {

  bool IsUsed(const Register& reg) {

  void ResetExcept(const Register& reg1 = no_reg, const Register& reg2 = no_reg,
                   const Register& reg3 = no_reg, const Register& reg4 = no_reg,
                   const Register& reg5 = no_reg,
                   const Register& reg6 = no_reg) {
      if (registerIsAvailable(**it)) {

  static RegisterAllocator WithAllocatableGeneralRegisters() {
    for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
      int code = config->GetAllocatableGeneralCode(i);
      list.set(candidate);
    return RegisterAllocator(list);
#define DEFINE_REG(Name)  \
  Register Name = no_reg; \

#define DEFINE_REG_W(Name) \

#define ASSIGN_REG(Name) regs.Ask(&Name);

#define ASSIGN_REG_W(Name) \

#define DEFINE_PINNED(Name, Reg) \
  Register Name = no_reg;        \
  regs.Pinned(Reg, &Name);

#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);

#define DEFINE_SCOPED(Name) \
  RegisterAllocator::Scoped scope_##Name(&regs, &Name);

#define FREE_REG(Name) regs.Free(&Name);

void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
  __ Branch(&instance, eq, scratch, Operand(WASM_TRUSTED_INSTANCE_DATA_TYPE));
      FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));

void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
  __ Sub_d(sp, sp, Operand(required_stack_space));
  for (int i = cnt_fp - 1; i >= 0; i--) {
  for (int i = cnt_gp; i >= 1; i--) {
  __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);

void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap);
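// WasmSuspend parks the current continuation: it fills the active jump
// buffer, walks from the suspender to its parent continuation, switches
// stacks to the caller, and leaves the STACK_SWITCH frame on the other side.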
void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadExternalPointerField(
  FillJumpBuffer(masm, jmpbuf, &resume, scratch);
      suspender_continuation,
      FieldMemOperand(suspender, WasmSuspenderObject::kContinuationOffset));
  __ LoadTaggedField(caller,
                     WasmContinuationObject::kParentOffset));
  int32_t active_continuation_offset =
          RootIndex::kActiveContinuation);
      parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);
  SwitchStacks(masm, continuation, false, {caller, suspender});
  __ LoadExternalPointerField(
      jmpbuf, FieldMemOperand(caller, WasmContinuationObject::kStackOffset),
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ St_d(zero_reg, GCScanSlotPlace);
  __ LeaveFrame(StackFrame::STACK_SWITCH);
void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  regs.ResetExcept(closure);
      FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
  regs.ResetExcept(suspender);
  __ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
  __ LoadExternalPointerField(
                      WasmContinuationObject::kStackOffset),
  FillJumpBuffer(masm, current_jmpbuf, &suspend, scratch);
  __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
  __ StoreTaggedField(
  __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);
  suspender = target_continuation;
      target_continuation,
      FieldMemOperand(suspender, WasmSuspenderObject::kContinuationOffset));
  __ StoreTaggedField(active_continuation,
                      WasmContinuationObject::kParentOffset));
  __ Move(old_continuation, active_continuation);
  __ RecordWriteField(
      target_continuation, WasmContinuationObject::kParentOffset,
  int32_t active_continuation_offset =
          RootIndex::kActiveContinuation);
  __ St_d(target_continuation,
  SwitchStacks(masm, old_continuation, false, {target_continuation});
  regs.ResetExcept(target_continuation);
  __ LoadExternalPointerField(
                      WasmContinuationObject::kStackOffset),
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ St_d(zero_reg, GCScanSlotPlace);
  LoadJumpBuffer(masm, target_jmpbuf, false, scratch,
  __ LeaveFrame(StackFrame::STACK_SWITCH);
  __ CallRuntime(Runtime::kThrow);
  LoadJumpBuffer(masm, target_jmpbuf, true, scratch,
  __ LeaveFrame(StackFrame::STACK_SWITCH);
void Builtins::Generate_WasmResume(MacroAssembler* masm) {

void Builtins::Generate_WasmReject(MacroAssembler* masm) {

void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
                            Register wasm_instance, Register wrapper_buffer,
                            Register& original_fp, Register& new_wrapper_buffer,
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadRoot(target_continuation, RootIndex::kActiveContinuation);
  __ LoadTaggedField(parent_continuation,
                     WasmContinuationObject::kParentOffset));
  SaveState(masm, parent_continuation, scratch, suspend);
  SwitchStacks(masm, parent_continuation, false,
               {wasm_instance, wrapper_buffer});
  regs.Pinned(t1, &original_fp);
  __ mov(original_fp, fp);
  __ LoadRoot(target_continuation, RootIndex::kActiveContinuation);
  LoadTargetJumpBuffer(masm, target_continuation, scratch,
  __ EnterFrame(StackFrame::STACK_SWITCH);
      JSToWasmWrapperFrameConstants::kWrapperBufferSize,
  __ Sub_d(sp, sp, Operand(stack_space));
  __ mov(new_wrapper_buffer, sp);
  static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
          JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
          JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));

void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
  static const Builtin_FulfillPromise_InterfaceDescriptor desc;
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
           MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentContinuation(masm, promise, return_value, kContextRegister, tmp,
  RestoreParentSuspender(masm, tmp, tmp2);
  __ li(tmp, Operand(1));
           MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ CallBuiltin(Builtin::kFulfillPromise);
  __ bind(return_promise);

void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
                                         RegisterAllocator& regs,
                                         Label* return_promise) {
  static const Builtin_RejectPromise_InterfaceDescriptor desc;
  thread_in_wasm_flag_addr = a2;
      thread_in_wasm_flag_addr,
  __ St_w(zero_reg, MemOperand(thread_in_wasm_flag_addr, 0));
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
           MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentContinuation(masm, promise, reason, kContextRegister, tmp, tmp2,
  RestoreParentSuspender(masm, tmp, tmp2);
  __ li(tmp, Operand(1));
           MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ LoadRoot(debug_event, RootIndex::kTrueValue);
  __ CallBuiltin(Builtin::kRejectPromise);
  __ jmp(return_promise);
  masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
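// JSToWasmWrapperHelper is the shared body of the three JS-to-wasm entries
// generated below; mode decides whether it runs on the caller's stack
// (JS_TO_WASM frame) or switches to a dedicated stack and returns a promise
// (STACK_SWITCH frame), with a landing pad that rejects the promise on an
// uncaught wasm exception.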
void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);
  __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
  __ Ld_d(implicit_arg,
          MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
    SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
                           original_fp, new_wrapper_buffer, &suspend);
    new_wrapper_buffer = wrapper_buffer;
  regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
                   new_wrapper_buffer);
          MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
  __ St_d(implicit_arg,
          MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
               JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
           MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
                   JSToWasmWrapperFrameConstants::
                       kWrapperBufferStackReturnBufferSize));
  __ Sub_d(sp, sp, result_size);
          JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
  int stack_params_offset =
  stack_params_offset += param_padding;
          JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
          JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
  __ Add_d(last_stack_param, params_start, Operand(stack_params_offset));
  __ bind(&loop_start);
  Label finish_stack_params;
  __ Branch(&finish_stack_params, ge, last_stack_param,
            Operand(params_end));
  __ Branch(&loop_start);

  __ bind(&finish_stack_params);
  size_t next_offset = 0;
  next_offset += param_padding;
  DCHECK_EQ(next_offset, stack_params_offset);
  __ Ld_d(thread_in_wasm_flag_addr,
  __ li(scratch, Operand(1));
  __ St_w(scratch, MemOperand(thread_in_wasm_flag_addr, 0));
           MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ LoadWasmCodePointer(
          JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
  __ CallWasmCodePointerNoSignatureCheck(call_target);
  __ Ld_d(thread_in_wasm_flag_addr,
  __ St_w(zero_reg, MemOperand(thread_in_wasm_flag_addr, 0));
  __ Ld_d(wrapper_buffer,
          MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
          JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
          JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
          JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
          JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
  __ Ld_d(a1, MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
  __ Ld_d(a0, MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
      MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
      MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
  GetContextFromImplicitArg(masm, a0, scratch);

  Label return_promise;
    SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
  __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);
    GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {

void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {

void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
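// CEntry support: when C++ is entered while running on a secondary wasm
// stack, the helpers below flip execution onto the isolate's central stack
// first (recording the old sp in kOldSPRegister) and switch back afterwards,
// keyed off kSwitchFlagRegister.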
static constexpr Register kOldSPRegister = s3;
static constexpr Register kSwitchFlagRegister = s4;

void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
                                     Register target_input,
                                     Register argv_input) {
  using ER = ExternalReference;
  __ mov(kSwitchFlagRegister, zero_reg);
  __ mov(kOldSPRegister, sp);
  ER on_central_stack_flag_loc = ER::Create(
      IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
  const Register& on_central_stack_flag = a2;
  __ li(on_central_stack_flag, on_central_stack_flag_loc);
  __ Ld_b(on_central_stack_flag, MemOperand(on_central_stack_flag, 0));

  Label do_not_need_to_switch;
  __ Branch(&do_not_need_to_switch, ne, on_central_stack_flag,
  DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
  __ Push(argc_input, target_input, argv_input);
  __ PrepareCallCFunction(2, a0);
  __ li(kCArgRegs[0], ER::isolate_address(masm->isolate()));
  __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
  __ Pop(argc_input, target_input, argv_input);
  __ Sub_d(sp, central_stack_sp, Operand(kReturnAddressSlotOffset + kPadding));
  __ li(kSwitchFlagRegister, 1);
  __ bind(&do_not_need_to_switch);

void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
  using ER = ExternalReference;

  Label no_stack_change;
  __ Branch(&no_stack_change, eq, kSwitchFlagRegister, Operand(zero_reg));
  __ PrepareCallCFunction(1, a0);
  __ li(kCArgRegs[0], ER::isolate_address(masm->isolate()));
  __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
  __ mov(sp, kOldSPRegister);
  __ bind(&no_stack_change);
                               ArgvMode argv_mode, bool builtin_exit_frame,
                               bool switch_to_central_stack) {
  using ER = ExternalReference;
  static constexpr Register argc_input = a0;
  static constexpr Register target_fun = s1;
  static constexpr Register argv = a1;
  static constexpr Register scratch = a3;
  static constexpr Register argc_sav = s0;
  __ mov(target_fun, argv);
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  __ mov(argc_sav, argc_input);
  __ AssertStackIsAligned();

#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
  __ StoreReturnAddressAndCall(target_fun);
#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchFromTheCentralStackIfNeeded(masm);

  Label exception_returned;
  __ CompareRootAndBranch(a0, RootIndex::kException, eq, &exception_returned,
  ER exception_address =
      ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
  __ Ld_d(scratch, __ ExternalReferenceAsOperand(exception_address, no_reg));
  __ Branch(&okay, eq, scratch, RootIndex::kTheHoleValue);
  __ LeaveExitFrame(scratch);

  __ bind(&exception_returned);
  ER pending_handler_context_address = ER::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ER pending_handler_entrypoint_address = ER::Create(
      IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ER pending_handler_fp_address =
      ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ER pending_handler_sp_address =
      ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
  __ PrepareCallCFunction(3, 0, a0);
  __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
  __ li(cp, pending_handler_context_address);
  __ li(sp, pending_handler_sp_address);
  __ li(fp, pending_handler_fp_address);
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  ER c_entry_fp_address =
      ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ St_d(zero_reg, __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));
  __ Ld_d(scratch, __ ExternalReferenceAsOperand(
                       pending_handler_entrypoint_address, no_reg));
#if V8_ENABLE_WEBASSEMBLY
void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
  using ER = ExternalReference;
  Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallCFunction(ER::wasm_grow_stack(), 5);
    UseScratchRegisterScope temps(masm);
    __ sub_d(new_fp, fp, sp);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();

  __ bind(&call_runtime);
           MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
                         WasmTrustedInstanceData::kNativeContextOffset));
  __ EnterFrame(StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kWasmStackGuard);
  __ LeaveFrame(StackFrame::INTERNAL);
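// Generate_DoubleToI implements the JS ToInt32-style truncation: it first
// tries TryInlineTruncateDoubleToI, and only on failure falls back to
// manually decomposing the double (bstrpick_d extracts the exponent field)
// and shifting the mantissa to produce the low 32 bits of the truncation.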
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  // Truncates the double on the stack to an int32, following ECMAScript
  // ToInt32 semantics when the inline FPU fast path fails.
  // ...
  __ Push(result_reg);
  __ Push(scratch, scratch2, scratch3);

  // Load the double input value.
  __ Fld_d(double_scratch, MemOperand(sp, kArgumentOffset));

  // Try a conversion to a signed integer; branches to `done` on success.
  __ TryInlineTruncateDoubleToI(result_reg, double_scratch, &done);

  // Manual truncation: extract the biased exponent field.
  __ bstrpick_d(input_high, result_reg, /* ... */);
  // ...
  __ Sub_d(scratch, input_high, /* ... */);
  // ...
  // The exponent is out of range: the result is zero.
  __ mov(result_reg, zero_reg);
  // ...

  // Negate the extracted mantissa if the input was negative.
  Label lessthan_zero_reg;
  __ Branch(&lessthan_zero_reg, ge, result_reg, Operand(zero_reg));
  __ Sub_d(input_low, zero_reg, Operand(input_low));
  __ bind(&lessthan_zero_reg);
  // ...

  // Shift the mantissa into position to form the low 32 result bits.
  __ Sub_d(input_high, input_high, /* ... */);
  // ...
  __ sll_w(result_reg, input_low, input_high);
  // ...
  __ Pop(scratch, scratch2, scratch3);
  // ...
}
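// Note: the manual path above implements ECMAScript ToInt32, i.e. truncation
// modulo 2^32, for inputs the FPU instruction cannot convert. Conceptually
// (illustrative only):
//
//   uint64_t exponent = (bits >> 52) & 0x7FF;  // IEEE-754 double fields
//   uint64_t mantissa = (bits & ((1ull << 52) - 1)) | (1ull << 52);
//   // Shift the mantissa so bit 0 has weight 2^0, keep the low 32 bits,
//   // and negate the result if the sign bit was set.
//
// For example, ToInt32(2^31) == -2^31 and ToInt32(2^32) == 0.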
void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
                                            CallApiCallbackMode mode) {
  // ...
  switch (mode) {
    case CallApiCallbackMode::kGeneric:
      argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
      // ...
      break;
    case CallApiCallbackMode::kOptimizedNoProfiling:
    case CallApiCallbackMode::kOptimized:
      // ...
      api_function_address =
          CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
      // ...
      break;
  }
  DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
                     func_templ, scratch));

  using FCA = FunctionCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiCallbackExitFrameConstants;

  static_assert(FCA::kArgsLength == 6);
  static_assert(FCA::kNewTargetIndex == 5);
  static_assert(FCA::kTargetIndex == 4);
  static_assert(FCA::kReturnValueIndex == 3);
  static_assert(FCA::kContextIndex == 2);
  static_assert(FCA::kIsolateIndex == 1);
  static_assert(FCA::kUnusedIndex == 0);

  // ...
  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
                       topmost_script_having_context);
  // ...
  __ li(scratch, ER::isolate_address());
  // ...
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...
  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(func_templ,
                      FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
      kFunctionTemplateInfoCallbackTag);
  // ...

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_CALLBACK_EXIT);
  // ...

  // FunctionCallbackInfo::length_.
  MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);
  __ St_d(argc, argc_operand);

  // FunctionCallbackInfo::implicit_args_.
  __ Add_d(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
  __ St_d(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));

  // FunctionCallbackInfo::values_ (points at the first JS argument).
  __ Add_d(scratch, fp, Operand(FC::kFirstArgumentOffset));
  __ St_d(scratch, MemOperand(fp, FC::kFCIValuesOffset));

  __ RecordComment("v8::FunctionCallback's argument.");
  __ Add_d(function_callback_info_arg, fp,
           Operand(FC::kFunctionCallbackInfoOffset));

  DCHECK(!AreAliased(api_function_address, scratch,
                     function_callback_info_arg));

  ExternalReference thunk_ref = ER::invoke_function_callback(mode);
  // ...
  static constexpr int kSlotsToDropOnReturn = /* ... */;
  const bool with_profiling =
      mode != CallApiCallbackMode::kOptimizedNoProfiling;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
                           &argc_operand, return_value_operand);
}
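// Note: the exit frame built above hands the C++ callback a
// v8::FunctionCallbackInfo whose implicit_args array has the fixed layout
// pinned by the static_asserts (kUnused, kIsolate, kContext, kReturnValue,
// kTarget, kNewTarget), followed by the argc JS arguments that values_
// points at. kFCIImplicitArgsOffset / kFCIValuesOffset store exactly those
// two interior pointers, so on the C++ side, roughly:
//
//   info.GetIsolate()     -> implicit_args[FCA::kIsolateIndex]
//   info.GetReturnValue() -> implicit_args[FCA::kReturnValueIndex]
//   info[i]               -> values_[i]   (0 <= i < info.Length())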
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // ...
  Register api_function_address = a2;
  // ...

  // Build v8::PropertyCallbackInfo::args_ on the stack and push the property
  // name below the exit frame so the GC can see them.
  using PCA = PropertyCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiAccessorExitFrameConstants;

  static_assert(PCA::kPropertyKeyIndex == 0);
  static_assert(PCA::kShouldThrowOnErrorIndex == 1);
  static_assert(PCA::kHolderIndex == 2);
  static_assert(PCA::kIsolateIndex == 3);
  static_assert(PCA::kHolderV2Index == 4);
  static_assert(PCA::kReturnValueIndex == 5);
  static_assert(PCA::kDataIndex == 6);
  static_assert(PCA::kThisIndex == 7);
  static_assert(PCA::kArgsLength == 8);

  __ LoadTaggedField(scratch,
                     FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ LoadRoot(undef, RootIndex::kUndefinedValue);
  __ li(scratch2, ER::isolate_address());
  // ... (receiver, data, return value and holderV2 are pushed first)
  __ Push(scratch2, holder);

  // ...
  __ LoadTaggedField(name_arg,
                     FieldMemOperand(callback, AccessorInfo::kNameOffset));
  // ...
  __ Push(should_throw_on_error, name_arg);

  __ RecordComment("Load api_function_address");
  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset),
      kAccessorInfoGetterTag);
  // ...

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_ACCESSOR_EXIT);

  __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
  // property_callback_info_arg = v8::PropertyCallbackInfo&
  __ Add_d(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));

  DCHECK(!AreAliased(api_function_address, property_callback_info_arg,
                     name_arg, callback, scratch));

#ifdef V8_ENABLE_DIRECT_HANDLE
  // ...
#else
  // name_arg = Local<Name>(&name), i.e. &args_array[kPropertyKeyIndex].
  static_assert(PCA::kPropertyKeyIndex == 0);
  __ mov(name_arg, property_callback_info_arg);
#endif

  ER thunk_ref = ER::invoke_accessor_getter_callback();
  // ...
  static constexpr int kSlotsToDropOnReturn =
      FC::kPropertyCallbackInfoArgsLength;
  MemOperand* const kUseStackSpaceConstant = nullptr;

  const bool with_profiling = true;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
                           kUseStackSpaceConstant, return_value_operand);
}
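// Note: the pushes above build PropertyCallbackArguments::args_ in exactly
// the index order pinned by the static_asserts (the stack grows down, so the
// last value pushed lands at index 0):
//
//   args_[0] property key        args_[4] holder V2
//   args_[1] should-throw flag   args_[5] return value (undefined)
//   args_[2] holder              args_[6] callback data
//   args_[3] isolate             args_[7] receiver (this)
//
// The getter thunk then treats &args_ as its v8::PropertyCallbackInfo.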
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  // ...
  // In case of an error the return address may point to a memory area
  // filled with kZapValue by the GC; check for this.
  __ Assert(ne, AbortReason::kReceivedInvalidReturnAddress, a4,
            Operand(reinterpret_cast<uint64_t>(kZapValue)));
  // ...
}
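// Note: DirectCEntry exists so that movable code objects can call C
// functions that may trigger a GC. In outline (illustrative only):
//
//   spill ra to a stack slot      // the slot is updated if the caller moves
//   call the C function via t7
//   reload ra                     // possibly relocated by the GC
//   jump ra
//
// The debug-mode Assert above checks that the reloaded return address does
// not point at memory the GC filled with kZapValue.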
void Generate_DeoptimizationEntry(MacroAssembler* masm,
                                  DeoptimizeKind deopt_kind) {
  Isolate* isolate = masm->isolate();
  // ...
  RegList saved_regs = restored_regs | sp | ra;
  // ...

  // Save all allocatable SIMD128 / double registers before touching them.
  __ Sub_d(sp, sp, Operand(kSimd128RegsSize));
  const RegisterConfiguration* config = RegisterConfiguration::Default();
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    // ...
  }

  // Push saved_regs (needed to populate FrameDescription::registers_).
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    if ((saved_regs.bits() & (1 << i)) != 0) {
      // ...
    }
  }

  const int kSavedRegistersAreaSize =
      (kNumberOfRegisters * kSystemPointerSize) + kSimd128RegsSize;

  // Compute the fp-to-sp delta and pass it as an argument.
  __ Add_d(a3, sp, Operand(kSavedRegistersAreaSize));
  __ sub_d(a3, fp, a3);

  __ PrepareCallCFunction(5, a4);
  // Pass the function (or zero for non-JS frames) as the first argument.
  __ mov(a0, zero_reg);
  Label context_check;
  // ...
  __ JumpIfSmi(a1, &context_check);
  // ...
  __ bind(&context_check);
  __ li(a1, Operand(static_cast<int>(deopt_kind)));
  // ...

  // Call Deoptimizer::New().
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
  }

  // Copy core registers into FrameDescription::registers_.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    if ((saved_regs.bits() & (1 << i)) != 0) {
      // ...
    }
  }

  // Copy SIMD128 / double registers into the frame description.
  const int simd128_regs_offset = FrameDescription::simd128_registers_offset();
  for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
    int code = config->GetAllocatableSimd128Code(i);
    int dst_offset = code * kSimd128Size + simd128_regs_offset;
    // ...
  }

  // Remove the saved registers from the stack.
  __ Add_d(sp, sp, Operand(kSavedRegistersAreaSize));

  // Compute a pointer to the unwinding limit in register a2; that is the
  // first stack slot not part of the input frame.
  // ...
  __ add_d(a2, a2, sp);

  // Unwind the stack down to the unwinding limit, copying the activation
  // frame's contents into the input frame description.
  Label pop_loop;
  Label pop_loop_header;
  __ Branch(&pop_loop_header);
  __ bind(&pop_loop);
  // ...
  __ addi_d(a3, a3, sizeof(uint64_t));
  __ bind(&pop_loop_header);
  __ BranchShort(&pop_loop, ne, a2, Operand(sp));

  // Call Deoptimizer::ComputeOutputFrames().
  __ PrepareCallCFunction(1, a1);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
  }
  // ...

  // Replace the current (input) frame with the output frames; current_frame
  // and frame_size are set up in code not shown here.
  Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
  // ...
  __ Branch(&outer_loop_header);
  __ bind(&outer_push_loop);
  // ...
  __ Branch(&inner_loop_header);
  __ bind(&inner_push_loop);
  __ Sub_d(frame_size, frame_size, Operand(sizeof(uint64_t)));
  __ Add_d(a6, current_frame, Operand(frame_size));
  // ...
  __ bind(&inner_loop_header);
  __ BranchShort(&inner_push_loop, ne, frame_size, Operand(zero_reg));
  // ...
  __ bind(&outer_loop_header);
  __ BranchShort(&outer_push_loop, lt, a4, Operand(a1));

  // Restore SIMD128 / double registers from the last output frame.
  for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
    int code = config->GetAllocatableSimd128Code(i);
    const DoubleRegister fpu_reg = DoubleRegister::from_code(code);
    int src_offset = code * kSimd128Size + simd128_regs_offset;
    __ Fld_d(fpu_reg, MemOperand(current_frame, src_offset));
  }

  // Restore the GP registers from the last output frame; t7 is used as the
  // base pointer, so it must not itself be in the restored set.
  DCHECK(!(restored_regs.has(t7)));
  __ mov(t7, current_frame);
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    // ...
    if ((restored_regs.bits() & (1 << i)) != 0) {
      // ...
    }
  }

  // If the continuation (loaded into t7 above) is non-zero, jump to it;
  // otherwise fall through to the return path.
  Label end;
  __ BranchShort(&end, eq, t7, Operand(zero_reg));
  __ Jump(t7);
  __ bind(&end);
  // ...
}
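// Note: in outline, the deoptimization entry above performs:
//   1. save all GP and SIMD registers in FrameDescription layout;
//   2. call Deoptimizer::New() for this frame (fp-to-sp delta in a3);
//   3. copy the optimized frame into the input FrameDescription (pop loop);
//   4. call Deoptimizer::ComputeOutputFrames();
//   5. push every output FrameDescription back onto the stack (the nested
//      outer/inner push loops);
//   6. restore registers from the last output frame and jump to the
//      continuation (or fall through when it is zero).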
void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}

void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
    MacroAssembler* masm) {
  // ...
  ResetSharedFunctionInfoAge(masm, code_obj);

  __ LoadTrustedPointerField(
      code_obj,
      FieldMemOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset),
      kUnknownIndirectPointerTag);
  // ...

  // For OSR entry it is safe to assume we have baseline code.
  if (v8_flags.debug_code) {
    __ GetObjectType(code_obj, t2, t2);
    __ Assert(eq, AbortReason::kExpectedBaselineData, t2, Operand(CODE_TYPE));
  }

  // Load the feedback cell and vector from the closure.
  __ LoadTaggedField(feedback_cell, /* ... */);
  // ...

  Label install_baseline_code;
  // Check that the feedback vector is allocated; if not, install baseline
  // code via the runtime.
  __ JumpIfObjectType(&install_baseline_code, ne, feedback_vector,
                      FEEDBACK_VECTOR_TYPE, t2);
  // ...

  // Replace the bytecode offset in the frame with the feedback cell, then
  // cache the feedback vector in its frame slot.
  __ St_d(feedback_cell, /* ... */);
  // ...
  __ St_d(feedback_vector, /* ... */);
  feedback_vector = no_reg;
  // ...

  // Compute the baseline PC for the next executed bytecode.
  __ li(get_baseline_pc,
        ExternalReference::baseline_pc_for_next_executed_bytecode());
  // ...
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(3, 0, a4);
    __ CallCFunction(get_baseline_pc, 3, 0);
  }
  // ...
  Generate_OSREntry(masm, code_obj);
  // ...

  __ bind(&install_baseline_code);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // ...
    __ CallRuntime(Runtime::kInstallBaselineCode, 1);
    // ...
  }
  // ...
}
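// Note: a rough sketch of the interpreter-to-baseline OSR above, assuming
// the frame-constant names used in this file (illustrative only):
//
//   code = sfi->trusted_function_data;           // baseline Code object
//   fp[kBytecodeOffsetFromFp] = feedback_cell;   // slot is repurposed
//   fp[kFeedbackVectorFromFp] = feedback_vector;
//   pc = baseline_pc_for_next_executed_bytecode(code, bytecode_offset);
//   goto code->instruction_start() + pc;         // Generate_OSREntry
//
// When the feedback vector has not been allocated yet, the builtin instead
// calls Runtime::kInstallBaselineCode and retries.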
void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
  // Restart the current frame: leave it and call its function again.
  // ...
  __ LeaveFrame(StackFrame::INTERPRETED);
  // ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...