29#if V8_ENABLE_WEBASSEMBLY
39#define __ ACCESS_MASM(masm)
42 int formal_parameter_count,
Address address) {
// Distinguishes whether stack arguments are raw tagged values or handles
// that must be dereferenced before being pushed.
56enum class ArgumentsElementType {
// Pushes |argc| arguments from |array| onto the stack. For kHandle elements
// the handle is loaded/dereferenced first (see the branch below).
61void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
 63 ArgumentsElementType element_type) {
 65 UseScratchRegisterScope temps(masm);
 73 if (element_type == ArgumentsElementType::kHandle) {
// SetCC so the surrounding loop can branch on the decremented counter's flags.
 78 __ sub(counter, counter, Operand(1),
SetCC);
// Shared body for the builtins construct stub: checks for stack overflow,
// pushes the (raw) arguments plus the hole inside a CONSTRUCT frame, and
// throws through the runtime if the stack check fails.
82void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
 96 __ StackOverflowCheck(r0, scratch, &stack_overflow);
 100 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
 117 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kRaw);
// The hole stands in for the not-yet-created receiver.
 119 __ PushRoot(RootIndex::kTheHoleValue);
 135 __ DropArguments(scratch);
 138 __ bind(&stack_overflow);
 140 FrameScope scope(masm, StackFrame::INTERNAL);
 141 __ CallRuntime(Runtime::kThrowStackOverflow);
// Generic JS construct stub: optionally allocates the implicit receiver via
// FastNewObject (skipped for function kinds that derive their own receiver),
// pushes the arguments, invokes the constructor, then validates the returned
// value (must be a JSReceiver, otherwise the implicit receiver is used or a
// TypeError is thrown).
// Defect fixed: the label reference "&not_create_implicit_receiver" had been
// corrupted into the mis-encoded character sequence "¬_create_implicit_receiver"
// (HTML entity collapse of "&not"); restored the correct C++ token.
149void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
 161 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
 162 __ EnterFrame(StackFrame::CONSTRUCT);
 165 __ LoadRoot(r4, RootIndex::kTheHoleValue);
 166 __ Push(
cp, r0, r1, r4, r3);
// Decide from the function kind whether an implicit receiver is created.
 178 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r4);
 182 &not_create_implicit_receiver);
 185 __ CallBuiltin(Builtin::kFastNewObject);
 186 __ b(&post_instantiation_deopt_entry);
 189 __ bind(&not_create_implicit_receiver);
// The hole marks "no implicit receiver" for derived constructors.
 190 __ LoadRoot(r0, RootIndex::kTheHoleValue);
 200 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
 202 __ bind(&post_instantiation_deopt_entry);
 224 Label stack_overflow;
 225 __ StackOverflowCheck(r0, r5, &stack_overflow);
 235 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kRaw);
 246 Label use_receiver, do_throw, leave_and_return, check_receiver;
// If the constructor returned undefined, fall back to the implicit receiver.
 249 __ JumpIfNotRoot(r0, RootIndex::kUndefinedValue, &check_receiver);
 256 __ bind(&use_receiver);
 258 __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);
 260 __ bind(&leave_and_return);
 264 __ LeaveFrame(StackFrame::CONSTRUCT);
 267 __ DropArguments(r1);
 270 __ bind(&check_receiver);
 272 __ JumpIfSmi(r0, &use_receiver);
// Relies on JSReceiver types forming the top of the instance-type range.
 276 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
 277 __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
 278 __ b(
ge, &leave_and_return);
 284 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
 287 __ bind(&stack_overflow);
 290 __ CallRuntime(Runtime::kThrowStackOverflow);
// Thin wrapper: the builtins construct stub is entirely implemented by the
// shared helper above.
295void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
 296 Generate_JSBuiltinsConstructStubHelper(masm);
// Tail of AssertCodeIsBaseline: verifies the code object's kind field is
// CodeKind::BASELINE (debug-only Assert).
// Defect fixed: the label reference "&not_baseline" had been corrupted into
// the mis-encoded character sequence "¬_baseline" (HTML entity collapse of
// "&not"); restored the correct C++ token.
304 __ DecodeField<Code::KindField>(scratch);
305 __ cmp(scratch, Operand(
static_cast<int>(CodeKind::BASELINE)));
306 __ Assert(
eq, AbortReason::kExpectedBaselineData);
// Resolves an SFI's executable data: jumps to |is_baseline| when it holds
// baseline Code, to |is_unavailable| when neither bytecode nor interpreter
// data is present, and otherwise falls through with the bytecode array.
310 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
311 Label* is_baseline, Label* is_unavailable) {
// Inspect the data object's map to classify it by instance type.
 319 __ LoadMap(scratch1, data);
 323 __ cmp(scratch1, Operand(CODE_TYPE));
 326 __ b(
ne, &not_baseline);
 328 __ b(
eq, is_baseline);
 329 __ bind(&not_baseline);
 331 __ b(
eq, is_baseline);
 335 __ cmp(scratch1, Operand(BYTECODE_ARRAY_TYPE));
 338 __ cmp(scratch1, Operand(INTERPRETER_DATA_TYPE));
 339 __ b(
ne, is_unavailable);
// Resumes a suspended JSGeneratorObject (in r1): stores the input/debug
// value, optionally detours through the runtime for the debugger, copies the
// parameters-and-registers fixed array onto the stack, and tail-calls the
// generator's function. Throws on stack overflow.
346void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Record the resume value into the generator with a write barrier.
 354 __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0,
 357 __ AssertGeneratorObject(r1);
 363 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
 364 Label stepping_prepared;
// A non-zero debug hook means the debugger wants to intercept the call.
 368 ExternalReference debug_hook =
 369 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
 370 __ Move(scratch, debug_hook);
 372 __ cmp(scratch, Operand(0));
 373 __ b(
ne, &prepare_step_in_if_stepping);
// Also detour if this exact generator is flagged as debug-suspended.
 377 ExternalReference debug_suspended_generator =
 378 ExternalReference::debug_suspended_generator_address(masm->isolate());
 379 __ Move(scratch, debug_suspended_generator);
 381 __ cmp(scratch, Operand(r1));
 382 __ b(
eq, &prepare_step_in_suspended_generator);
 383 __ bind(&stepping_prepared);
 387 Label stack_overflow;
 390 __ b(
lo, &stack_overflow);
// Push the saved parameters/registers array contents as arguments.
 406 FieldMemOperand(r1, JSGeneratorObject::kParametersAndRegistersOffset));
 408 Label done_loop, loop;
 410 __ sub(r3, r3, Operand(1),
SetCC);
 411 __ b(
lt, &done_loop);
 425 Label is_baseline, is_unavailable, ok;
 431 __ bind(&is_unavailable);
 432 __ Abort(AbortReason::kMissingBytecodeArray);
 434 __ bind(&is_baseline);
// In the baseline case the data must be a Code object (debug-only check).
 435 __ CompareObjectType(r3, r3, r3, CODE_TYPE);
 436 __ Assert(
eq, AbortReason::kMissingBytecodeArray);
 445 r0, SharedFunctionInfo::kFormalParameterCountOffset));
 451 __ JumpJSFunction(r1);
// Debugger detours: call the runtime, then rejoin the normal resume path.
 454 __ bind(&prepare_step_in_if_stepping);
 456 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 459 __ PushRoot(RootIndex::kTheHoleValue);
 460 __ CallRuntime(Runtime::kDebugOnFunctionCall);
 464 __ b(&stepping_prepared);
 466 __ bind(&prepare_step_in_suspended_generator);
 468 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 470 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
 474 __ b(&stepping_prepared);
 476 __ bind(&stack_overflow);
 478 FrameScope scope(masm, StackFrame::INTERNAL);
 479 __ CallRuntime(Runtime::kThrowStackOverflow);
// Throws the "constructed non-constructable" TypeError via the runtime,
// inside an INTERNAL frame.
484void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
 485 FrameScope scope(masm, StackFrame::INTERNAL);
 487 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
// Total stack space pushed by the JS entry prologue; must agree with the
// frame constants (checked by the static_assert below).
495constexpr int kPushedStackSpace =
502static_assert(kPushedStackSpace ==
 506 "Pushed stack space and frame constants do not match. See "
 507 "frame-constants-arm.h");
// JS entry: saves callee-saved registers and the fast C-call caller FP/PC,
// sets up the outermost-JS marker and a stack handler, calls the entry
// trampoline builtin, then unwinds everything in reverse order.
 531 Label
invoke, handler_entry, exit;
// No root array yet — we are entering V8 from C++.
 537 NoRootArrayScope no_root_array(masm);
 542 __ stm(
db_w, sp, kCalleeSavedWithoutFp);
 543 pushed_stack_space +=
 571 __ LoadIsolateField(r4, IsolateFieldId::kFastCCallCallerFP);
 575 __ LoadIsolateField(r4, IsolateFieldId::kFastCCallCallerPC);
 581 __ stm(
db_w, sp, {r5, r6, r7, r8, r9, fp, lr});
// Track whether this is the outermost JS entry via the per-isolate JS entry SP.
 590 Label non_outermost_js;
 592 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
 593 __ Move(r5, js_entry_sp);
 596 __ b(
ne, &non_outermost_js);
 601 __ bind(&non_outermost_js);
// Exceptions thrown inside the invoke land here; store them in the isolate.
 615 __ bind(&handler_entry);
 619 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
 626 IsolateAddressId::kExceptionAddress, masm->isolate()));
 629 __ LoadRoot(r0, RootIndex::kException);
 635 __ PushStackHandler();
 643 DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
 644 USE(pushed_stack_space);
 645 __ CallBuiltin(entry_trampoline);
 648 __ PopStackHandler();
// Unlink this entry: restore the previous JS entry SP if we were outermost.
 652 Label non_outermost_js_2;
 655 __ b(
ne, &non_outermost_js_2);
 657 __ Move(r5, js_entry_sp);
 659 __ bind(&non_outermost_js_2);
 662 __ ldm(
ia_w, sp, {r3, r4, r5});
 663 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerFP);
 666 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerPC);
 678 __ ldm(
ia_w, sp, {fp, lr});
 683 __ ldm(
ia_w, sp, kCalleeSavedWithoutFp);
// Flush the constant pool before returning to C++.
 688 __ CheckConstPool(
true,
false);
// The three JS entry builtins differ only in frame type and the trampoline
// they dispatch to; all share Generate_JSEntryVariant.
693void Builtins::Generate_JSEntry(MacroAssembler* masm) {
 694 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
697void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
 698 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
 699 Builtin::kJSConstructEntryTrampoline);
702void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
 703 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
 704 Builtin::kRunMicrotasksTrampoline);
// Entry trampoline helper: inside an INTERNAL frame, installs the context,
// checks stack space, pushes the (handle) arguments, and calls the target
// builtin with undefined padding registers.
 731 FrameScope scope(masm, StackFrame::INTERNAL);
 735 IsolateAddressId::kContextAddress, masm->isolate());
 736 __ Move(
cp, context_address);
 744 Label enough_stack_space, stack_overflow;
 746 __ StackOverflowCheck(r6, r5, &stack_overflow);
 747 __ b(&enough_stack_space);
 748 __ bind(&stack_overflow);
 749 __ CallRuntime(Runtime::kThrowStackOverflow);
 753 __ bind(&enough_stack_space);
// Arguments arrive as handles from C++ and are dereferenced while pushing.
 761 Generate_PushArguments(masm, r4, r0, r5, ArgumentsElementType::kHandle);
 775 __ LoadRoot(r4, RootIndex::kUndefinedValue);
 784 __ CallBuiltin(builtin);
// Thin wrappers selecting the call/construct/microtask entry paths.
795void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
799void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
803void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
 810 __ TailCallBuiltin(Builtin::kRunMicrotasks);
// Tail of LeaveInterpreterFrame: drops whichever of the declared vs. actual
// argument counts is larger after tearing down the INTERPRETED frame.
// Defect fixed: the label reference "&not_jump_loop" had been corrupted into
// the mis-encoded character sequence "¬_jump_loop" (HTML entity collapse of
// "&not"); restored the correct C++ token.
 823 Register actual_params_size = scratch2;
 825 __ ldr(actual_params_size,
 830 __ cmp(params_size, actual_params_size);
 834 __ LeaveFrame(StackFrame::INTERPRETED);
 837 __ DropArguments(params_size);
// Advances |bytecode_offset| past the current bytecode, handling the
// Wide/ExtraWide prefix bytecodes by switching size tables, and jumps to
// |if_return| for return bytecodes. JumpLoop restarts at the target instead
// of advancing.
 846 Register bytecode_array,
 847 Register bytecode_offset,
 848 Register bytecode, Register scratch1,
 849 Register scratch2, Label* if_return) {
 851 Register bytecode_size_table = scratch1;
 857 Register original_bytecode_offset = scratch2;
 859 bytecode, original_bytecode_offset));
 861 __ Move(bytecode_size_table,
 862 ExternalReference::bytecode_size_table_address());
 863 __ Move(original_bytecode_offset, bytecode_offset);
// The four scaling-prefix bytecodes are required to be 0..3 so a single
// unsigned compare can detect them.
 866 Label process_bytecode;
 867 static_assert(0 ==
static_cast<int>(interpreter::Bytecode::kWide));
 868 static_assert(1 ==
static_cast<int>(interpreter::Bytecode::kExtraWide));
 869 static_assert(2 ==
static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
 872 __ cmp(bytecode, Operand(0x3));
 873 __ b(
hi, &process_bytecode);
// Bit 0 distinguishes Wide (table +1 step) from ExtraWide (+2 steps).
 874 __ tst(bytecode, Operand(0x1));
 876 __ add(bytecode_offset, bytecode_offset, Operand(1));
 877 __ ldrb(bytecode,
MemOperand(bytecode_array, bytecode_offset));
 880 __ add(bytecode_size_table, bytecode_size_table,
 884 __ add(bytecode_size_table, bytecode_size_table,
 888 __ bind(&process_bytecode);
894#define JUMP_IF_EQUAL(NAME) \
 895 __ cmp(bytecode, Operand(static_cast<int>(interpreter::Bytecode::k##NAME)), \
 905 Label
end, not_jump_loop;
 906 __ cmp(bytecode, Operand(
static_cast<int>(interpreter::Bytecode::kJumpLoop)));
 907 __ b(
ne, &not_jump_loop);
// JumpLoop: re-dispatch from the (unscaled) original offset.
 910 __ Move(bytecode_offset, original_bytecode_offset);
 913 __ bind(&not_jump_loop);
// Otherwise advance by the bytecode's size from the selected size table.
 915 __ ldrb(scratch1,
MemOperand(bytecode_size_table, bytecode));
 916 __ add(bytecode_offset, bytecode_offset, scratch1);
// Resets the SFI's age counter to zero (used for bytecode flushing decisions).
923void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
 926 __ mov(scratch, Operand(0));
// Same reset, but starting from a JSFunction: loads its SFI first.
930void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
 931 Register scratch1, Register scratch2) {
 934 ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
// Clears the OSR-urgency bits in the feedback vector's flags word.
937void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
 938 Register feedback_vector, Register scratch) {
 942 __ and_(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
// Out-of-line prologue for baseline (Sparkplug) code: loads the feedback
// vector, bumps the invocation count, builds the BASELINE frame (context,
// closure, argc, bytecode array, feedback cell/vector), and performs the
// interrupt/stack check, detouring to the runtime stack guard if needed.
950void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
 951 UseScratchRegisterScope temps(masm);
// Extra scratch registers are needed beyond the default set.
 953 temps.Include({r4, r5, r8, r9});
 957 Register closure = descriptor.GetRegisterParameter(
 958 BaselineOutOfLinePrologueDescriptor::kClosure);
 960 Register feedback_cell = temps.Acquire();
 961 Register feedback_vector = temps.Acquire();
 962 __ ldr(feedback_cell,
 964 __ ldr(feedback_vector,
 967 UseScratchRegisterScope temps(masm);
 968 Register temporary = temps.Acquire();
 969 __ AssertFeedbackVector(feedback_vector, temporary);
// Without leaptiering, tiering decisions are driven by the vector's flags.
972#ifndef V8_ENABLE_LEAPTIERING
 974 Label flags_need_processing;
 977 UseScratchRegisterScope temps(masm);
 980 flags = temps.Acquire();
 981 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
 982 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
 987 UseScratchRegisterScope temps(masm);
 988 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, temps.Acquire());
 993 UseScratchRegisterScope temps(masm);
 994 Register invocation_count = temps.Acquire();
 995 __ ldr(invocation_count,
 997 FeedbackVector::kInvocationCountOffset));
 998 __ add(invocation_count, invocation_count, Operand(1));
 999 __ str(invocation_count,
 1001 FeedbackVector::kInvocationCountOffset));
 1011 Register callee_context = descriptor.GetRegisterParameter(
 1012 BaselineOutOfLinePrologueDescriptor::kCalleeContext);
 1013 Register callee_js_function = descriptor.GetRegisterParameter(
 1014 BaselineOutOfLinePrologueDescriptor::kClosure);
 1016 UseScratchRegisterScope temps(masm);
 1017 ResetJSFunctionAge(masm, callee_js_function, temps.Acquire(),
 1020 __ Push(callee_context, callee_js_function);
 1024 Register argc = descriptor.GetRegisterParameter(
 1025 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
 1028 Register bytecodeArray = descriptor.GetRegisterParameter(
 1029 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
 1030 __ Push(argc, bytecodeArray);
 1032 UseScratchRegisterScope temps(masm);
 1033 Register scratch = temps.Acquire();
 1034 __ CompareObjectType(feedback_vector, scratch, scratch,
 1035 FEEDBACK_VECTOR_TYPE);
 1036 __ Assert(
eq, AbortReason::kExpectedFeedbackVector);
 1038 __ Push(feedback_cell);
 1039 __ Push(feedback_vector);
// Interrupt check: compare sp minus the frame size against the limit.
 1042 Label call_stack_guard;
 1043 Register frame_size = descriptor.GetRegisterParameter(
 1044 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
 1052 UseScratchRegisterScope temps(masm);
 1054 Register sp_minus_frame_size = temps.Acquire();
 1055 __ sub(sp_minus_frame_size, sp, frame_size);
 1056 Register interrupt_limit = temps.Acquire();
 1058 __ cmp(sp_minus_frame_size, interrupt_limit);
 1059 __ b(&call_stack_guard,
lo);
1066#ifndef V8_ENABLE_LEAPTIERING
 1067 __ bind(&flags_need_processing);
 1070 UseScratchRegisterScope temps(masm);
 1072 temps.Exclude(flags);
// Drop the frame before tiering up: pop fp/lr and tail into optimized code.
 1075 __ ldm(
ia_w, sp, {fp, lr});
 1076 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
 1081 __ bind(&call_stack_guard);
 1084 FrameScope frame_scope(masm, StackFrame::INTERNAL);
 1087 __ SmiTag(frame_size);
 1088 __ Push(frame_size);
 1089 __ CallRuntime(Runtime::kStackGuardWithGap);
// Deopt companion: tears down the BASELINE frame and re-enters the
// interpreter entry trampoline.
1098void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
 1111 __ LeaveFrame(StackFrame::BASELINE);
 1114 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
// Interpreter entry: sets up a standard frame, allocates and zero-initializes
// the register file, performs stack/interrupt checks, and dispatches to the
// first bytecode handler. Also handles the is_baseline and compile_lazy
// tiers discovered while resolving the SFI's data.
 1133 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
 1139 ResetSharedFunctionInfoAge(masm, r4, r8);
 1143 Label is_baseline, compile_lazy;
 1146 &is_baseline, &compile_lazy);
 1148 Label push_stack_frame;
 1150 __ LoadFeedbackVector(feedback_vector, closure, r4, &push_stack_frame);
1153#ifndef V8_ENABLE_LEAPTIERING
 1157 Label flags_need_processing;
 1158 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
 1159 flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
 1160 &flags_need_processing);
 1163 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r4);
// Bump the invocation count for tiering heuristics.
 1167 FeedbackVector::kInvocationCountOffset));
 1168 __ add(r9, r9, Operand(1));
 1170 FeedbackVector::kInvocationCountOffset));
 1182 __ bind(&push_stack_frame);
 1184 __ PushStandardFrame(closure);
// Ensure the frame (register file) fits below the real stack limit.
 1195 Label stack_overflow;
 1199 BytecodeArray::kFrameSizeOffset));
 1202 __ sub(r9, sp, Operand(r4));
 1204 __ cmp(r9, Operand(r2));
 1205 __ b(
lo, &stack_overflow);
// Fill the register file with undefined.
 1211 __ b(&loop_check,
al);
 1212 __ bind(&loop_header);
 1216 __ bind(&loop_check);
 1218 __ b(&loop_header,
ge);
 1225 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
 1231 Label stack_check_interrupt, after_stack_check_interrupt;
 1234 __ b(
lo, &stack_check_interrupt);
 1235 __ bind(&after_stack_check_interrupt);
// Main dispatch: load the handler from the dispatch table and call it.
 1242 __ bind(&do_dispatch);
 1245 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
 1253 __ RecordComment(
"--- InterpreterEntryReturnPC point ---");
 1255 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
 1262 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
 1283 __ jmp(&do_dispatch);
 1285 __ bind(&do_return);
 1290 __ bind(&stack_check_interrupt);
 1298 __ CallRuntime(Runtime::kStackGuard);
 1312 __ jmp(&after_stack_check_interrupt);
1315#ifndef V8_ENABLE_LEAPTIERING
 1316 __ bind(&flags_need_processing);
 1317 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
// Baseline code exists: install it on the closure and jump to it, unless the
// feedback vector is missing, in which case fall back to the runtime.
 1320 __ bind(&is_baseline);
1322#ifndef V8_ENABLE_LEAPTIERING
 1324 __ ldr(feedback_vector,
 1326 __ ldr(feedback_vector,
 1329 Label install_baseline_code;
 1334 __ cmp(r8, Operand(FEEDBACK_VECTOR_TYPE));
 1335 __ b(
ne, &install_baseline_code);
 1338 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
 1339 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
 1344 __ ReplaceClosureCodeWithOptimizedCode(r2, closure);
 1345 __ JumpCodeObject(r2);
 1347 __ bind(&install_baseline_code);
 1349 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
 1353 __ bind(&compile_lazy);
 1354 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
 1356 __ bind(&stack_overflow);
 1357 __ CallRuntime(Runtime::kThrowStackOverflow);
// Pushes |num_args| interpreter arguments starting at |start_address|
// (arguments are laid out in reverse in the interpreter's register file).
 1362 Register start_address,
 1366 __ sub(scratch, num_args, Operand(1));
 1368 __ sub(start_address, start_address, scratch);
 1370 __ PushArray(start_address, num_args, scratch,
// InterpreterPushArgsThenCall tail: pushes args (receiver may be undefined),
// then tail-calls the Call/CallWithSpread builtin; throws on overflow.
 1386 Label stack_overflow;
// The spread argument is excluded from the pushed count.
 1390 __ sub(r0, r0, Operand(1));
 1399 __ StackOverflowCheck(r3, r4, &stack_overflow);
 1406 __ PushRoot(RootIndex::kUndefinedValue);
 1419 __ TailCallBuiltin(Builtin::kCallWithSpread);
 1424 __ bind(&stack_overflow);
 1426 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// InterpreterPushArgsThenConstruct: pushes args and dispatches to the
// ArrayConstructor, ConstructWithSpread, or generic Construct builtin.
 1442 Label stack_overflow;
 1444 __ StackOverflowCheck(r0, r6, &stack_overflow);
 1448 __ sub(r0, r0, Operand(1));
 1451 Register argc_without_receiver = r6;
 1467 __ AssertUndefinedOrAllocationSite(r2, r5);
 1471 __ AssertFunction(r1);
 1475 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
 1478 __ TailCallBuiltin(Builtin::kConstructWithSpread);
 1482 __ TailCallBuiltin(Builtin::kConstruct);
 1485 __ bind(&stack_overflow);
 1487 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// ConstructForwardAllArgs: copies the arguments of the current or parent
// frame and tail-calls Construct.
 1495 MacroAssembler* masm, ForwardWhichFrame which_frame) {
 1500 Label stack_overflow;
 1503 switch (which_frame) {
 1515 __ StackOverflowCheck(r0, r6, &stack_overflow);
 1524 Register argc_without_receiver = r6;
 1526 __ PushArray(r4, argc_without_receiver, r5);
 1533 __ TailCallBuiltin(Builtin::kConstruct);
 1535 __ bind(&stack_overflow);
 1537 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Allocates the implicit receiver via FastNewObject for the fast-construct
// path and stores it into the frame's receiver slot.
// Defect fixed (in the builtin below): the label reference
// "&not_create_implicit_receiver" had been corrupted into the mis-encoded
// character sequence "¬_create_implicit_receiver" (HTML entity collapse of
// "&not"); restored the correct C++ token.
1545void NewImplicitReceiver(MacroAssembler* masm) {
 1563 __ Push(r0, r1, r3);
 1564 __ CallBuiltin(Builtin::kFastNewObject);
 1566 __ Move(implicit_receiver, r0);
 1574 __ str(implicit_receiver,
// Fast-construct path for JSFunctions: builds a FAST_CONSTRUCT frame,
// pushes arguments, optionally creates the implicit receiver, invokes the
// function, and validates the returned value. Non-constructors and builtin
// constructors take separate exits.
1584void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
 1585 MacroAssembler* masm) {
 1593 __ AssertFunction(r1);
// Bail out to ConstructedNonConstructable if the map lacks the constructor bit.
 1596 Label non_constructor;
 1599 __ tst(r2, Operand(Map::Bits1::IsConstructorBit::kMask));
 1600 __ b(
eq, &non_constructor);
 1603 Label stack_overflow;
 1604 __ StackOverflowCheck(r0, r2, &stack_overflow);
 1608 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
// The hole marks the still-missing receiver slot.
 1610 __ LoadRoot(r2, RootIndex::kTheHoleValue);
 1614 Register argc_without_receiver = r6;
// Builtin constructors (ConstructAsBuiltinBit) skip receiver creation.
 1625 __ tst(r2, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
 1626 __ b(
ne, &builtin_call);
 1629 Label not_create_implicit_receiver;
 1630 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r2);
 1634 &not_create_implicit_receiver);
 1635 NewImplicitReceiver(masm);
 1636 __ bind(&not_create_implicit_receiver);
 1652 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
// Validate the constructor's return value, mirroring the generic stub.
 1658 Label use_receiver, do_throw, leave_and_return, check_receiver;
 1661 __ JumpIfNotRoot(r0, RootIndex::kUndefinedValue, &check_receiver);
 1668 __ bind(&use_receiver);
 1671 __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);
 1673 __ bind(&leave_and_return);
 1675 __ LeaveFrame(StackFrame::CONSTRUCT);
 1678 __ bind(&check_receiver);
 1680 __ JumpIfSmi(r0, &use_receiver);
 1684 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
 1685 __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
 1686 __ b(
ge, &leave_and_return);
 1687 __ b(&use_receiver);
 1689 __ bind(&builtin_call);
 1692 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
 1698 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
 1701 __ bind(&stack_overflow);
 1703 __ TailCallRuntime(Runtime::kThrowStackOverflow);
 1709 __ bind(&non_constructor);
 1710 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
// Re-enters the interpreter at the current bytecode offset: computes the
// return address inside the entry trampoline (or InterpreterData's custom
// trampoline) and restores the dispatch table pointer.
 1716 Label builtin_trampoline, trampoline_loaded;
 1718 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
 1731 INTERPRETER_DATA_TYPE);
 1732 __ b(
ne, &builtin_trampoline);
 1736 __ LoadCodeInstructionStart(r2, r2);
 1737 __ b(&trampoline_loaded);
 1739 __ bind(&builtin_trampoline);
 1740 __ Move(r2, ExternalReference::
 1741 address_of_interpreter_entry_trampoline_instruction_start(
 1745 __ bind(&trampoline_loaded);
// lr = trampoline start + recorded return-PC offset.
 1746 __ add(lr, r2, Operand(interpreter_entry_return_pc_offset.value()));
 1751 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
 1761 ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
 1763 BYTECODE_ARRAY_TYPE);
 1765 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
 1783 UseScratchRegisterScope temps(masm);
 1784 Register scratch = temps.Acquire();
// Enter at the NEXT bytecode: advance the saved offset first, with special
// handling for the function-entry sentinel offset.
1793void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
 1801 Label enter_bytecode, function_entry_bytecode;
 1805 __ b(
eq, &function_entry_bytecode);
 1817 __ bind(&enter_bytecode);
 1824 __ bind(&function_entry_bytecode);
 1831 __ b(&enter_bytecode);
// A return bytecode here indicates a bug in the bytecode advance logic.
 1834 __ bind(&if_return);
 1835 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1838void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
// Continuation after deopt: restores allocatable registers from the frame,
// optionally preserving/restoring r0 (argc / the builtin's result for JS
// builtins), then jumps to the target builtin by index.
1843void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
 1844 bool javascript_builtin,
 1847 int allocatable_register_count = config->num_allocatable_general_registers();
 1848 UseScratchRegisterScope temps(masm);
 1849 Register scratch = temps.Acquire();
 1851 if (javascript_builtin) {
// Save argc before the register-restore loop clobbers r0.
 1852 __ mov(scratch, r0);
 1859 sp, config->num_allocatable_general_registers() *
kPointerSize +
// Restore registers in reverse allocation order to match how they were saved.
 1863 for (
int i = allocatable_register_count - 1;
i >= 0; --
i) {
 1864 int code = config->GetAllocatableGeneralCode(
i);
 1870 if (javascript_builtin && with_result) {
 1874 constexpr int return_value_offset =
 1877 __ add(r0, r0, Operand(return_value_offset));
 1880 __ sub(r0, r0, Operand(return_value_offset));
 1891 __ LoadEntryFromBuiltinIndex(builtin, builtin);
// The four public continuations select JS-ness and result handling.
1896void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
 1897 Generate_ContinueToBuiltinHelper(masm,
false,
false);
1900void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
 1901 MacroAssembler* masm) {
 1902 Generate_ContinueToBuiltinHelper(masm,
false,
true);
1905void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
 1906 Generate_ContinueToBuiltinHelper(masm,
true,
false);
1909void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
 1910 MacroAssembler* masm) {
 1911 Generate_ContinueToBuiltinHelper(masm,
true,
true);
// Notifies the runtime that deoptimization happened (INTERNAL frame).
1914void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
 1916 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 1917 __ CallRuntime(Runtime::kNotifyDeoptimized);
// OSR entry: jumps to |entry_address| with lr set so the OSR'd code returns
// correctly.
1927void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
 1931 __ mov(lr, entry_address);
// Which tier is requesting OSR; determines how the current frame is left.
1940enum class OsrSourceTier {
// Performs on-stack replacement: compiles (or fetches) OSR code via the
// runtime, optionally logs/traces the entry, then jumps into the optimized
// code at the OSR entry offset from its deopt data.
1945void OnStackReplacement(MacroAssembler* masm, Register maybe_target_code,
 1946 Register maybe_target_code,
 1947 Register expected_param_count) {
 1948 Label jump_to_optimized_code;
 1955 __ b(
ne, &jump_to_optimized_code);
 1960 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 1961 __ CallRuntime(Runtime::kCompileOptimizedOSR);
 1966 __ b(
ne, &jump_to_optimized_code);
 1969 __ bind(&jump_to_optimized_code);
// Only call the (slow) log/trace runtime function when the flag byte is set.
 1975 __ Move(r1, ExternalReference::address_of_log_or_trace_osr());
 1977 __ tst(r1, Operand(0xFF));
 1981 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 1983 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
 1990 if (source == OsrSourceTier::kInterpreter) {
 1993 __ LeaveFrame(StackFrame::STUB);
 2002 FieldMemOperand(r0, Code::kDeoptimizationDataOrInterpreterDataOffset));
 2004 __ LoadCodeInstructionStart(r0, r0);
 2007 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
// Interpreter and baseline OSR share the helper above.
2019void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
 2020 using D = OnStackReplacementDescriptor;
 2021 static_assert(D::kParameterCount == 2);
 2022 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
 2023 D::MaybeTargetCodeRegister(),
 2024 D::ExpectedParameterCountRegister());
2027void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
 2028 using D = OnStackReplacementDescriptor;
 2029 static_assert(D::kParameterCount == 2);
 2033 OnStackReplacement(masm, OsrSourceTier::kBaseline,
 2034 D::MaybeTargetCodeRegister(),
 2035 D::ExpectedParameterCountRegister());
2038#ifdef V8_ENABLE_MAGLEV
// Maglev entry stack check: calls the stack guard with a gap, preserving the
// new.target register across the runtime call when requested.
 2041 bool save_new_target) {
 2047 FrameScope scope(masm, StackFrame::INTERNAL);
 2049 if (save_new_target) {
 2053 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
 2054 if (save_new_target) {
// Function.prototype.apply: rearranges the receiver/thisArg/argArray and
// dispatches to CallWithArrayLike, or calls with zero args if argArray is
// null/undefined.
2064void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
 2076 __ LoadRoot(r5, RootIndex::kUndefinedValue);
 2083 __ DropArgumentsAndPushNewReceiver(r0, r5);
 2098 __ JumpIfRoot(r2, RootIndex::kNullValue, &no_arguments);
 2099 __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &no_arguments);
 2102 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
 2106 __ bind(&no_arguments);
// Function.prototype.call: shifts arguments down over the receiver and
// tail-calls the target.
2114void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
// Ensure a receiver exists even for a zero-argument call.
 2124 __ PushRoot(RootIndex::kUndefinedValue);
 2125 __ add(r0, r0, Operand(1));
 2130 __ sub(r0, r0, Operand(1));
// Reflect.apply: like apply but with explicit target/thisArgument/argumentsList.
2136void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
 2149 __ LoadRoot(r1, RootIndex::kUndefinedValue);
 2158 __ DropArgumentsAndPushNewReceiver(r0, r5);
 2172 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
// Reflect.construct: dispatches to ConstructWithArrayLike.
2175void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
 2189 __ LoadRoot(r1, RootIndex::kUndefinedValue);
 2199 __ DropArgumentsAndPushNewReceiver(r0, r4);
 2218 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
// Allocates |count| extra stack slots below the existing arguments and
// shifts the existing arguments down into them; returns (via
// |pointer_to_new_space_out|) where the new arguments should be written and
// updates |argc_in_out| to include the new count.
2227void Generate_AllocateSpaceAndShiftExistingArguments(
 2228 MacroAssembler* masm, Register count, Register argc_in_out,
 2229 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
 2232 UseScratchRegisterScope temps(masm);
 2237 __ AllocateStackSpace(new_space);
 2241 Register dest = pointer_to_new_space_out;
 2246 __ cmp(old_sp,
end);
 2254 __ add(argc_in_out, argc_in_out, count);
// CallOrConstructVarargs tail: validates the arguments FixedArray (debug
// checks), makes room on the stack, copies the elements (substituting
// undefined for holes), and tail-calls the target builtin.
 2276 __ AssertNotSmi(r2);
 2279 __ cmp(r6, Operand(FIXED_ARRAY_TYPE));
 2281 __ cmp(r6, Operand(FIXED_DOUBLE_ARRAY_TYPE));
 2283 __ cmp(r4, Operand(0));
 2287 __ Abort(AbortReason::kOperandIsNotAFixedArray);
 2292 Label stack_overflow;
 2293 __ StackOverflowCheck(r4, scratch, &stack_overflow);
 2300 Generate_AllocateSpaceAndShiftExistingArguments(masm, r4, r0, r9, r5, r6);
 2304 __ mov(r6, Operand(0));
 2305 __ LoadRoot(r5, RootIndex::kTheHoleValue);
// Replace holes with undefined using a conditional (eq) load.
 2312 __ cmp(scratch, r5);
 2314 __ LoadRoot(scratch, RootIndex::kUndefinedValue,
eq);
 2316 __ add(r6, r6, Operand(1));
 2322 __ TailCallBuiltin(target_builtin);
 2324 __ bind(&stack_overflow);
 2325 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// CallOrConstructForwardVarargs: forwards the caller's extra arguments.
// For construct mode, new.target must itself be a constructor.
 2330 CallOrConstructMode mode,
 2343 Label new_target_constructor, new_target_not_constructor;
 2344 __ JumpIfSmi(r3, &new_target_not_constructor);
 2347 __ tst(scratch, Operand(Map::Bits1::IsConstructorBit::kMask));
 2348 __ b(
ne, &new_target_constructor);
 2349 __ bind(&new_target_not_constructor);
 2352 __ EnterFrame(StackFrame::INTERNAL);
 2354 __ CallRuntime(Runtime::kThrowNotConstructor);
 2356 __ bind(&new_target_constructor);
 2359 Label stack_done, stack_overflow;
// Nothing to forward when the count is <= 0.
 2363 __ b(
le, &stack_done);
 2375 __ StackOverflowCheck(r5, scratch, &stack_overflow);
 2389 Generate_AllocateSpaceAndShiftExistingArguments(masm, r5, r0, r2, scratch,
 2399 __ sub(r5, r5, Operand(1),
SetCC);
 2406 __ bind(&stack_done);
 2408 __ TailCallBuiltin(target_builtin);
 2410 __ bind(&stack_overflow);
 2411 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// CallFunction tail: performs receiver conversion for sloppy-mode,
// non-native callees (global proxy for undefined/null, ToObject otherwise)
// before invoking the function.
 2421 __ AssertCallableFunction(r1);
// Native or strict functions take the receiver as-is.
 2432 __ tst(r3, Operand(SharedFunctionInfo::IsNativeBit::kMask |
 2433 SharedFunctionInfo::IsStrictBit::kMask));
 2434 __ b(
ne, &done_convert);
 2445 __ LoadGlobalProxy(r3);
 2447 Label convert_to_object, convert_receiver;
 2448 __ ldr(r3,
__ ReceiverOperand());
 2449 __ JumpIfSmi(r3, &convert_to_object);
 2450 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
 2451 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
 2452 __ b(
hs, &done_convert);
// undefined/null become the global proxy; everything else goes via ToObject.
 2454 Label convert_global_proxy;
 2455 __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &convert_global_proxy);
 2456 __ JumpIfNotRoot(r3, RootIndex::kNullValue, &convert_to_object);
 2457 __ bind(&convert_global_proxy);
 2460 __ LoadGlobalProxy(r3);
 2462 __ b(&convert_receiver);
 2464 __ bind(&convert_to_object);
 2469 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 2474 __ CallBuiltin(Builtin::kToObject);
 2481 __ bind(&convert_receiver);
 2483 __ str(r3,
__ ReceiverOperand());
 2485 __ bind(&done_convert);
 2495 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
// Pushes a bound function's [[BoundArguments]] below the existing stack
// arguments, checking there is enough stack space first.
2501void Generate_PushBoundArguments(MacroAssembler* masm) {
 2510 Label no_bound_arguments;
 2514 __ cmp(r4, Operand(0));
 2515 __ b(
eq, &no_bound_arguments);
 2534 UseScratchRegisterScope temps(masm);
 2535 Register remaining_stack_size = temps.Acquire();
 2540 __ LoadStackLimit(remaining_stack_size,
 2542 __ sub(remaining_stack_size, sp, remaining_stack_size);
 2545 __ cmp(remaining_stack_size, scratch);
 2550 __ EnterFrame(StackFrame::INTERNAL);
 2551 __ CallRuntime(Runtime::kThrowStackOverflow);
// Copy loop counter decremented with SetCC for the loop-exit branch.
 2566 __ sub(r4, r4, Operand(1),
SetCC);
 2575 __ bind(&no_bound_arguments);
// CallBoundFunction: installs [[BoundThis]] as receiver, pushes bound args,
// then calls the wrapped target function.
 2586 __ AssertBoundFunction(r1);
 2590 __ str(r3,
__ ReceiverOperand());
 2593 Generate_PushBoundArguments(masm);
// Generic Call dispatcher: routes by instance type to CallFunction,
// CallBoundFunction, CallProxy, CallWrappedFunction, or the
// call-as-function delegate; throws for non-callables and class constructors.
 2612 Label non_callable, class_constructor;
 2613 __ JumpIfSmi(target, &non_callable);
 2614 __ LoadMap(map, target);
 2615 __ CompareInstanceTypeRange(map, instance_type, scratch,
 2619 __ cmp(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
 2620 __ TailCallBuiltin(Builtin::kCallBoundFunction,
eq);
// Anything without the callable bit set cannot be called at all.
 2627 __ tst(flags, Operand(Map::Bits1::IsCallableBit::kMask));
 2628 __ b(
eq, &non_callable);
 2632 __ cmp(instance_type, Operand(JS_PROXY_TYPE));
 2633 __ TailCallBuiltin(Builtin::kCallProxy,
eq);
 2637 __ cmp(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
 2638 __ TailCallBuiltin(Builtin::kCallWrappedFunction,
eq);
// Class constructors are callable-looking but must not be [[Call]]ed.
 2642 __ cmp(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
 2643 __ b(
eq, &class_constructor);
// Fallback: treat the object itself as receiver and call its delegate.
 2648 __ str(target,
__ ReceiverOperand());
 2650 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
 2655 __ bind(&non_callable);
 2657 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 2659 __ CallRuntime(Runtime::kThrowCalledNonCallable);
 2664 __ bind(&class_constructor);
 2666 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 2668 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
// ConstructFunction: picks the builtins construct stub or the generic stub
// based on the SFI's ConstructAsBuiltin bit.
2674void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
 2680 __ AssertConstructor(r1);
 2681 __ AssertFunction(r1);
 2685 __ LoadRoot(r2, RootIndex::kUndefinedValue);
 2687 Label call_generic_stub;
 2692 __ tst(r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
 2693 __ b(
eq, &call_generic_stub);
 2695 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
 2697 __ bind(&call_generic_stub);
 2698 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
// ConstructBoundFunction: pushes bound arguments, then constructs the
// wrapped target.
2702void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
 2708 __ AssertConstructor(r1);
 2709 __ AssertBoundFunction(r1);
 2712 Generate_PushBoundArguments(masm);
 2721 __ TailCallBuiltin(Builtin::kConstruct);
// Generic Construct dispatcher: routes by instance type to the function,
// bound-function, or proxy construct paths, else the construct delegate;
// non-constructors go to ConstructedNonConstructable.
2725void Builtins::Generate_Construct(MacroAssembler* masm) {
 2739 Label non_constructor, non_proxy;
 2740 __ JumpIfSmi(target, &non_constructor);
 2748 __ tst(flags, Operand(Map::Bits1::IsConstructorBit::kMask));
 2749 __ b(
eq, &non_constructor);
 2753 __ CompareInstanceTypeRange(map, instance_type, scratch,
 2754 FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
 2755 __ TailCallBuiltin(Builtin::kConstructFunction,
ls);
 2759 __ cmp(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
 2760 __ TailCallBuiltin(Builtin::kConstructBoundFunction,
eq);
 2763 __ cmp(instance_type, Operand(JS_PROXY_TYPE));
 2764 __ b(
ne, &non_proxy);
 2765 __ TailCallBuiltin(Builtin::kConstructProxy);
 2768 __ bind(&non_proxy);
// Fallback: call the construct-as-constructor delegate with the object as
// receiver.
 2771 __ str(target,
__ ReceiverOperand());
 2773 __ LoadNativeContextSlot(target,
 2774 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
 2780 __ bind(&non_constructor);
 2781 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2784#if V8_ENABLE_WEBASSEMBLY
// RAII scope that saves all wasm GP and FP parameter registers on entry
// (stm/vstm) and restores them on destruction (vldm/ldm). The CHECKs verify
// the FP param registers form one contiguous range matching the Liftoff
// setup frame constants.
2786struct SaveWasmParamsScope {
 2787 explicit SaveWasmParamsScope(MacroAssembler* masm)
 2791 for (Register gp_param_reg : wasm::kGpParamRegisters) {
 2792 gp_regs.set(gp_param_reg);
 2795 for (DwVfpRegister fp_param_reg : wasm::kFpParamRegisters) {
 2796 CHECK(fp_param_reg.code() >= lowest_fp_reg.code() &&
 2797 fp_param_reg.code() <= highest_fp_reg.code());
 2801 CHECK_EQ(highest_fp_reg.code() - lowest_fp_reg.code() + 1,
 2804 WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs +
 2806 CHECK_EQ(highest_fp_reg.code() - lowest_fp_reg.code() + 1,
 2807 WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs);
 2809 __ stm(db_w, sp, gp_regs);
 2810 __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
// Restore in reverse order of the saves above.
 2812 ~SaveWasmParamsScope() {
 2813 __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
 2814 __ ldm(ia_w, sp, gp_regs);
 2818 DwVfpRegister lowest_fp_reg;
 2819 DwVfpRegister highest_fp_reg;
 2820 MacroAssembler* masm;
// Liftoff frame setup: loads (or allocates via the runtime, with params
// saved) the function's feedback vector before entering Liftoff code.
2829void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
 2830 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
 2833 Label allocate_vector, done;
 2837 WasmTrustedInstanceData::kFeedbackVectorsOffset));
// A Smi in the vector slot means "not yet allocated".
 2840 __ JumpIfSmi(vector, &allocate_vector);
 2846 __ bind(&allocate_vector);
 2855 SaveWasmParamsScope save_params(masm);
 2858 __ SmiTag(func_index);
 2864 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
// Lazy wasm compilation: saves the wasm parameter registers, calls the
// runtime to compile the function, then jumps into the jump table entry.
2873void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Hard abort because we cannot unwind a partially-set-up wasm frame safely.
 2878 HardAbortScope hard_abort(masm);
 2879 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
 2882 SaveWasmParamsScope save_params(masm);
 2891 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
 2902 WasmTrustedInstanceData::kJumpTableStartOffset));
// Wasm debug break: spills all wasm registers in a WASM_DEBUG_BREAK frame
// and calls the runtime so the debugger can inspect them.
2910void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
 2911 HardAbortScope hard_abort(masm);
 2913 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
 2916 constexpr DwVfpRegister last =
 2918 constexpr DwVfpRegister first =
 2922 last.code() - first.code() + 1,
 2923 "All registers in the range from first to last have to be set");
 2927 constexpr DwVfpRegister lowest_fp_reg = first;
 2928 constexpr DwVfpRegister highest_fp_reg = last;
 2933 __ vstm(
db_w, sp, lowest_fp_reg, highest_fp_reg);
 2938 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
 2941 __ vldm(
ia_w, sp, lowest_fp_reg, highest_fp_reg);
// Transitions a wasm stack object between states (jmpbuf state field),
// skipping the store when it is already in |old_state|.
2950void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
 2955 __ JumpIfEqual(tmp, old_state, &ok);
 2958 __ mov(tmp, Operand(new_state));
// Points sp at the given stack's jump buffer's saved stack pointer.
2963void SwitchStackPointer(MacroAssembler* masm, Register stack) {
// Records the current sp/fp and |target| (as resume pc) into the stack's
// jump buffer.
2967void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
 2975 __ GetLabelAddress(tmp, target);
// Restores sp (and optionally pc) from a stack's jump buffer.
2980void LoadJumpBuffer(MacroAssembler* masm, Register stack,
bool load_pc,
 2982 SwitchStackPointer(masm, stack);
// Switches to |target_stack| without jumping: clears the GC scan slot count
// first so the stack-switch frame is not scanned stale.
2993void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
 2996 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
 2998 LoadJumpBuffer(masm, target_stack,
false, tmp, expected_state);
// Calls the C++ stack-switching hook (wasm_switch_stacks / wasm_return_switch),
// preserving the registers in |keep| across the C call.
3002void SwitchStacks(MacroAssembler* masm, Register old_stack,
bool return_switch,
 3003 const std::initializer_list<Register> keep) {
 3004 using ER = ExternalReference;
 3006 for (
auto reg : keep) {
 3011 __ PrepareCallCFunction(2);
 3017 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
// Pop in reverse order to mirror the pushes above.
 3020 for (
auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
// Returns to the parent of the active stack: saves state, makes the parent
// the active stack, and switches to it (preserving return/context registers).
3025void ReloadParentStack(MacroAssembler* masm, Register return_reg,
 3026 Register return_value, Register context, Register tmp1,
 3027 Register tmp2, Register tmp3) {
 3029 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
 3035 UseScratchRegisterScope temps(masm);
 3036 Register scratch = temps.Acquire();
 3044 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
 3047 SwitchStacks(masm, active_stack,
true,
 3048 {return_reg, return_value,
context, parent});
// Makes the active suspender's parent the new active suspender root.
3052void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
 3054 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
 3059 int32_t active_suspender_offset =
 3061 RootIndex::kActiveSuspender);
// Zeroes the stack-switch frame's result-array and implicit-arg slots so the
// GC never sees stale values in them.
3065void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
 3066 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
 3067 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3071class RegisterAllocator {
3075 Scoped(RegisterAllocator* allocator, Register*
reg)
3086 void Ask(Register*
reg) {
3093 bool registerIsAvailable(
const Register&
reg) {
return available_.has(
reg); }
3095 void Pinned(
const Register& requested, Register*
reg) {
3096 DCHECK(registerIsAvailable(requested));
3110 void Reserve(
const Register&
reg) {
3118 void Reserve(
const Register& reg1,
const Register& reg2,
3119 const Register& reg3 =
no_reg,
const Register& reg4 =
no_reg,
3120 const Register& reg5 =
no_reg,
const Register& reg6 =
no_reg) {
3129 bool IsUsed(
const Register&
reg) {
3133 void ResetExcept(
const Register& reg1 =
no_reg,
const Register& reg2 =
no_reg,
3134 const Register& reg3 =
no_reg,
const Register& reg4 =
no_reg,
3135 const Register& reg5 =
no_reg,
3136 const Register& reg6 =
no_reg) {
3147 if (registerIsAvailable(**it)) {
3156 static RegisterAllocator WithAllocatableGeneralRegisters() {
3160 for (
int i = 0;
i < config->num_allocatable_general_registers(); ++
i) {
3161 int code = config->GetAllocatableGeneralCode(
i);
3163 list.set(candidate);
3165 return RegisterAllocator(list);
3174#define DEFINE_REG(Name) \
3175 Register Name = no_reg; \
3178#define DEFINE_REG_W(Name) \
3182#define ASSIGN_REG(Name) regs.Ask(&Name);
3184#define ASSIGN_REG_W(Name) \
3188#define DEFINE_PINNED(Name, Reg) \
3189 Register Name = no_reg; \
3190 regs.Pinned(Reg, &Name);
3192#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
3194#define DEFINE_SCOPED(Name) \
3196 RegisterAllocator::Scoped scope_##Name(®s, &Name);
3198#define FREE_REG(Name) regs.Free(&Name);
3202void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3205 __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3208 __ b(
eq, &instance);
3215 FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
3221void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3238 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3241void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3245void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3246 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3248 __ EnterFrame(StackFrame::STACK_SWITCH);
3257 ResetStackSwitchFrameStackSlots(masm);
3264 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3266 FillJumpBuffer(masm, stack, &resume, scratch);
3269 regs.ResetExcept(suspender, stack);
3272 __ ldr(suspender_stack,
3281 __ cmp(suspender_stack, stack);
3292 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3295 parent,
FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
3296 int32_t active_suspender_offset =
3298 RootIndex::kActiveSuspender);
3300 regs.ResetExcept(suspender, caller, stack);
3305 SwitchStacks(masm, stack,
false, {caller, suspender});
3311 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3312 __ Zero(GCScanSlotPlace);
3319 __ LeaveFrame(StackFrame::STACK_SWITCH);
3328void Generate_WasmResumeHelper(MacroAssembler* masm,
wasm::OnResume on_resume) {
3329 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3330 __ EnterFrame(StackFrame::STACK_SWITCH);
3338 ResetStackSwitchFrameStackSlots(masm);
3340 regs.ResetExcept(closure);
3358 FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
3362 regs.ResetExcept(suspender);
3369 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3371 FillJumpBuffer(masm, active_stack, &suspend, scratch);
3379 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3380 __ StoreTaggedField(
3383 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3386 int32_t active_suspender_offset =
3388 RootIndex::kActiveSuspender);
3396 suspender = target_stack;
3397 __ ldr(target_stack,
3401 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3402 SwitchStacks(masm, active_stack,
false, {target_stack});
3403 regs.ResetExcept(target_stack);
3413 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3414 __ Zero(GCScanSlotPlace);
3417 LoadJumpBuffer(masm, target_stack,
false, scratch,
3421 __ LeaveFrame(StackFrame::STACK_SWITCH);
3424 __ CallRuntime(Runtime::kThrow);
3427 LoadJumpBuffer(masm, target_stack,
true, scratch,
3434 __ LeaveFrame(StackFrame::STACK_SWITCH);
3441void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3445void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3449void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3455void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
3456 Register wasm_instance, Register wrapper_buffer,
3457 Register& original_fp, Register& new_wrapper_buffer,
3459 ResetStackSwitchFrameStackSlots(masm);
3462 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3466 FillJumpBuffer(masm, parent_stack, suspend, scratch);
3467 SwitchStacks(masm, parent_stack,
false, {wasm_instance, wrapper_buffer});
3472 regs.Pinned(r9, &original_fp);
3473 __ Move(original_fp, fp);
3474 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3475 LoadTargetJumpBuffer(masm, target_stack, scratch,
3483 __ EnterFrame(StackFrame::STACK_SWITCH);
3487 JSToWasmWrapperFrameConstants::kWrapperBufferSize,
3489 __ sub(sp, sp, Operand(stack_space));
3490 __ EnforceStackAlignment();
3494 __ Move(new_wrapper_buffer, sp);
3499 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3502 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3506 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3510 JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3515 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3520 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3526 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3532 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3536void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
3542 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3550 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3552 promise,
FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3555 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3558 ReloadParentStack(masm, promise, return_value,
kContextRegister, tmp, tmp2,
3560 RestoreParentSuspender(masm, tmp);
3563 __ Move(tmp, Operand(1));
3565 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3567 __ CallBuiltin(Builtin::kFulfillPromise);
3572 __ bind(return_promise);
3575void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3576 RegisterAllocator& regs,
3577 Label* return_promise) {
3579 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3586 thread_in_wasm_flag_addr = r2;
3590 thread_in_wasm_flag_addr,
3597 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3599 promise,
FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
3605 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3607 ReloadParentStack(masm, promise, reason,
kContextRegister, tmp, tmp2, tmp3);
3608 RestoreParentSuspender(masm, tmp);
3610 __ Move(tmp, Operand(1));
3612 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3614 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3615 __ CallBuiltin(Builtin::kRejectPromise);
3619 __ jmp(return_promise);
3621 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3624void JSToWasmWrapperHelper(MacroAssembler* masm,
wasm::Promise mode) {
3626 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3628 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3629 : StackFrame::JS_TO_WASM);
3631 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3636 __ ldr(implicit_arg,
3637 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3646 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
3647 original_fp, new_wrapper_buffer, &suspend);
3650 new_wrapper_buffer = wrapper_buffer;
3653 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
3654 new_wrapper_buffer);
3657 __ str(new_wrapper_buffer,
3658 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3660 __ str(implicit_arg,
3661 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3666 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3668 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3674 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
3675 kWrapperBufferStackReturnBufferSize));
3683 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
3695 int stack_params_offset =
3699 stack_params_offset += param_padding;
3703 __ ldr(params_start,
3705 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3711 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3714 __ add(last_stack_param, params_start, Operand(stack_params_offset));
3716 __ bind(&loop_start);
3718 Label finish_stack_params;
3719 __ cmp(last_stack_param, params_end);
3720 __ b(
ge, &finish_stack_params);
3728 __ jmp(&loop_start);
3730 __ bind(&finish_stack_params);
3733 size_t next_offset = 0;
3743 next_offset += param_padding;
3749 DCHECK_EQ(next_offset, stack_params_offset);
3754 __ ldr(thread_in_wasm_flag_addr,
3758 __ Move(scratch, Operand(1));
3759 __ str(scratch,
MemOperand(thread_in_wasm_flag_addr, 0));
3762 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3767 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3768 __ CallWasmCodePointer(call_target);
3777 __ ldr(thread_in_wasm_flag_addr,
3783 __ ldr(wrapper_buffer,
3784 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3789 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
3793 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
3797 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
3801 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
3807 __ ldr(r1,
MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3808 __ ldr(r0,
MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3811 fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3813 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3816 GetContextFromImplicitArg(masm, r0, scratch);
3818 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3820 Label return_promise;
3822 SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
3826 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3827 : StackFrame::JS_TO_WASM);
3841 GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
3844 __ CheckConstPool(
true,
false);
3848void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3852void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3856void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
3862static constexpr Register kOldSPRegister = r7;
3863static constexpr Register kSwitchFlagRegister = r8;
3865void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
3866 Register target_input,
3867 Register argv_input) {
3868 using ER = ExternalReference;
3870 __ Move(kOldSPRegister, sp);
3875 ER on_central_stack_flag_loc = ER::Create(
3876 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
3877 __ Move(kSwitchFlagRegister, on_central_stack_flag_loc);
3878 __ ldrb(kSwitchFlagRegister,
MemOperand(kSwitchFlagRegister));
3880 Label do_not_need_to_switch;
3881 __ cmp(kSwitchFlagRegister, Operand(0));
3882 __ b(
ne, &do_not_need_to_switch);
3887 DCHECK(!
AreAliased(central_stack_sp, argc_input, argv_input, target_input));
3889 __ Push(argc_input);
3890 __ Push(target_input);
3891 __ Push(argv_input);
3892 __ PrepareCallCFunction(2);
3895 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
3899 __ Pop(target_input);
3905 __ sub(sp, central_stack_sp, Operand(kReturnAddressSlotOffset +
kPadding));
3906 __ EnforceStackAlignment();
3914 __ bind(&do_not_need_to_switch);
3917void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
3918 using ER = ExternalReference;
3920 Label no_stack_change;
3922 __ cmp(kSwitchFlagRegister, Operand(0));
3923 __ b(
ne, &no_stack_change);
3927 __ PrepareCallCFunction(1);
3929 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
3934 __ Move(sp, kOldSPRegister);
3936 __ bind(&no_stack_change);
3944 ArgvMode argv_mode,
bool builtin_exit_frame,
3945 bool switch_to_central_stack) {
3956 using ER = ExternalReference;
3960 static constexpr Register target_fun = r5;
3961 static constexpr Register argv = r1;
3962 static constexpr Register scratch = r3;
3963 static constexpr Register argc_sav = r4;
3965 __ mov(target_fun, Operand(r1));
3969 __ mov(argv, Operand(r2));
3980 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
3983 __ mov(argc_sav, Operand(argc_input));
3992 int frame_alignment_mask = frame_alignment - 1;
3995 Label alignment_as_expected;
3997 __ tst(sp, Operand(frame_alignment_mask));
3998 __ b(
eq, &alignment_as_expected);
4001 __ bind(&alignment_as_expected);
4006#if V8_ENABLE_WEBASSEMBLY
4007 if (switch_to_central_stack) {
4008 SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
4017 __ StoreReturnAddressAndCall(target_fun);
4022 Label exception_returned;
4023 __ CompareRoot(r0, RootIndex::kException);
4024 __ b(
eq, &exception_returned);
4026#if V8_ENABLE_WEBASSEMBLY
4027 if (switch_to_central_stack) {
4028 SwitchFromTheCentralStackIfNeeded(masm);
4036 ER exception_address =
4037 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4038 __ ldr(scratch,
__ ExternalReferenceAsOperand(exception_address,
no_reg));
4039 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
4051 __ LeaveExitFrame(scratch);
4060 __ bind(&exception_returned);
4062 ER pending_handler_context_address = ER::Create(
4063 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4064 ER pending_handler_entrypoint_address = ER::Create(
4065 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4066 ER pending_handler_fp_address =
4067 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4068 ER pending_handler_sp_address =
4069 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4075 __ PrepareCallCFunction(3, 0);
4079 __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
4084 __ Move(
cp, pending_handler_context_address);
4086 __ Move(sp, pending_handler_sp_address);
4088 __ Move(fp, pending_handler_fp_address);
4093 __ cmp(
cp, Operand(0));
4097 ER c_entry_fp_address =
4098 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4100 __ str(scratch,
__ ExternalReferenceAsOperand(c_entry_fp_address,
no_reg));
4103 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
4104 __ ldr(scratch,
__ ExternalReferenceAsOperand(
4105 pending_handler_entrypoint_address,
no_reg));
4109#if V8_ENABLE_WEBASSEMBLY
4110void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4111 using ER = ExternalReference;
4112 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4123 FrameScope scope(masm, StackFrame::INTERNAL);
4125 __ PrepareCallCFunction(5);
4128 __ CallCFunction(ER::wasm_grow_stack(), 5);
4135 __ b(
eq, &call_runtime);
4142 UseScratchRegisterScope temps(masm);
4143 Register scratch = temps.Acquire();
4150 __ bind(&call_runtime);
4155 MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
4158 WasmTrustedInstanceData::kNativeContextOffset));
4160 __ EnterFrame(StackFrame::INTERNAL);
4163 __ CallRuntime(Runtime::kWasmStackGuard);
4164 __ LeaveFrame(StackFrame::INTERNAL);
4170void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4173 HardAbortScope hard_abort(masm);
4174 UseScratchRegisterScope temps(masm);
4178 LowDwVfpRegister double_scratch = temps.AcquireLowD();
4181 __ Push(result_reg, double_high, double_low);
4186 MemOperand input_operand(sp, kArgumentOffset);
4190 __ vldr(double_scratch, input_operand);
4191 __ vmov(double_low, double_high, double_scratch);
4194 __ TryInlineTruncateDoubleToI(result_reg, double_scratch, &done);
4196 Register scratch = temps.Acquire();
4208 __ cmp(scratch, Operand(83));
4217 __ cmp(scratch, Operand(30 - 1));
4218 __ Check(
ge, AbortReason::kUnexpectedValue);
4225 __ rsb(scratch, scratch, Operand(51),
SetCC);
4230 __ mov(result_reg, Operand(double_low,
LSL, scratch),
LeaveCC,
ls);
4235 __ mov(double_low, Operand(double_low,
LSR, scratch));
4239 __ rsb(scratch, scratch, Operand(32));
4242 __ orr(result_reg, result_reg,
4244 __ orr(result_reg, double_low, Operand(result_reg,
LSL, scratch));
4253 __ eor(result_reg, result_reg, Operand(double_high,
ASR, 31));
4254 __ add(result_reg, result_reg, Operand(double_high,
LSR, 31));
4257 __ str(result_reg, result_operand);
4260 __ Pop(result_reg, double_high, double_low);
4289 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4301 api_function_address =
4302 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4308 DCHECK(!
AreAliased(api_function_address, topmost_script_having_context, argc,
4309 func_templ, scratch));
4311 using FCA = FunctionCallbackArguments;
4312 using ER = ExternalReference;
4313 using FC = ApiCallbackExitFrameConstants;
4315 static_assert(FCA::kArgsLength == 6);
4316 static_assert(FCA::kNewTargetIndex == 5);
4317 static_assert(FCA::kTargetIndex == 4);
4318 static_assert(FCA::kReturnValueIndex == 3);
4319 static_assert(FCA::kContextIndex == 2);
4320 static_assert(FCA::kIsolateIndex == 1);
4321 static_assert(FCA::kUnusedIndex == 0);
4334 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4335 topmost_script_having_context);
4345 __ Move(scratch, ER::isolate_address());
4352 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4367 api_function_address,
4369 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
4371 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4372 StackFrame::API_CALLBACK_EXIT);
4380 __ str(argc, argc_operand);
4383 __ add(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
4384 __ str(scratch,
MemOperand(fp, FC::kFCIImplicitArgsOffset));
4387 __ add(scratch, fp, Operand(FC::kFirstArgumentOffset));
4391 __ RecordComment(
"v8::FunctionCallback's argument.");
4392 __ add(function_callback_info_arg, fp,
4393 Operand(FC::kFunctionCallbackInfoOffset));
4397 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4401 static constexpr int kSlotsToDropOnReturn =
4404 const bool with_profiling =
4407 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4408 &argc_operand, return_value_operand);
4411void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4421 using PCA = PropertyCallbackArguments;
4422 using ER = ExternalReference;
4423 using FC = ApiAccessorExitFrameConstants;
4425 static_assert(PCA::kPropertyKeyIndex == 0);
4426 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4427 static_assert(PCA::kHolderIndex == 2);
4428 static_assert(PCA::kIsolateIndex == 3);
4429 static_assert(PCA::kHolderV2Index == 4);
4430 static_assert(PCA::kReturnValueIndex == 5);
4431 static_assert(PCA::kDataIndex == 6);
4432 static_assert(PCA::kThisIndex == 7);
4433 static_assert(PCA::kArgsLength == 8);
4449 Register api_function_address = r2;
4460 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4462 __ Push(scratch, smi_zero);
4463 __ Move(scratch, ER::isolate_address());
4464 __ Push(scratch, holder);
4468 __ Push(smi_zero, name_arg);
4470 __ RecordComment(
"Load api_function_address");
4471 __ ldr(api_function_address,
4475 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4476 StackFrame::API_ACCESSOR_EXIT);
4478 __ RecordComment(
"Create v8::PropertyCallbackInfo object on the stack.");
4480 __ add(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));
4482 DCHECK(!
AreAliased(api_function_address, property_callback_info_arg, name_arg,
4485#ifdef V8_ENABLE_DIRECT_HANDLE
4490 static_assert(PCA::kPropertyKeyIndex == 0);
4491 __ mov(name_arg, property_callback_info_arg);
4494 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4500 static constexpr int kSlotsToDropOnReturn =
4501 FC::kPropertyCallbackInfoArgsLength;
4502 MemOperand*
const kUseStackSpaceConstant =
nullptr;
4504 const bool with_profiling =
true;
4506 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4507 kUseStackSpaceConstant, return_value_operand);
4510void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4523void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
4531 UseScratchRegisterScope temps(masm);
4535 __ bic(temp2, chars, Operand(0x3),
SetCC);
4537 __ add(temp2, dest, temp2);
4542 __ cmp(dest, temp2);
4547 __ mov(chars, Operand(chars,
LSL, 31),
SetCC);
4560void Generate_DeoptimizationEntry(MacroAssembler* masm,
4562 Isolate* isolate = masm->isolate();
4568 static constexpr int kDoubleRegsSize =
4573 UseScratchRegisterScope temps(masm);
4574 Register scratch = temps.Acquire();
4575 __ SaveFPRegs(sp, scratch);
4591 UseScratchRegisterScope temps(masm);
4592 Register scratch = temps.Acquire();
4594 IsolateAddressId::kCEntryFPAddress, isolate));
4598 static constexpr int kSavedRegistersAreaSize =
4605 __ add(r3, sp, Operand(kSavedRegistersAreaSize));
4610 __ PrepareCallCFunction(5);
4611 __ mov(r0, Operand(0));
4612 Label context_check;
4614 __ JumpIfSmi(r1, &context_check);
4616 __ bind(&context_check);
4617 __ mov(r1, Operand(
static_cast<int>(deopt_kind)));
4624 AllowExternalCallThatCantCauseGC scope(masm);
4625 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4641 static constexpr int kSimd128RegsOffset =
4644 UseScratchRegisterScope temps(masm);
4645 Register scratch = temps.Acquire();
4648 __ RestoreFPRegs(src_location, scratch);
4651 __ add(dst_location, r1, Operand(kSimd128RegsOffset));
4652 __ SaveFPRegsToHeap(dst_location, scratch);
4658 UseScratchRegisterScope temps(masm);
4659 Register is_iterable = temps.Acquire();
4661 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4662 __ mov(zero, Operand(0));
4667 __ add(sp, sp, Operand(kSavedRegistersAreaSize));
4679 Label pop_loop_header;
4680 __ b(&pop_loop_header);
4684 __ add(r3, r3, Operand(
sizeof(uint32_t)));
4685 __ bind(&pop_loop_header);
4687 __ b(
ne, &pop_loop);
4692 __ PrepareCallCFunction(1);
4695 AllowExternalCallThatCantCauseGC scope(masm);
4696 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
4703 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4708 __ add(r1, r4, Operand(r1,
LSL, 2));
4709 __ jmp(&outer_loop_header);
4710 __ bind(&outer_push_loop);
4714 __ jmp(&inner_loop_header);
4715 __ bind(&inner_push_loop);
4716 __ sub(r3, r3, Operand(
sizeof(uint32_t)));
4717 __ add(r6, r2, Operand(r3));
4720 __ bind(&inner_loop_header);
4722 __ b(
ne, &inner_push_loop);
4724 __ bind(&outer_loop_header);
4726 __ b(
lt, &outer_push_loop);
4736 UseScratchRegisterScope temps(masm);
4737 Register scratch = temps.Acquire();
4739 __ add(src_location, r2, Operand(kSimd128RegsOffset));
4740 __ RestoreFPRegsFromHeap(src_location, scratch);
4757 __ ldm(
ia_w, sp, restored_regs);
4760 UseScratchRegisterScope temps(masm);
4761 Register is_iterable = temps.Acquire();
4764 __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
4765 __ mov(
one, Operand(1));
4773 UseScratchRegisterScope temps(masm);
4774 Register scratch = temps.Acquire();
4790void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
4794void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
4801void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
4802 MacroAssembler* masm) {
4815 ResetSharedFunctionInfoAge(masm, code_obj, r3);
4819 SharedFunctionInfo::kTrustedFunctionDataOffset));
4823 __ CompareObjectType(code_obj, r3, r3, CODE_TYPE);
4824 __ Assert(
eq, AbortReason::kExpectedBaselineData);
4831 __ ldr(feedback_cell,
4833 __ ldr(feedback_vector,
4836 Label install_baseline_code;
4839 __ CompareObjectType(feedback_vector, r3, r3, FEEDBACK_VECTOR_TYPE);
4840 __ b(
ne, &install_baseline_code);
4849 __ str(feedback_cell,
4855 __ str(feedback_vector,
4857 feedback_vector =
no_reg;
4861 __ Move(get_baseline_pc,
4862 ExternalReference::baseline_pc_for_next_executed_bytecode());
4876 FrameScope scope(masm, StackFrame::INTERNAL);
4877 __ PrepareCallCFunction(3, 0);
4878 __ CallCFunction(get_baseline_pc, 3, 0);
4880 __ LoadCodeInstructionStart(code_obj, code_obj);
4884 Generate_OSREntry(masm, code_obj);
4887 __ bind(&install_baseline_code);
4889 FrameScope scope(masm, StackFrame::INTERNAL);
4892 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
4899void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
4907 __ LeaveFrame(StackFrame::INTERNAL);
#define Assert(condition)
#define JUMP_IF_EQUAL(NAME)
RegisterAllocator * allocator_
std::vector< Register * > allocated_registers_
#define ASSIGN_PINNED(Name, Reg)
#define DEFINE_PINNED(Name, Reg)
#define DEFINE_SCOPED(Name)
interpreter::Bytecode bytecode
#define RETURN_BYTECODE_LIST(V)
static constexpr Register HolderRegister()
static constexpr Register CallbackRegister()
ConstantPool::BlockScope BlockConstPoolScope
static constexpr int kFeedbackCellFromFp
static void Generate_InterpreterPushArgsThenConstructImpl(MacroAssembler *masm, InterpreterPushArgsMode mode)
static void Generate_CallOrConstructForwardVarargs(MacroAssembler *masm, CallOrConstructMode mode, Builtin target_builtin)
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static void Generate_InterpreterEntryTrampoline(MacroAssembler *masm, InterpreterEntryTrampolineMode mode)
static void Generate_Adaptor(MacroAssembler *masm, int formal_parameter_count, Address builtin_address)
static void Generate_CEntry(MacroAssembler *masm, int result_size, ArgvMode argv_mode, bool builtin_exit_frame, bool switch_to_central_stack)
static constexpr Builtin CallFunction(ConvertReceiverMode=ConvertReceiverMode::kAny)
static constexpr Builtin AdaptorWithBuiltinExitFrame(int formal_parameter_count)
static void Generate_MaglevFunctionEntryStackCheck(MacroAssembler *masm, bool save_new_target)
static void Generate_Call(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallFunction(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallOrConstructVarargs(MacroAssembler *masm, Builtin target_builtin)
static void Generate_CallApiCallbackImpl(MacroAssembler *masm, CallApiCallbackMode mode)
static constexpr Builtin Call(ConvertReceiverMode=ConvertReceiverMode::kAny)
static void Generate_CallBoundFunctionImpl(MacroAssembler *masm)
static void Generate_ConstructForwardAllArgsImpl(MacroAssembler *masm, ForwardWhichFrame which_frame)
static void Generate_InterpreterPushArgsThenCallImpl(MacroAssembler *masm, ConvertReceiverMode receiver_mode, InterpreterPushArgsMode mode)
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kActualArgumentsCount, kTopmostScriptHavingContext, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register TopmostScriptHavingContextRegister()
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kApiFunctionAddress, kActualArgumentsCount, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register ActualArgumentsCountRegister()
static constexpr int kContextOrFrameTypeOffset
static constexpr int kCallerSPOffset
static constexpr int kCallerFPOffset
static constexpr int kFixedSlotCountAboveFp
static constexpr int kFixedFrameSizeAboveFp
static constexpr int kConstructorOffset
static constexpr int kLengthOffset
static constexpr int kContextOffset
static const int kOsrPcOffsetIndex
static int caller_frame_top_offset()
static int output_offset()
static int input_offset()
static int output_count_offset()
static constexpr int kDirectCallerSPOffset
static constexpr int kNextExitFrameFPOffset
static constexpr int kArgvOffset
static constexpr int kArgcOffset
static constexpr int kNextFastCallFramePCOffset
static constexpr int kSPOffset
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr int kImplicitReceiverOffset
static constexpr int kContextOffset
static constexpr int simd128_registers_offset()
static int frame_size_offset()
static int continuation_offset()
static int frame_content_offset()
static int registers_offset()
static const int kMantissaBitsInTopWord
static const int kExponentBits
static const int kExponentBias
static const int kExponentShift
static constexpr int kHeaderSize
static constexpr int kMapOffset
static constexpr int kBytecodeOffsetFromFp
static constexpr uint32_t thread_in_wasm_flag_address_offset()
static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index)
static int ActivationFrameAlignment()
static V8_INLINE Operand SmiUntag(Register rm)
static V8_INLINE Operand Zero()
constexpr RegisterT first() const
constexpr RegisterT last() const
constexpr unsigned Count() const
static constexpr int8_t kNumRegisters
constexpr int8_t code() const
static const RegisterConfiguration * Default()
static constexpr Register from_code(int code)
static constexpr Register MicrotaskQueueRegister()
static constexpr Tagged< Smi > FromInt(int value)
static constexpr Tagged< Smi > zero()
static constexpr int32_t TypeToMarker(Type type)
@ OUTERMOST_JSENTRY_FRAME
static constexpr int kContextOffset
static constexpr int kArgCOffset
static constexpr int kFunctionOffset
static constexpr int OffsetOfElementAt(int index)
static constexpr int kFixedFrameSize
static constexpr int kFixedSlotCount
static constexpr int kFixedFrameSizeFromFp
static constexpr int kFrameTypeOffset
static constexpr int kFeedbackVectorFromFp
static constexpr int kBytecodeArrayFromFp
static constexpr RegList kPushedGpRegs
static constexpr DoubleRegList kPushedFpRegs
static constexpr Register GapRegister()
static constexpr Register WrapperBufferRegister()
static constexpr Register ObjectRegister()
static const int kBytecodeCount
static constexpr int SharedFunctionInfoOffsetInTaggedJSFunction()
#define ASM_CODE_COMMENT_STRING(asm,...)
#define ASM_CODE_COMMENT(asm)
#define V8_ENABLE_SANDBOX_BOOL
RegListBase< RegisterT > registers
constexpr bool IsPowerOfTwo(T value)
Node::Uses::const_iterator begin(const Node::Uses &uses)
ApiCallbackExitFrameConstants FC
FunctionCallbackArguments FCA
int invoke(const char *params)
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr int kStackStateOffset
constexpr DoubleRegister kFpReturnRegisters[]
constexpr int kStackSpOffset
constexpr int kStackFpOffset
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
constexpr int kStackParentOffset
uint32_t WasmInterpreterRuntime int64_t r0
constexpr Register kGpReturnRegisters[]
constexpr int kStackLimitOffset
constexpr int kStackPcOffset
constexpr Register no_reg
constexpr Register kRootRegister
constexpr AddrMode PreIndex
constexpr int kFunctionEntryBytecodeOffset
constexpr int kPointerSizeLog2
constexpr BlockAddrMode ia_w
const RegList kCalleeSaved
static void Generate_InterpreterEnterBytecode(MacroAssembler *masm)
RegListBase< Register > RegList
constexpr Register kJavaScriptCallTargetRegister
constexpr int kPointerSize
constexpr BlockAddrMode db_w
constexpr int kNumberOfRegisters
constexpr uint16_t kDontAdaptArgumentsSentinel
constexpr LowDwVfpRegister kLastCalleeSavedDoubleReg
constexpr Register kJavaScriptCallArgCountRegister
constexpr Register kInterpreterAccumulatorRegister
constexpr int kSystemPointerSizeLog2
constexpr int kJSArgcReceiverSlots
static void GenerateInterpreterPushArgs(MacroAssembler *masm, Register num_args, Register start_address, Register scratch)
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler *masm, Register bytecode_array, Register bytecode_offset, Register bytecode, Register scratch1, Register scratch2, Register scratch3, Label *if_return)
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
static void LeaveInterpreterFrame(MacroAssembler *masm, Register scratch1, Register scratch2)
constexpr Register kReturnRegister1
constexpr int kTaggedSizeLog2
constexpr Register kReturnRegister0
@ LAST_CALLABLE_JS_FUNCTION_TYPE
@ FIRST_CALLABLE_JS_FUNCTION_TYPE
constexpr Register kWasmImplicitArgRegister
constexpr Register kContextRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister ®1, const CPURegister ®2, const CPURegister ®3=NoReg, const CPURegister ®4=NoReg, const CPURegister ®5=NoReg, const CPURegister ®6=NoReg, const CPURegister ®7=NoReg, const CPURegister ®8=NoReg)
constexpr Register kInterpreterDispatchTableRegister
constexpr LowDwVfpRegister kDoubleRegZero
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr Register kJavaScriptCallExtraArg1Register
const RegList kJSCallerSaved
constexpr int JSParameterCount(int param_count_without_receiver)
constexpr Register kJavaScriptCallCodeStartRegister
constexpr AddrMode PostIndex
Register ReassignRegister(Register &source)
constexpr Register kWasmCompileLazyFuncIndexRegister
static void AssertCodeIsBaseline(MacroAssembler *masm, Register code, Register scratch)
static void Generate_JSEntryTrampolineHelper(MacroAssembler *masm, bool is_construct)
constexpr LowDwVfpRegister kFirstCalleeSavedDoubleReg
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2=no_reg, Register reg3=no_reg, Register reg4=no_reg, Register reg5=no_reg, Register reg6=no_reg)
@ kDefaultDerivedConstructor
constexpr Register kCArgRegs[]
constexpr int kDoubleSize
const int kNumCalleeSaved
const int kNumDoubleCalleeSaved
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler *masm, Register sfi, Register bytecode, Register scratch1, Label *is_baseline, Label *is_unavailable)
constexpr Register kInterpreterBytecodeOffsetRegister
constexpr Register kJavaScriptCallNewTargetRegister
constexpr Register kJSFunctionRegister
constexpr Register kInterpreterBytecodeArrayRegister
#define DCHECK_NE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
constexpr T RoundUp(T x, intptr_t m)
#define OFFSET_OF_DATA_START(Type)