#if V8_TARGET_ARCH_S390X
#if V8_ENABLE_WEBASSEMBLY
#define __ ACCESS_MASM(masm)
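// Debug-only check that a Code object holds baseline code: decodes
// Code::KindField and aborts with kExpectedBaselineData unless it equals
// CodeKind::BASELINE.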
  __ DecodeField<Code::KindField>(scratch);
  __ CmpS64(scratch, Operand(static_cast<int>(CodeKind::BASELINE)));
  __ Assert(eq, AbortReason::kExpectedBaselineData);
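// Loads the function's dispatched-to data and classifies it: baseline Code
// branches to `is_baseline`, a BytecodeArray (possibly via InterpreterData)
// falls through, and anything else branches to `is_unavailable`.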
    MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
    Label* is_baseline, Label* is_unavailable) {
  __ LoadMap(scratch1, data);
  __ CmpS32(scratch1, Operand(CODE_TYPE));
  __ b(ne, &not_baseline);
  __ bind(&not_baseline);
  __ CmpS32(scratch1, Operand(BYTECODE_ARRAY_TYPE));
  __ CmpS32(scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ b(ne, is_unavailable);
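// Enters OSR code: materializes the entry address plus offset in the link
// register r14 and returns into it.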
void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
  __ AddS64(r14, entry_address, offset.rm());
void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
  __ mov(scratch, Operand(0));
void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
                        Register scratch1, Register scratch2) {
  ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
                                   Register feedback_vector, Register scratch) {
  __ AndP(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
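// Tiers the current interpreter frame up into baseline code: verifies the
// SFI's baseline Code object, stores the feedback cell and vector into the
// frame, and obtains the baseline PC for the next executed bytecode via a
// C call before jumping into the baseline code.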
void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
    MacroAssembler* masm) {
  ResetSharedFunctionInfoAge(masm, code_obj, r5);
      code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset));
  __ CompareObjectType(code_obj, r5, r5, CODE_TYPE);
  __ Assert(eq, AbortReason::kExpectedBaselineData);
  __ LoadTaggedField(feedback_cell,
  Label install_baseline_code;
  __ CompareObjectType(feedback_vector, r5, r5, FEEDBACK_VECTOR_TYPE);
  __ b(ne, &install_baseline_code);
  __ StoreU64(feedback_cell,
  __ StoreU64(feedback_vector,
  __ Move(get_baseline_pc,
          ExternalReference::baseline_pc_for_next_executed_bytecode());
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(3, 0, r1);
    __ CallCFunction(get_baseline_pc, 3, 0);
  __ LoadCodeInstructionStart(code_obj, code_obj);
  Generate_OSREntry(masm, code_obj, Operand(0));
  __ bind(&install_baseline_code);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kInstallBaselineCode, 1);
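// On-stack replacement shared by the interpreter and baseline tiers: if
// `maybe_target_code` already holds optimized code it is entered directly;
// otherwise Runtime::kCompileOptimizedOSR is called to produce it first.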
enum class OsrSourceTier {
void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
                        Register maybe_target_code,
                        Register expected_param_count) {
  Label jump_to_optimized_code;
  __ CmpSmiLiteral(maybe_target_code, Smi::zero(), r0);
  __ bne(&jump_to_optimized_code);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kCompileOptimizedOSR);
  __ bne(&jump_to_optimized_code);
  __ bind(&jump_to_optimized_code);
    __ Move(r3, ExternalReference::address_of_log_or_trace_osr());
    __ tmll(r3, Operand(0xFF));
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
  if (source == OsrSourceTier::kInterpreter) {
    __ LeaveFrame(StackFrame::STUB);
      FieldMemOperand(r2, Code::kDeoptimizationDataOrInterpreterDataOffset));
  __ LoadCodeInstructionStart(r2, r2);
  Generate_OSREntry(masm, r2, Operand(r3));
    int formal_parameter_count, Address address) {
enum class ArgumentsElementType {
void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
                            ArgumentsElementType element_type) {
    if (element_type == ArgumentsElementType::kHandle) {
    __ SubS64(counter, counter, Operand(1));
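// Construct stub for JS builtin constructors: checks for stack overflow,
// sets up a CONSTRUCT frame, and pushes the arguments with the hole
// sentinel as the implicit receiver.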
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  Label stack_overflow;
  __ StackOverflowCheck(r2, scratch, &stack_overflow);
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kRaw);
    __ PushRoot(RootIndex::kTheHoleValue);
  __ DropArguments(scratch);
  __ bind(&stack_overflow);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
  __ EnterFrame(StackFrame::CONSTRUCT);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r6);
                     &not_create_implicit_receiver);
  __ CallBuiltin(Builtin::kFastNewObject);
  __ b(&post_instantiation_deopt_entry);
  __ bind(&not_create_implicit_receiver);
  __ LoadRoot(r2, RootIndex::kTheHoleValue);
  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
  __ bind(&post_instantiation_deopt_entry);
  Label stack_overflow;
  __ StackOverflowCheck(r2, r7, &stack_overflow);
  Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kRaw);
  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(r2, RootIndex::kUndefinedValue, &check_receiver);
  __ bind(&use_receiver);
  __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);
  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::CONSTRUCT);
  __ DropArguments(r3);
  __ bind(&check_receiver);
  __ JumpIfSmi(r2, &use_receiver);
  static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE);
  __ bge(&leave_and_return);
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
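// Resumes a suspended JSGeneratorObject (in r3): records the input/debug
// value, consults the debug hook and suspended-generator addresses for
// stepping, copies the parameter/register file onto the stack, and jumps
// into the generator's function.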
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
      r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset), r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
  __ AssertGeneratorObject(r3);
  __ LoadTaggedField(r6,
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(scratch, debug_hook);
  __ bne(&prepare_step_in_if_stepping);
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Move(scratch, debug_suspended_generator);
  __ CmpS64(scratch, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);
  Label stack_overflow;
  __ CmpU64(sp, scratch);
  __ blt(&stack_overflow);
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
      FieldMemOperand(r3, JSGeneratorObject::kParametersAndRegistersOffset));
    Label done_loop, loop;
    __ SubS64(r5, r5, Operand(1));
  __ LoadTaggedField(scratch,
    Label is_baseline, is_unavailable, ok;
    __ bind(&is_unavailable);
    __ Abort(AbortReason::kMissingBytecodeArray);
    __ bind(&is_baseline);
    __ CompareObjectType(r5, r5, r5, CODE_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
        r2, SharedFunctionInfo::kFormalParameterCountOffset));
    __ JumpJSFunction(r3);
  __ bind(&prepare_step_in_if_stepping);
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ LoadTaggedField(r6,
  __ b(&stepping_prepared);
  __ bind(&prepare_step_in_suspended_generator);
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ LoadTaggedField(r6,
  __ b(&stepping_prepared);
  __ bind(&stack_overflow);
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
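// JSEntry variants: establish an entry frame (saving callee-saved registers,
// with XPLINK stack-pointer-bias handling on z/OS), record the fast C call
// caller FP/PC, install a protective stack handler around the call to the
// entry trampoline, and tear everything down on exit.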
constexpr int kPushedStackSpace =
  Label invoke, handler_entry, exit;
  __ StoreMultipleP(r4, sp, MemOperand(r4, kStackPointerBias - stack_space));
      kXPLINKStackFrameExtraParamSlot *
  int pushed_stack_space = 0;
    NoRootArrayScope no_root_array(masm);
  __ mov(r0, Operand(-1));
  __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerPC);
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
                                  IsolateData::cage_base_offset());
  pushed_stack_space +=
  Label non_outermost_js;
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ Move(r7, js_entry_sp);
  __ bind(&non_outermost_js);
  __ bind(&handler_entry);
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
  __ LoadRoot(r2, RootIndex::kException);
  __ PushStackHandler();
  USE(pushed_stack_space);
  DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
  __ CallBuiltin(entry_trampoline);
  __ PopStackHandler();
  Label non_outermost_js_2;
  __ Move(r7, js_entry_sp);
  __ bind(&non_outermost_js_2);
  __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerPC);
  __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerFP);
  __ LoadMultipleP(r4, sp, MemOperand(sp, kStackPointerBias));
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtin::kJSConstructEntryTrampoline);
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtin::kRunMicrotasksTrampoline);
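// Shared helper for the entry trampolines above: loads the context from the
// isolate, checks for stack overflow, pushes the arguments (as handles that
// must be dereferenced), and calls the requested Call/Construct builtin.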
  FrameScope scope(masm, StackFrame::INTERNAL);
      IsolateAddressId::kContextAddress, masm->isolate());
  __ Move(cp, context_address);
  Label enough_stack_space, stack_overflow;
  __ StackOverflowCheck(r7, r1, &stack_overflow);
  __ b(&enough_stack_space);
  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&enough_stack_space);
  Generate_PushArguments(masm, r6, r2, r1, ArgumentsElementType::kHandle);
  __ LoadRoot(r4, RootIndex::kUndefinedValue);
  __ CallBuiltin(builtin);
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kRunMicrotasks);
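// Tears down an interpreter frame and drops the arguments, using the larger
// of the declared parameter count and the actual argument count.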
                           Register scratch2) {
  __ LoadU64(params_size,
  __ LoadU16(params_size,
  Register actual_params_size = scratch2;
  __ LoadU64(actual_params_size,
  Label corrected_args_count;
  __ CmpS64(params_size, actual_params_size);
  __ bge(&corrected_args_count);
  __ mov(params_size, actual_params_size);
  __ bind(&corrected_args_count);
  __ LeaveFrame(StackFrame::INTERPRETED);
  __ DropArguments(params_size);
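// Advances `bytecode_offset` past the current bytecode using the external
// bytecode size table. Wide/ExtraWide prefixes shift the size table to the
// widened entries, JumpLoop restores the original offset, and Return-like
// bytecodes (via JUMP_IF_EQUAL below) branch to `if_return`.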
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register original_bytecode_offset = scratch2;
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());
  __ Move(original_bytecode_offset, bytecode_offset);
  Label process_bytecode, extra_wide;
  static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
  static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  static_assert(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ CmpS64(bytecode, Operand(0x3));
  __ bgt(&process_bytecode);
  __ tmll(bytecode, Operand(0x1));
  __ bne(&extra_wide);
  __ AddS64(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadU8(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddS64(bytecode_size_table, bytecode_size_table,
  __ b(&process_bytecode);
  __ bind(&extra_wide);
  __ AddS64(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadU8(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddS64(bytecode_size_table, bytecode_size_table,
  __ bind(&process_bytecode);
#define JUMP_IF_EQUAL(NAME)                                             \
  __ CmpS64(bytecode,                                                   \
            Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  Label end, not_jump_loop;
            Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ bne(&not_jump_loop);
  __ Move(bytecode_offset, original_bytecode_offset);
  __ bind(&not_jump_loop);
  __ LoadU8(scratch3, MemOperand(bytecode_size_table, bytecode));
  __ AddS64(bytecode_offset, bytecode_offset, scratch3);
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  Register closure = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kClosure);
  __ LoadTaggedField(feedback_cell,
  __ AssertFeedbackVector(feedback_vector, r1);
#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
    UseScratchRegisterScope temps(masm);
    ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r1);
    __ LoadU32(invocation_count,
                             FeedbackVector::kInvocationCountOffset));
    __ AddU32(invocation_count, Operand(1));
    __ StoreU32(invocation_count,
                              FeedbackVector::kInvocationCountOffset));
    Register callee_context = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kCalleeContext);
    Register callee_js_function = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kClosure);
    ResetJSFunctionAge(masm, callee_js_function, r1, r0);
    __ Push(callee_context, callee_js_function);
    Register argc = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
    Register bytecodeArray = descriptor.GetRegisterParameter(
        BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
    __ Push(argc, bytecodeArray);
    __ CompareObjectType(feedback_vector, scratch, scratch,
                         FEEDBACK_VECTOR_TYPE);
    __ Assert(eq, AbortReason::kExpectedFeedbackVector);
    __ Push(feedback_cell);
    __ Push(feedback_vector);
  Label call_stack_guard;
  Register frame_size = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
  __ SubS64(sp_minus_frame_size, sp, frame_size);
  __ CmpU64(sp_minus_frame_size, interrupt_limit);
  __ blt(&call_stack_guard);
#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
  __ bind(&call_stack_guard);
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ SmiTag(frame_size);
    __ Push(frame_size);
    __ CallRuntime(Runtime::kStackGuardWithGap);
void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::BASELINE);
  __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
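// Interpreter entry trampoline: sets up an interpreted frame, allocates the
// register file, performs the stack check, and dispatches to the first
// bytecode handler. Tier-up requests are routed through the feedback-vector
// flags; missing bytecode falls back to Runtime::kCompileLazy, and existing
// baseline code is entered directly.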
    MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
  ResetSharedFunctionInfoAge(masm, r6, ip);
  Label is_baseline, compile_lazy;
                                          &is_baseline, &compile_lazy);
  Label push_stack_frame;
  __ LoadFeedbackVector(feedback_vector, closure, r6, &push_stack_frame);
#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
      &flags_need_processing);
  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, r1);
                           FeedbackVector::kInvocationCountOffset));
  __ AddS64(r1, r1, Operand(1));
                            FeedbackVector::kInvocationCountOffset));
  __ bind(&push_stack_frame);
  __ PushStandardFrame(closure);
  Label stack_overflow;
                           BytecodeArray::kFrameSizeOffset));
  __ SubS64(r8, sp, r4);
  __ blt(&stack_overflow);
    Label loop, no_args;
    __ LoadAndTestP(r4, r4);
    __ SubS64(r1, Operand(1));
  Label no_incoming_new_target_or_generator_register;
          BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ beq(&no_incoming_new_target_or_generator_register);
  __ bind(&no_incoming_new_target_or_generator_register);
  Label stack_check_interrupt, after_stack_check_interrupt;
  __ blt(&stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);
  __ bind(&do_dispatch);
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ RecordComment("--- InterpreterEntryReturnPC point ---");
    masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
        masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
  __ bind(&do_return);
  __ bind(&stack_check_interrupt);
  __ CallRuntime(Runtime::kStackGuard);
  __ jmp(&after_stack_check_interrupt);
#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
  __ bind(&is_baseline);
#ifndef V8_ENABLE_LEAPTIERING
    Label install_baseline_code;
    __ CmpS32(ip, Operand(FEEDBACK_VECTOR_TYPE));
    __ b(ne, &install_baseline_code);
    __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
        flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
    __ ReplaceClosureCodeWithOptimizedCode(r4, closure, ip, r1);
    __ JumpCodeObject(r4);
    __ bind(&install_baseline_code);
    __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
  __ bind(&compile_lazy);
  __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
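// Pushes `num_args` interpreter arguments onto the stack, walking backwards
// from `start_address`.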
                                         Register start_address,
  __ SubS64(scratch, num_args, Operand(1));
  __ SubS64(start_address, start_address, scratch);
  __ PushArray(start_address, num_args, r1, scratch,
  Label stack_overflow;
  __ SubS64(r2, r2, Operand(1));
  __ StackOverflowCheck(r5, ip, &stack_overflow);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ TailCallBuiltin(Builtin::kCallWithSpread);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  Label stack_overflow;
  __ StackOverflowCheck(r2, ip, &stack_overflow);
  __ SubS64(r2, r2, Operand(1));
  Register argc_without_receiver = ip;
  __ AssertUndefinedOrAllocationSite(r4, r7);
  __ AssertFunction(r3);
  __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
  __ TailCallBuiltin(Builtin::kConstructWithSpread);
  __ TailCallBuiltin(Builtin::kConstruct);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
    MacroAssembler* masm, ForwardWhichFrame which_frame) {
  Label stack_overflow;
  switch (which_frame) {
  __ StackOverflowCheck(r2, ip, &stack_overflow);
  Register argc_without_receiver = ip;
  __ PushArray(r6, argc_without_receiver, r1, r7);
  __ TailCallBuiltin(Builtin::kConstruct);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
void NewImplicitReceiver(MacroAssembler* masm) {
  __ Push(r2, r3, r5);
  __ CallBuiltin(Builtin::kFastNewObject);
  __ Move(implicit_receiver, r2);
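// Fast-path construct stub used by the interpreter: allocates the implicit
// receiver via FastNewObject while preserving the argument count, target,
// and new target across the call.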
void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
    MacroAssembler* masm) {
  __ AssertFunction(r3);
  Label non_constructor;
  __ TestBit(r4, Map::Bits1::IsConstructorBit::kShift);
  __ beq(&non_constructor);
  Label stack_overflow;
  __ StackOverflowCheck(r2, r4, &stack_overflow);
  __ EnterFrame(StackFrame::FAST_CONSTRUCT);
  __ LoadRoot(r4, RootIndex::kTheHoleValue);
  Register argc_without_receiver = r8;
  __ AndP(r0, r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ bne(&builtin_call);
  Label not_create_implicit_receiver;
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r4);
                     &not_create_implicit_receiver);
  NewImplicitReceiver(masm);
  __ bind(&not_create_implicit_receiver);
  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
  Label use_receiver, do_throw, leave_and_return, check_receiver;
  __ JumpIfNotRoot(r2, RootIndex::kUndefinedValue, &check_receiver);
  __ bind(&use_receiver);
  __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);
  __ bind(&leave_and_return);
  __ LeaveFrame(StackFrame::CONSTRUCT);
  __ bind(&check_receiver);
  __ JumpIfSmi(r2, &use_receiver);
  static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CompareObjectType(r2, r6, r7, FIRST_JS_RECEIVER_TYPE);
  __ bge(&leave_and_return);
  __ b(&use_receiver);
  __ bind(&builtin_call);
  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
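// Re-enters bytecode dispatch after returning into the interpreter: picks
// the interpreter trampoline (from InterpreterData when present), rebuilds
// the return address and dispatch table, and dispatches at the recorded
// bytecode offset.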
  Label builtin_trampoline, trampoline_loaded;
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
      r4, FieldMemOperand(r4, SharedFunctionInfo::kTrustedFunctionDataOffset));
                       INTERPRETER_DATA_TYPE);
  __ bne(&builtin_trampoline);
      r4, FieldMemOperand(r4, InterpreterData::kInterpreterTrampolineOffset));
  __ LoadCodeInstructionStart(r4, r4);
  __ b(&trampoline_loaded);
  __ bind(&builtin_trampoline);
  __ Move(r4, ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
  __ bind(&trampoline_loaded);
  __ AddS64(r14, r4, Operand(interpreter_entry_return_pc_offset.value()));
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
        ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
                         BYTECODE_ARRAY_TYPE);
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
  Label enter_bytecode, function_entry_bytecode;
  __ beq(&function_entry_bytecode);
  __ bind(&enter_bytecode);
  __ bind(&function_entry_bytecode);
  __ b(&enter_bytecode);
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
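// Continuation after deoptimizing into a builtin: restores the allocatable
// registers saved in the frame and re-enters the builtin by index; for
// JavaScript builtins the result value is threaded back into place.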
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool javascript_builtin,
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (javascript_builtin) {
    __ mov(scratch, r2);
        sp, config->num_allocatable_general_registers() *
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
  if (javascript_builtin && with_result) {
    constexpr int return_value_offset =
    __ AddS64(r2, r2, Operand(return_value_offset));
    __ SubS64(r2, r2, Operand(return_value_offset));
  UseScratchRegisterScope temps(masm);
  Register builtin = temps.Acquire();
  __ LoadEntryFromBuiltinIndex(builtin, builtin);
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kNotifyDeoptimized);
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  __ LoadRoot(r7, RootIndex::kUndefinedValue);
  __ DropArgumentsAndPushNewReceiver(r2, r7);
  __ JumpIfRoot(r4, RootIndex::kNullValue, &no_arguments);
  __ JumpIfRoot(r4, RootIndex::kUndefinedValue, &no_arguments);
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);
  __ bind(&no_arguments);
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  __ PushRoot(RootIndex::kUndefinedValue);
  __ AddS64(r2, r2, Operand(1));
  __ SubS64(r2, r2, Operand(1));
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  __ LoadRoot(r3, RootIndex::kUndefinedValue);
  __ DropArgumentsAndPushNewReceiver(r2, r7);
  __ TailCallBuiltin(Builtin::kCallWithArrayLike);
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  __ LoadRoot(r3, RootIndex::kUndefinedValue);
  __ DropArgumentsAndPushNewReceiver(r2, r6);
  __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
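// Allocates `count` extra stack slots and shifts the existing arguments
// down into them, returning a pointer to the freed space in
// `pointer_to_new_space_out`; `argc_in_out` is updated to include the new
// slots.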
void Generate_AllocateSpaceAndShiftExistingArguments(
    MacroAssembler* masm, Register count, Register argc_in_out,
    Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
  __ AllocateStackSpace(new_space);
  Register dest = pointer_to_new_space_out;
  __ AddS64(end, old_sp, r0);
  __ CmpS64(old_sp, end);
  __ AddS64(argc_in_out, argc_in_out, count);
  __ AssertNotSmi(r4);
  __ CmpS64(scratch, Operand(FIXED_ARRAY_TYPE));
  __ CmpS64(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
  __ Abort(AbortReason::kOperandIsNotAFixedArray);
  Label stack_overflow;
  __ StackOverflowCheck(r6, scratch, &stack_overflow);
  Generate_AllocateSpaceAndShiftExistingArguments(masm, r6, r2, r7, ip, r8);
  Label loop, no_args, skip;
  __ CompareRoot(scratch, RootIndex::kTheHoleValue);
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ BranchOnCount(r1, &loop);
  __ TailCallBuiltin(target_builtin);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
                                      CallOrConstructMode mode,
  Label new_target_constructor, new_target_not_constructor;
  __ JumpIfSmi(r5, &new_target_not_constructor);
  __ tmll(scratch, Operand(Map::Bits1::IsConstructorBit::kShift));
  __ bne(&new_target_constructor);
  __ bind(&new_target_not_constructor);
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowNotConstructor);
  __ bind(&new_target_constructor);
  Label stack_done, stack_overflow;
  __ SubS64(r7, r7, r4);
  __ ble(&stack_done);
    __ StackOverflowCheck(r7, scratch, &stack_overflow);
      __ AddS64(r6, r6, scratch);
      Generate_AllocateSpaceAndShiftExistingArguments(masm, r7, r2, r4, scratch,
      __ SubS64(r7, r7, Operand(1));
  __ bind(&stack_done);
  __ TailCallBuiltin(target_builtin);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
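// Receiver conversion for CallFunction: strict-mode and native callees take
// the receiver as-is; otherwise undefined/null are replaced with the global
// proxy and other primitives are boxed with the ToObject builtin.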
  __ AssertCallableFunction(r3);
          Operand(SharedFunctionInfo::IsStrictBit::kMask |
                  SharedFunctionInfo::IsNativeBit::kMask));
  __ bne(&done_convert);
      __ LoadGlobalProxy(r5);
      Label convert_to_object, convert_receiver;
      __ LoadReceiver(r5);
      __ JumpIfSmi(r5, &convert_to_object);
      static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
        Label convert_global_proxy;
        __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(r5, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        __ LoadGlobalProxy(r5);
        __ b(&convert_receiver);
      __ bind(&convert_to_object);
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ CallBuiltin(Builtin::kToObject);
      __ bind(&convert_receiver);
    __ StoreReceiver(r5);
  __ bind(&done_convert);
      r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
void Generate_PushBoundArguments(MacroAssembler* masm) {
    Label no_bound_arguments;
    __ LoadAndTestP(r6, r6);
    __ beq(&no_bound_arguments);
      __ SubS64(r1, sp, scratch);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      __ AddS64(r2, r2, r6);
      __ SubS64(r1, r6, Operand(1));
        __ SubS64(r6, r6, Operand(1));
    __ bind(&no_bound_arguments);
  __ AssertBoundFunction(r3);
  __ LoadTaggedField(r5,
  __ StoreReceiver(r5);
  Generate_PushBoundArguments(masm);
      r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
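// Generate_Call dispatch: JSFunctions and bound functions tail-call their
// dedicated builtins, as do proxies and wrapped functions; class
// constructors throw, and any other callable object is routed through the
// CALL_AS_FUNCTION delegate.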
  Label non_callable, class_constructor;
  __ JumpIfSmi(target, &non_callable);
  __ LoadMap(map, target);
  __ CompareInstanceTypeRange(map, instance_type, scratch,
  __ CmpS64(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
  __ TailCallBuiltin(Builtin::kCallBoundFunction, eq);
  __ TestBit(flags, Map::Bits1::IsCallableBit::kShift);
  __ beq(&non_callable);
  __ CmpS64(instance_type, Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kCallProxy, eq);
  __ CmpS64(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
  __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq);
  __ CmpS64(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
  __ beq(&class_constructor);
  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
  __ bind(&non_callable);
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  __ bind(&class_constructor);
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  __ AssertConstructor(r3, r1);
  __ AssertFunction(r3);
  __ LoadRoot(r4, RootIndex::kUndefinedValue);
  Label call_generic_stub;
  __ AndP(r6, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ beq(&call_generic_stub);
  __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
  __ bind(&call_generic_stub);
  __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  __ AssertConstructor(r3, r1);
  __ AssertBoundFunction(r3);
  Generate_PushBoundArguments(masm);
  __ CompareTagged(r3, r5);
      r5, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
      r3, FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ TailCallBuiltin(Builtin::kConstruct);
void Builtins::Generate_Construct(MacroAssembler* masm) {
  Label non_constructor, non_proxy;
  __ JumpIfSmi(target, &non_constructor);
  __ TestBit(flags, Map::Bits1::IsConstructorBit::kShift);
  __ beq(&non_constructor);
  __ CompareInstanceTypeRange(map, instance_type, scratch,
                              FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
  __ TailCallBuiltin(Builtin::kConstructFunction, le);
  __ CmpS64(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
  __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq);
  __ CmpS64(instance_type, Operand(JS_PROXY_TYPE));
  __ TailCallBuiltin(Builtin::kConstructProxy);
  __ bind(&non_proxy);
  __ StoreReceiver(target);
  __ LoadNativeContextSlot(target,
                           Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
#ifdef V8_ENABLE_MAGLEV
                                    bool save_new_target) {
    FrameScope scope(masm, StackFrame::INTERNAL);
    if (save_new_target) {
    __ CallRuntime(Runtime::kStackGuardWithGap, 1);
    if (save_new_target) {
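// --- WebAssembly builtins ---
// SaveWasmParamsScope (below) is an RAII helper that multi-pushes all Wasm
// GP and FP parameter registers around a runtime call and restores them in
// its destructor.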
#if V8_ENABLE_WEBASSEMBLY
struct SaveWasmParamsScope {
  explicit SaveWasmParamsScope(MacroAssembler* masm) : masm(masm) {
    for (Register gp_param_reg : wasm::kGpParamRegisters) {
      gp_regs.set(gp_param_reg);
    for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
      fp_regs.set(fp_param_reg);
    CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs + 1,
    CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
    __ MultiPush(gp_regs);
    __ MultiPushF64OrV128(fp_regs, r1);
  ~SaveWasmParamsScope() {
    __ MultiPopF64OrV128(fp_regs, r1);
    __ MultiPop(gp_regs);
  MacroAssembler* masm;
void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
  Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
  Label allocate_vector, done;
                        WasmTrustedInstanceData::kFeedbackVectorsOffset));
  __ AddS64(vector, vector, scratch);
  __ LoadTaggedField(vector,
  __ JumpIfSmi(vector, &allocate_vector);
  __ bind(&allocate_vector);
    SaveWasmParamsScope save_params(masm);
    __ SmiTag(func_index);
    __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      SaveWasmParamsScope save_params(masm);
      __ CallRuntime(Runtime::kWasmCompileLazy, 2);
                           WasmTrustedInstanceData::kJumpTableStartOffset));
  __ AddS64(ip, ip, r0);
void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
    __ CallRuntime(Runtime::kWasmDebugBreak, 0);
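// Stack-switching (JS Promise Integration) helpers: a stack's jump buffer
// records where execution should resume plus a stack-state tag, and
// SwitchStacks calls into the runtime to hand control between the active
// stack and a target stack.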
void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
  __ JumpIfEqual(tmp, old_state, &ok);
  __ mov(tmp, Operand(new_state));
void SwitchStackPointer(MacroAssembler* masm, Register stack) {
void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
  __ GetLabelAddress(tmp, target);
void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
  SwitchStackPointer(masm, stack);
void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
  __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  LoadJumpBuffer(masm, target_stack, false, tmp, expected_state);
void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
                  const std::initializer_list<Register> keep) {
  using ER = ExternalReference;
  for (auto reg : keep) {
  __ PrepareCallCFunction(2, r0);
      return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
  for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
void ReloadParentStack(MacroAssembler* masm, Register return_reg,
                       Register return_value, Register context, Register tmp1,
                       Register tmp2, Register tmp3) {
  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
  __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
  SwitchStacks(masm, active_stack, true,
               {return_reg, return_value, context, parent});
void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
  __ LoadRoot(suspender, RootIndex::kActiveSuspender);
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);
void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
  __ Zero(MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
          MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
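// Minimal register allocator for the stack-switching builtins: registers
// are requested with Ask(), tied to fixed hardware registers with Pinned(),
// excluded via Reserve(), and bulk-released with ResetExcept(). The
// DEFINE_*/ASSIGN_* macros below wrap these operations.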
class RegisterAllocator {
    Scoped(RegisterAllocator* allocator, Register* reg)
  void Ask(Register* reg) {
  bool registerIsAvailable(const Register& reg) { return available_.has(reg); }
  void Pinned(const Register& requested, Register* reg) {
    DCHECK(registerIsAvailable(requested));
  void Reserve(const Register& reg) {
  void Reserve(const Register& reg1, const Register& reg2,
               const Register& reg3 = no_reg, const Register& reg4 = no_reg,
               const Register& reg5 = no_reg, const Register& reg6 = no_reg) {
  bool IsUsed(const Register& reg) {
  void ResetExcept(const Register& reg1 = no_reg, const Register& reg2 = no_reg,
                   const Register& reg3 = no_reg, const Register& reg4 = no_reg,
                   const Register& reg5 = no_reg,
                   const Register& reg6 = no_reg) {
      if (registerIsAvailable(**it)) {
  static RegisterAllocator WithAllocatableGeneralRegisters() {
    for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
      int code = config->GetAllocatableGeneralCode(i);
      list.set(candidate);
    return RegisterAllocator(list);
#define DEFINE_REG(Name)  \
  Register Name = no_reg; \
#define DEFINE_REG_W(Name) \
#define ASSIGN_REG(Name) regs.Ask(&Name);
#define ASSIGN_REG_W(Name) \
#define DEFINE_PINNED(Name, Reg) \
  Register Name = no_reg;        \
  regs.Pinned(Reg, &Name);
#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
#define DEFINE_SCOPED(Name) \
  RegisterAllocator::Scoped scope_##Name(&regs, &Name);
#define FREE_REG(Name) regs.Free(&Name);
void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
  __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
      FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
    fp_regs.set(fp_param_reg);
  __ MultiPushDoubles(fp_regs);
  __ MultiPush(gp_regs);
  __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadRootRelative(stack, IsolateData::active_stack_offset());
  FillJumpBuffer(masm, stack, &resume, scratch);
  regs.ResetExcept(suspender, stack);
  __ LoadU64(suspender_stack,
  __ CmpS64(suspender_stack, stack);
  __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
      parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);
  regs.ResetExcept(suspender, caller, stack);
  SwitchStacks(masm, stack, false, {caller, suspender});
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ Zero(GCScanSlotPlace);
  __ LeaveFrame(StackFrame::STACK_SWITCH);
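// Shared body of WasmResume and WasmReject: records the current stack in
// the suspender chain, makes the suspended target stack active again, and
// either continues normally or rethrows the rejection value via
// Runtime::kThrow, depending on `on_resume`.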
void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);
  ResetStackSwitchFrameStackSlots(masm);
  regs.ResetExcept(closure);
      FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
  regs.ResetExcept(suspender);
  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
  FillJumpBuffer(masm, active_stack, &suspend, scratch);
  __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
  __ StoreTaggedField(
  __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
  int32_t active_suspender_offset =
          RootIndex::kActiveSuspender);
  suspender = target_stack;
  __ LoadU64(target_stack,
  __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
  SwitchStacks(masm, active_stack, false, {target_stack});
  regs.ResetExcept(target_stack);
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ Zero(GCScanSlotPlace);
  LoadJumpBuffer(masm, target_stack, false, scratch,
  __ LeaveFrame(StackFrame::STACK_SWITCH);
    __ CallRuntime(Runtime::kThrow);
  LoadJumpBuffer(masm, target_stack, true, scratch,
  __ LeaveFrame(StackFrame::STACK_SWITCH);
void Builtins::Generate_WasmResume(MacroAssembler* masm) {
void Builtins::Generate_WasmReject(MacroAssembler* masm) {
void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
                            Register wasm_instance, Register wrapper_buffer,
                            Register& original_fp, Register& new_wrapper_buffer,
  ResetStackSwitchFrameStackSlots(masm);
  __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
  FillJumpBuffer(masm, parent_stack, suspend, scratch);
  SwitchStacks(masm, parent_stack, false, {wasm_instance, wrapper_buffer});
  regs.Pinned(r13, &original_fp);
  __ Move(original_fp, fp);
  __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
  LoadTargetJumpBuffer(masm, target_stack, scratch,
  __ EnterFrame(StackFrame::STACK_SWITCH);
      JSToWasmWrapperFrameConstants::kWrapperBufferSize,
  __ SubS64(sp, sp, Operand(stack_space));
  __ EnforceStackAlignment();
  __ Move(new_wrapper_buffer, sp);
  static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
          JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
          JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
  static const Builtin_FulfillPromise_InterfaceDescriptor desc;
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
             MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2,
  RestoreParentSuspender(masm, tmp);
  __ mov(tmp, Operand(1));
      tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ CallBuiltin(Builtin::kFulfillPromise);
  __ bind(return_promise);
void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
                                         RegisterAllocator& regs,
                                         Label* return_promise) {
  static const Builtin_RejectPromise_InterfaceDescriptor desc;
  thread_in_wasm_flag_addr = r4;
      thread_in_wasm_flag_addr,
  __ mov(r0, Operand(0));
  __ LoadRoot(promise, RootIndex::kActiveSuspender);
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));
             MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
  ReloadParentStack(masm, promise, reason, kContextRegister, tmp, tmp2, tmp3);
  RestoreParentSuspender(masm, tmp);
  __ mov(tmp, Operand(1));
      tmp, MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ LoadRoot(debug_event, RootIndex::kTrueValue);
  __ CallBuiltin(Builtin::kRejectPromise);
  __ b(return_promise);
  masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
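// Core of the JS-to-Wasm wrapper: optionally switches to an allocated stack
// (for the promise-returning variants), copies parameters out of the
// wrapper buffer (register parameters, then stack slots), flips the
// thread-in-wasm flag around the call, and hands the results to the
// JSToWasmHandleReturns builtin.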
void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);
  __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
  __ LoadU64(implicit_arg,
             MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
    SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
                           original_fp, new_wrapper_buffer, &suspend);
    new_wrapper_buffer = wrapper_buffer;
  regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
                   new_wrapper_buffer);
      MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
      MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
          JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
        MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
      MemOperand(wrapper_buffer,
                 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferSize));
  __ SubS64(sp, sp, r0);
          JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
  int stack_params_offset =
          JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
          JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
  __ AddS64(last_stack_param, params_start, Operand(stack_params_offset));
  __ bind(&loop_start);
  Label finish_stack_params;
  __ CmpS64(last_stack_param, params_end);
  __ bge(&finish_stack_params);
  __ jmp(&loop_start);
  __ bind(&finish_stack_params);
  size_t next_offset = 0;
  DCHECK_EQ(next_offset, stack_params_offset);
  __ LoadU64(thread_in_wasm_flag_addr,
  __ mov(scratch, Operand(1));
  __ Zero(MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
  __ LoadWasmCodePointer(
          JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
  __ CallWasmCodePointer(call_target);
  __ LoadU64(thread_in_wasm_flag_addr,
  __ mov(r0, Operand(0));
      MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
          JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
          JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
          JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
          JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
      MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
      MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
      MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
      r2, MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
  GetContextFromImplicitArg(masm, r2, scratch);
  __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
  Label return_promise;
    SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
  __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);
    GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
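// CEntry may be reached while running on a secondary Wasm stack; C functions
// must execute on the central stack, so these helpers switch over (stashing
// the old SP in kOldSPRegister) and back again afterwards.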
static constexpr Register kOldSPRegister = r13;
void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
                                     Register target_input,
                                     Register argv_input) {
  using ER = ExternalReference;
  __ mov(kOldSPRegister, Operand(0));
  ER on_central_stack_flag_loc = ER::Create(
      IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
  __ Move(r1, on_central_stack_flag_loc);
  Label do_not_need_to_switch;
  __ CmpU32(r1, Operand(0));
  __ bne(&do_not_need_to_switch);
  __ Move(kOldSPRegister, sp);
  DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));
    __ Push(argc_input);
    __ Push(target_input);
    __ Push(argv_input);
    __ PrepareCallCFunction(2, r0);
    __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
    __ Pop(target_input);
  __ SubS64(sp, central_stack_sp,
            Operand(kReturnAddressSlotOffset + kPadding));
  __ EnforceStackAlignment();
  __ bind(&do_not_need_to_switch);
void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
  using ER = ExternalReference;
  Label no_stack_change;
  __ CmpU64(kOldSPRegister, Operand(0));
  __ beq(&no_stack_change);
  __ Move(sp, kOldSPRegister);
  __ PrepareCallCFunction(1, r0);
  __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
  __ bind(&no_stack_change);
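// CEntry: calls a C++ runtime function, with the result returned in r2 (a
// return buffer on the stack is used when the result does not fit in
// registers). A returned Exception sentinel diverts to the pending-handler
// unwinding code at the end.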
                               ArgvMode argv_mode, bool builtin_exit_frame,
                               bool switch_to_central_stack) {
  using ER = ExternalReference;
  static constexpr Register argc_input = r2;
  static constexpr Register target_fun = r7;
  static constexpr Register argv = r3;
  static constexpr Register scratch = ip;
  static constexpr Register argc_sav = r9;
  static constexpr Register argc_sav = r6;
  __ mov(target_fun, argv);
  int arg_stack_space = 0;
  bool needs_return_buffer =
  if (needs_return_buffer) {
    arg_stack_space += result_size;
  arg_stack_space += 2;
      scratch, arg_stack_space,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  __ mov(argc_sav, argc_input);
  if (needs_return_buffer) {
#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
  __ Move(isolate_reg, ER::isolate_address());
  const int stack_args = 3;
  const int stack_space = kXPLINKStackFrameExtraParamSlot + stack_args;
                         kStackPointerBias)));
  __ StoreReturnAddressAndCall(r8);
  if (result_size == 1) {
  } else if (result_size == 2) {
  __ StoreReturnAddressAndCall(target_fun);
  if (needs_return_buffer) {
  Label exception_returned;
  __ CompareRoot(r2, RootIndex::kException);
#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchFromTheCentralStackIfNeeded(masm);
  ER exception_address =
      ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
      __ ExternalReferenceAsOperand(exception_address, no_reg));
  __ CompareRoot(scratch, RootIndex::kTheHoleValue);
  __ LeaveExitFrame(scratch);
  __ AddS64(sp, sp, scratch);
  __ bind(&exception_returned);
  ER pending_handler_context_address = ER::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ER pending_handler_entrypoint_address = ER::Create(
      IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ER pending_handler_fp_address =
      ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ER pending_handler_sp_address =
      ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
  __ PrepareCallCFunction(3, 0, r2);
  __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
  __ Move(cp, pending_handler_context_address);
  __ Move(sp, pending_handler_sp_address);
  __ Move(fp, pending_handler_fp_address);
  ER c_entry_fp_address =
      ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ StoreU64(scratch,
              __ ExternalReferenceAsOperand(c_entry_fp_address, no_reg));
  __ LoadU64(scratch,
             __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
                                           no_reg));
#if V8_ENABLE_WEBASSEMBLY
void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
  using ER = ExternalReference;
  Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(5, r0);
    __ CallCFunction(ER::wasm_grow_stack(), 5);
  __ beq(&call_runtime);
  __ SubS64(fp, fp, sp);
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
  __ bind(&call_runtime);
        MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
                          WasmTrustedInstanceData::kNativeContextOffset));
    __ EnterFrame(StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kWasmStackGuard);
    __ LeaveFrame(StackFrame::INTERNAL);
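// Slow-path truncating double-to-int32: tries ConvertDoubleToInt64 and keeps
// the result when it fits in 32 bits; otherwise reconstructs the truncated
// value manually from the exponent and mantissa words.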
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done, fastpath_done;
  HardAbortScope hard_abort(masm);
  __ Push(result_reg, scratch);
  __ LoadF64(double_scratch, MemOperand(sp, argument_offset));
  __ ConvertDoubleToInt64(result_reg, double_scratch);
  __ TestIfInt32(result_reg);
  __ Push(scratch_high, scratch_low);
  __ LoadU32(scratch_high,
  __ LoadU32(scratch_low,
  __ CmpS64(scratch, Operand(83));
  __ mov(r0, Operand(51));
  __ SubS64(scratch, r0, scratch);
  __ ShiftRightU32(scratch_low, scratch_low, scratch);
  __ mov(r0, Operand(32));
  __ SubS64(scratch, r0, scratch);
  __ ShiftLeftU64(r0, r0, Operand(16));
  __ OrP(result_reg, result_reg, r0);
  __ ShiftLeftU32(r0, result_reg, scratch);
  __ OrP(result_reg, scratch_low, r0);
  __ bind(&out_of_range);
  __ lcgr(scratch, scratch);
  __ ShiftLeftU32(result_reg, scratch_low, scratch);
  __ ShiftRightS32(r0, scratch_high, Operand(31));
  __ ShiftRightU64(r0, r0, Operand(32));
  __ XorP(result_reg, r0);
  __ ShiftRightU32(r0, scratch_high, Operand(31));
  __ AddS64(result_reg, r0);
  __ Pop(scratch_high, scratch_low);
  __ bind(&fastpath_done);
  __ StoreU64(result_reg, MemOperand(sp, argument_offset));
  __ Pop(result_reg, scratch);
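// CallApiCallback: fills in the implicit FunctionCallbackArguments slots,
// enters an API_CALLBACK_EXIT frame, materializes a v8::FunctionCallbackInfo
// on the stack, and invokes the C++ callback (through the profiling thunk
// when profiling is active).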
  argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
  api_function_address =
      CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
  DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
                     func_templ, scratch));
  using FCA = FunctionCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiCallbackExitFrameConstants;
  static_assert(FCA::kArgsLength == 6);
  static_assert(FCA::kNewTargetIndex == 5);
  static_assert(FCA::kTargetIndex == 4);
  static_assert(FCA::kReturnValueIndex == 3);
  static_assert(FCA::kContextIndex == 2);
  static_assert(FCA::kIsolateIndex == 1);
  static_assert(FCA::kUnusedIndex == 0);
  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
                       topmost_script_having_context);
  __ Move(scratch, ER::isolate_address());
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ StoreU64(scratch,
  __ StoreU64(func_templ,
  __ StoreU64(scratch,
      api_function_address,
                      FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_CALLBACK_EXIT);
  __ StoreU64(argc, argc_operand);
  __ AddS64(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
  __ StoreU64(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));
  __ AddS64(scratch, fp, Operand(FC::kFirstArgumentOffset));
  __ StoreU64(scratch, MemOperand(fp, FC::kFCIValuesOffset));
  __ RecordComment("v8::FunctionCallback's argument.");
  __ AddS64(function_callback_info_arg, fp,
            Operand(FC::kFunctionCallbackInfoOffset));
  ExternalReference thunk_ref = ER::invoke_function_callback(mode);
  static constexpr int kSlotsToDropOnReturn =
  const bool with_profiling =
                           thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
                           &argc_operand, return_value_operand);
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  using PCA = PropertyCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiAccessorExitFrameConstants;
  static_assert(PCA::kPropertyKeyIndex == 0);
  static_assert(PCA::kShouldThrowOnErrorIndex == 1);
  static_assert(PCA::kHolderIndex == 2);
  static_assert(PCA::kIsolateIndex == 3);
  static_assert(PCA::kHolderV2Index == 4);
  static_assert(PCA::kReturnValueIndex == 5);
  static_assert(PCA::kDataIndex == 6);
  static_assert(PCA::kThisIndex == 7);
  static_assert(PCA::kArgsLength == 8);
  Register api_function_address = r4;
  __ LoadTaggedField(scratch,
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ Push(scratch, smi_zero);
  __ Move(scratch, ER::isolate_address());
  __ Push(scratch, holder);
  __ LoadTaggedField(name_arg,
  __ Push(smi_zero, name_arg);
  __ RecordComment("Load api_function_address");
      api_function_address,
  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_ACCESSOR_EXIT);
  __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
  __ AddS64(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));
  DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
#ifdef V8_ENABLE_DIRECT_HANDLE
  static_assert(PCA::kPropertyKeyIndex == 0);
  __ mov(name_arg, property_callback_info_arg);
  ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
  static constexpr int kSlotsToDropOnReturn =
      FC::kPropertyCallbackInfoArgsLength;
  MemOperand* const kUseStackSpaceConstant = nullptr;
  const bool with_profiling = true;
                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
                           kUseStackSpaceConstant, return_value_operand);
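// Deoptimization entry: saves the full register (and SIMD) state, creates a
// Deoptimizer via a C call, copies the current frame into its input buffer,
// lets the C++ side compute the output frames, then pushes those frames and
// jumps to the continuation.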
4780void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4789void Generate_DeoptimizationEntry(MacroAssembler* masm,
4791 Isolate* isolate = masm->isolate();
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
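    // Each allocatable double register is spilled at an offset derived
    // from its allocation code so the deoptimizer can record its value.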
  static constexpr int kSavedRegistersAreaSize =
      (kNumberOfRegisters * kSystemPointerSize) + kDoubleRegsSize;
  __ SubS64(r5, fp, r5);
  __ PrepareCallCFunction(5, r7);
  Label context_check;
  __ JumpIfSmi(r3, &context_check);
  __ bind(&context_check);
  __ mov(r3, Operand(static_cast<int>(deopt_kind)));
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
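  // The five arguments prepared above feed Deoptimizer::New on the C++
  // side (roughly: the function, the deopt kind, the return address, the
  // fp-to-sp delta of the optimized frame, and the isolate); the returned
  // Deoptimizer* describes the input frame.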
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int dst_offset = code * kSimd128Size + simd128_regs_offset;
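    // dst_offset points into the input FrameDescription's SIMD register
    // area, so the register values saved on entry are copied into the
    // deoptimizer's snapshot of the machine state.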
  // Mark the isolate's stack as not iterable while frames are rewritten.
  UseScratchRegisterScope temps(masm);
  Register is_iterable = temps.Acquire();
  __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
  __ lhi(zero, Operand(0));
  Label pop_loop_header;
  __ bind(&pop_loop_header);
  __ PrepareCallCFunction(1, r3);
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
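  // Deoptimizer::ComputeOutputFrames takes the Deoptimizer* as its only
  // argument and translates the optimized input frame into one or more
  // unoptimized output FrameDescriptions; the loops below copy those onto
  // the real stack.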
  Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
  __ AddS64(r3, r6, r3);
  __ bind(&outer_push_loop);
  __ bind(&inner_push_loop);
  __ SubS64(r5, Operand(sizeof(intptr_t)));
  __ AddS64(r8, r4, r5);
  __ bind(&inner_loop_header);
  __ bne(&inner_push_loop);
  __ bind(&outer_loop_header);
  __ blt(&outer_push_loop);
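  // Nested copy loops: the outer loop walks the array of output
  // FrameDescriptions, the inner loop pushes one frame's contents slot by
  // slot (r5 counts down by sizeof(intptr_t) per slot).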
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int src_offset = code * kSimd128Size + simd128_regs_offset;
  if ((restored_regs.bits() & (1 << i)) != 0) {
  UseScratchRegisterScope temps(masm);
  Register is_iterable = temps.Acquire();
  __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
  __ lhi(one, Operand(1));
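  // Writing 1 back to StackIsIterable undoes the 0 stored before the frame
  // rewrite: stack walks are safe again once the output frames are fully
  // materialized.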
void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}

void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kInterpreter,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}
void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kBaseline,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}
void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::INTERPRETED);
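  // The remainder of the trampoline re-invokes the function that owned the
  // dropped frame, so execution restarts at the function entry with the
  // original receiver and argument count.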