5#if V8_TARGET_ARCH_PPC64
28#if V8_ENABLE_WEBASSEMBLY
38#define __ ACCESS_MASM(masm)
46 __ DecodeField<Code::KindField>(scratch);
47 __ CmpS64(scratch, Operand(
static_cast<int>(CodeKind::BASELINE)), r0);
48 __ Assert(
eq, AbortReason::kExpectedBaselineData);
51static void CheckSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
58#if V8_STATIC_ROOTS_BOOL
59 __ IsObjectTypeFast(data, scratch, CODE_TYPE, r0);
61 __ CompareObjectType(data, scratch, scratch, CODE_TYPE);
65 __ b(
ne, ¬_baseline);
67 __ b(
eq, is_baseline);
68 __ bind(¬_baseline);
70 __ b(
eq, is_baseline);
73#if V8_STATIC_ROOTS_BOOL
76 INTERPRETER_DATA_TYPE);
79 __ CmpU64(scratch, Operand(INTERPRETER_DATA_TYPE), r0);
81 __ b(
ne, is_bytecode);
85 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
86 Label* is_baseline, Label* is_unavailable) {
92 __ LoadTrustedPointerField(
98 __ IsObjectType(data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
101 CheckSharedFunctionInfoBytecodeOrBaseline(masm, data, scratch1, is_baseline,
105 __ LoadTrustedPointerField(
106 bytecode,
FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset),
107 kBytecodeArrayIndirectPointerTag, scratch1);
110 __ IsObjectType(bytecode, scratch1, scratch1, BYTECODE_ARRAY_TYPE);
111 __ b(
ne, is_unavailable);
114void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
116 __ AddS64(ip, entry_address, Operand(
offset), r0);
123void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi,
126 __ mov(scratch, Operand(0));
131void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
132 Register scratch1, Register scratch2) {
137 ResetSharedFunctionInfoAge(masm, scratch1, scratch2);
140void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
141 Register feedback_vector, Register scratch1,
149 Operand(
static_cast<uint8_t
>(~FeedbackVector::OsrUrgencyBits::kMask)));
160void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
161 MacroAssembler* masm) {
173 code_obj,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset),
176 ResetSharedFunctionInfoAge(masm, code_obj, r6);
178 __ LoadTrustedPointerField(
180 FieldMemOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset),
185 __ IsObjectType(code_obj, r6, r6, CODE_TYPE);
186 __ Assert(
eq, AbortReason::kExpectedBaselineData);
193 __ LoadTaggedField(feedback_cell,
196 __ LoadTaggedField(feedback_vector,
200 Label install_baseline_code;
203 __ IsObjectType(feedback_vector, r6, r6, FEEDBACK_VECTOR_TYPE);
204 __ b(
ne, &install_baseline_code);
213 __ StoreU64(feedback_cell,
219 __ StoreU64(feedback_vector,
225 __ Move(get_baseline_pc,
226 ExternalReference::baseline_pc_for_next_executed_bytecode());
242 FrameScope scope(masm, StackFrame::INTERNAL);
243 __ PrepareCallCFunction(4, 0, ip);
244 __ CallCFunction(get_baseline_pc, 3, 0);
247 __ LoadCodeInstructionStart(code_obj, code_obj);
251 Generate_OSREntry(masm, code_obj, 0);
254 __ bind(&install_baseline_code);
256 FrameScope scope(masm, StackFrame::INTERNAL);
259 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
267 int formal_parameter_count,
Address address) {
275enum class ArgumentsElementType {
280void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
282 ArgumentsElementType element_type) {
290 __ add(scratch, array, scratch);
294 if (element_type == ArgumentsElementType::kHandle) {
302void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
314 Label stack_overflow;
316 __ StackOverflowCheck(r3, scratch, &stack_overflow);
319 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
337 Generate_PushArguments(masm, r7, r3, r8, ArgumentsElementType::kRaw);
340 __ PushRoot(RootIndex::kTheHoleValue);
347 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
359 __ DropArguments(scratch);
362 __ bind(&stack_overflow);
364 FrameScope scope(masm, StackFrame::INTERNAL);
365 __ CallRuntime(Runtime::kThrowStackOverflow);
370enum class OsrSourceTier {
375void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
376 Register maybe_target_code,
377 Register expected_param_count) {
378 Label jump_to_optimized_code;
384 __ CmpSmiLiteral(maybe_target_code,
Smi::zero(), r0);
385 __ bne(&jump_to_optimized_code);
390 FrameScope scope(masm, StackFrame::INTERNAL);
391 __ CallRuntime(Runtime::kCompileOptimizedOSR);
396 __ bne(&jump_to_optimized_code);
399 __ bind(&jump_to_optimized_code);
405 __ Move(r4, ExternalReference::address_of_log_or_trace_osr());
407 __ andi(r0, r4, Operand(0xFF));
411 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
413 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
420 if (source == OsrSourceTier::kInterpreter) {
423 __ LeaveFrame(StackFrame::STUB);
432 r4,
FieldMemOperand(r3, Code::kDeoptimizationDataOrInterpreterDataOffset),
436 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
439 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3, r0, ip);
442 __ LoadCodeInstructionStart(r3, r3);
463void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
475 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
476 __ EnterFrame(StackFrame::CONSTRUCT);
480 __ PushRoot(RootIndex::kUndefinedValue);
494 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r7);
498 ¬_create_implicit_receiver);
501 __ CallBuiltin(Builtin::kFastNewObject);
502 __ b(&post_instantiation_deopt_entry);
505 __ bind(¬_create_implicit_receiver);
506 __ LoadRoot(r3, RootIndex::kTheHoleValue);
517 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
519 __ bind(&post_instantiation_deopt_entry);
552 Label stack_overflow;
553 __ StackOverflowCheck(r3, r8, &stack_overflow);
558 Generate_PushArguments(masm, r7, r3, r8, ArgumentsElementType::kRaw);
565 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
572 Label use_receiver, do_throw, leave_and_return, check_receiver;
575 __ JumpIfNotRoot(r3, RootIndex::kUndefinedValue, &check_receiver);
582 __ bind(&use_receiver);
584 __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
586 __ bind(&leave_and_return);
590 __ LeaveFrame(StackFrame::CONSTRUCT);
593 __ DropArguments(r4);
596 __ bind(&check_receiver);
598 __ JumpIfSmi(r3, &use_receiver);
602 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
603 __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
604 __ bge(&leave_and_return);
610 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
613 __ bind(&stack_overflow);
616 __ CallRuntime(Runtime::kThrowStackOverflow);
621void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
622 Generate_JSBuiltinsConstructStubHelper(masm);
626void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
634 r3,
FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset), r0);
635 __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
638 __ AssertGeneratorObject(r4);
646 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
647 Label stepping_prepared;
649 ExternalReference debug_hook =
650 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
651 __ Move(scratch, debug_hook);
653 __ extsb(scratch, scratch);
655 __ bne(&prepare_step_in_if_stepping);
659 ExternalReference debug_suspended_generator =
660 ExternalReference::debug_suspended_generator_address(masm->isolate());
662 __ Move(scratch, debug_suspended_generator);
664 __ CmpS64(scratch, r4);
665 __ beq(&prepare_step_in_suspended_generator);
666 __ bind(&stepping_prepared);
670 Label stack_overflow;
672 __ CmpU64(sp, scratch);
673 __ blt(&stack_overflow);
686 r6,
FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
689 r5,
FieldMemOperand(r4, JSGeneratorObject::kParametersAndRegistersOffset),
692 Label done_loop, loop;
694 __ subi(r6, r6, Operand(1));
698 __ add(scratch, r5, r10);
714 Label ok, is_baseline, is_unavailable;
720 &is_baseline, &is_unavailable);
723 __ bind(&is_unavailable);
724 __ Abort(AbortReason::kMissingBytecodeArray);
726 __ bind(&is_baseline);
727 __ IsObjectType(bytecode, ip, ip, CODE_TYPE);
728 __ Assert(
eq, AbortReason::kMissingBytecodeArray);
738 r3, SharedFunctionInfo::kFormalParameterCountOffset));
744 __ JumpJSFunction(r4, r0);
747 __ bind(&prepare_step_in_if_stepping);
749 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
752 __ PushRoot(RootIndex::kTheHoleValue);
753 __ CallRuntime(Runtime::kDebugOnFunctionCall);
758 __ b(&stepping_prepared);
760 __ bind(&prepare_step_in_suspended_generator);
762 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
764 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
769 __ b(&stepping_prepared);
771 __ bind(&stack_overflow);
773 FrameScope scope(masm, StackFrame::INTERNAL);
774 __ CallRuntime(Runtime::kThrowStackOverflow);
779void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
780 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
782 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
810 Label
invoke, handler_entry, exit;
813 NoRootArrayScope no_root_array(masm);
832#ifdef V8_COMPRESS_POINTERS
835 IsolateData::cage_base_offset());
849 __ li(r0, Operand(-1));
866 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerFP);
871 __ LoadIsolateField(ip, IsolateFieldId::kFastCCallCallerPC);
881 Label non_outermost_js;
882 ExternalReference js_entry_sp =
885 __ Move(r3, js_entry_sp);
888 __ bne(&non_outermost_js);
893 __ bind(&non_outermost_js);
906 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
907 __ bind(&handler_entry);
911 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
918 IsolateAddressId::kExceptionAddress, masm->isolate()));
922 __ LoadRoot(r3, RootIndex::kException);
928 __ PushStackHandler();
940 __ CallBuiltin(entry_trampoline);
943 __ PopStackHandler();
947 Label non_outermost_js_2;
950 __ bne(&non_outermost_js_2);
952 __ Move(r8, js_entry_sp);
954 __ bind(&non_outermost_js_2);
958 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerPC);
962 __ LoadIsolateField(scratch, IsolateFieldId::kFastCCallCallerFP);
987void Builtins::Generate_JSEntry(MacroAssembler* masm) {
988 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
991void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
992 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
993 Builtin::kJSConstructEntryTrampoline);
996void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
997 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
998 Builtin::kRunMicrotasksTrampoline);
1013 FrameScope scope(masm, StackFrame::INTERNAL);
1017 IsolateAddressId::kContextAddress, masm->isolate());
1018 __ Move(
cp, context_address);
1025 Label enough_stack_space, stack_overflow;
1027 __ StackOverflowCheck(r3, r9, &stack_overflow);
1028 __ b(&enough_stack_space);
1029 __ bind(&stack_overflow);
1030 __ CallRuntime(Runtime::kThrowStackOverflow);
1034 __ bind(&enough_stack_space);
1040 Generate_PushArguments(masm, r8, r7, r9, ArgumentsElementType::kHandle);
1054 __ LoadRoot(r7, RootIndex::kUndefinedValue);
1063 __ CallBuiltin(builtin);
1073void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
1077void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
1081void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
1088 __ TailCallBuiltin(Builtin::kRunMicrotasks);
1092 Register scratch2) {
1095 __ LoadU64(params_size,
1097 __ LoadU16(params_size,
1100 Register actual_params_size = scratch2;
1102 __ LoadU64(actual_params_size,
1107 Label corrected_args_count;
1108 __ CmpS64(params_size, actual_params_size);
1109 __ bge(&corrected_args_count);
1110 __ mr(params_size, actual_params_size);
1111 __ bind(&corrected_args_count);
1113 __ LeaveFrame(StackFrame::INTERPRETED);
1115 __ DropArguments(params_size);
1124 Register bytecode_array,
1125 Register bytecode_offset,
1126 Register bytecode, Register scratch1,
1127 Register scratch2, Label* if_return) {
1128 Register bytecode_size_table = scratch1;
1135 Register original_bytecode_offset = scratch2;
1137 bytecode, original_bytecode_offset));
1138 __ Move(bytecode_size_table,
1139 ExternalReference::bytecode_size_table_address());
1140 __ Move(original_bytecode_offset, bytecode_offset);
1143 Label process_bytecode, extra_wide;
1144 static_assert(0 ==
static_cast<int>(interpreter::Bytecode::kWide));
1145 static_assert(1 ==
static_cast<int>(interpreter::Bytecode::kExtraWide));
1146 static_assert(2 ==
static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1148 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
1149 __ cmpi(bytecode, Operand(0x3));
1150 __ bgt(&process_bytecode);
1151 __ andi(r0, bytecode, Operand(0x1));
1152 __ bne(&extra_wide, cr0);
1155 __ addi(bytecode_offset, bytecode_offset, Operand(1));
1156 __ lbzx(bytecode,
MemOperand(bytecode_array, bytecode_offset));
1157 __ addi(bytecode_size_table, bytecode_size_table,
1159 __ b(&process_bytecode);
1161 __ bind(&extra_wide);
1163 __ addi(bytecode_offset, bytecode_offset, Operand(1));
1164 __ lbzx(bytecode,
MemOperand(bytecode_array, bytecode_offset));
1165 __ addi(bytecode_size_table, bytecode_size_table,
1169 __ bind(&process_bytecode);
1172#define JUMP_IF_EQUAL(NAME) \
1174 Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1181 Label
end, not_jump_loop;
1183 Operand(
static_cast<int>(interpreter::Bytecode::kJumpLoop)));
1184 __ bne(¬_jump_loop);
1187 __ Move(bytecode_offset, original_bytecode_offset);
1190 __ bind(¬_jump_loop);
1192 __ lbzx(scratch3,
MemOperand(bytecode_size_table, bytecode));
1193 __ add(bytecode_offset, bytecode_offset, scratch3);
1199void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1202 Register closure = descriptor.GetRegisterParameter(
1203 BaselineOutOfLinePrologueDescriptor::kClosure);
1207 __ LoadTaggedField(feedback_cell,
1210 __ LoadTaggedField(feedback_vector,
1213 __ AssertFeedbackVector(feedback_vector,
r11);
1215#ifndef V8_ENABLE_LEAPTIERING
1217 Label flags_need_processing;
1220 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1221 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1225 { ResetFeedbackVectorOsrUrgency(masm, feedback_vector,
r11, r0); }
1230 __ LoadU32(invocation_count,
1232 FeedbackVector::kInvocationCountOffset),
1234 __ AddS32(invocation_count, invocation_count, Operand(1));
1235 __ StoreU32(invocation_count,
1237 FeedbackVector::kInvocationCountOffset),
1248 Register callee_context = descriptor.GetRegisterParameter(
1249 BaselineOutOfLinePrologueDescriptor::kCalleeContext);
1250 Register callee_js_function = descriptor.GetRegisterParameter(
1251 BaselineOutOfLinePrologueDescriptor::kClosure);
1252 ResetJSFunctionAge(masm, callee_js_function,
r11, r0);
1253 __ Push(callee_context, callee_js_function);
1257 Register argc = descriptor.GetRegisterParameter(
1258 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1261 Register bytecodeArray = descriptor.GetRegisterParameter(
1262 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1264 __ Push(argc, bytecodeArray);
1268 __ CompareObjectType(feedback_vector, scratch, scratch,
1269 FEEDBACK_VECTOR_TYPE);
1270 __ Assert(
eq, AbortReason::kExpectedFeedbackVector);
1272 __ Push(feedback_cell);
1273 __ Push(feedback_vector);
1276 Label call_stack_guard;
1277 Register frame_size = descriptor.GetRegisterParameter(
1278 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1289 __ SubS64(sp_minus_frame_size, sp, frame_size);
1292 __ CmpU64(sp_minus_frame_size, interrupt_limit);
1293 __ blt(&call_stack_guard);
1300#ifndef V8_ENABLE_LEAPTIERING
1301 __ bind(&flags_need_processing);
1312 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1317 __ bind(&call_stack_guard);
1320 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1323 __ SmiTag(frame_size);
1324 __ Push(frame_size);
1325 __ CallRuntime(Runtime::kStackGuardWithGap);
1334void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
1347 __ LeaveFrame(StackFrame::BASELINE);
1350 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
1370 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1377 sfi,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset), r0);
1378 ResetSharedFunctionInfoAge(masm, sfi, ip);
1382 Label is_baseline, compile_lazy;
1385 &is_baseline, &compile_lazy);
1387 Label push_stack_frame;
1389 __ LoadFeedbackVector(feedback_vector, closure, r7, &push_stack_frame);
1392#ifndef V8_ENABLE_LEAPTIERING
1397 Label flags_need_processing;
1398 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1399 flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
1400 &flags_need_processing);
1403 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, ip, r0);
1408 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
1410 __ addi(r8, r8, Operand(1));
1413 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
1427 __ bind(&push_stack_frame);
1429 __ PushStandardFrame(closure);
1440 Label stack_overflow;
1444 BytecodeArray::kFrameSizeOffset));
1450 __ blt(&stack_overflow);
1454 Label loop, no_args;
1457 __ beq(&no_args, cr0);
1467 Label no_incoming_new_target_or_generator_register;
1471 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset),
1474 __ beq(&no_incoming_new_target_or_generator_register);
1477 __ bind(&no_incoming_new_target_or_generator_register);
1481 Label stack_check_interrupt, after_stack_check_interrupt;
1484 __ blt(&stack_check_interrupt);
1485 __ bind(&after_stack_check_interrupt);
1492 __ bind(&do_dispatch);
1495 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1503 __ RecordComment(
"--- InterpreterEntryReturnPC point ---");
1505 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1512 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1535 __ bind(&do_return);
1540 __ bind(&stack_check_interrupt);
1548 __ CallRuntime(Runtime::kStackGuard);
1563 __ jmp(&after_stack_check_interrupt);
1566#ifndef V8_ENABLE_LEAPTIERING
1567 __ bind(&flags_need_processing);
1568 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
1571 __ bind(&is_baseline);
1573#ifndef V8_ENABLE_LEAPTIERING
1582 Label install_baseline_code;
1588 __ CmpS32(ip, Operand(FEEDBACK_VECTOR_TYPE), r0);
1589 __ b(
ne, &install_baseline_code);
1592 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1593 flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1598 __ ReplaceClosureCodeWithOptimizedCode(r5, closure, ip, r7);
1599 __ JumpCodeObject(r5);
1601 __ bind(&install_baseline_code);
1603 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1607 __ bind(&compile_lazy);
1608 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1610 __ bind(&stack_overflow);
1611 __ CallRuntime(Runtime::kThrowStackOverflow);
1616 Register start_address,
1619 __ subi(scratch, num_args, Operand(1));
1621 __ sub(start_address, start_address, scratch);
1623 __ PushArray(start_address, num_args, scratch, r0,
1639 Label stack_overflow;
1643 __ subi(r3, r3, Operand(1));
1652 __ StackOverflowCheck(r6, ip, &stack_overflow);
1658 __ PushRoot(RootIndex::kUndefinedValue);
1670 __ TailCallBuiltin(Builtin::kCallWithSpread);
1675 __ bind(&stack_overflow);
1677 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1693 Label stack_overflow;
1694 __ StackOverflowCheck(r3, ip, &stack_overflow);
1698 __ subi(r3, r3, Operand(1));
1701 Register argc_without_receiver = ip;
1718 __ AssertUndefinedOrAllocationSite(r5, r8);
1722 __ AssertFunction(r4);
1726 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1729 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1733 __ TailCallBuiltin(Builtin::kConstruct);
1736 __ bind(&stack_overflow);
1738 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1746 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1751 Label stack_overflow;
1754 switch (which_frame) {
1766 __ StackOverflowCheck(r3, ip, &stack_overflow);
1775 Register argc_without_receiver = ip;
1777 __ PushArray(r7, argc_without_receiver, r8, r0);
1784 __ TailCallBuiltin(Builtin::kConstruct);
1786 __ bind(&stack_overflow);
1788 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1796void NewImplicitReceiver(MacroAssembler* masm) {
1814 __ Push(r3, r4, r6);
1815 __ CallBuiltin(Builtin::kFastNewObject);
1817 __ Move(implicit_receiver, r3);
1837void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1838 MacroAssembler* masm) {
1846 __ AssertFunction(r4);
1849 Label non_constructor;
1852 __ TestBit(r5, Map::Bits1::IsConstructorBit::kShift, r0);
1853 __ beq(&non_constructor, cr0);
1856 Label stack_overflow;
1857 __ StackOverflowCheck(r3, r5, &stack_overflow);
1861 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1863 __ LoadRoot(r5, RootIndex::kTheHoleValue);
1867 Register argc_without_receiver = r9;
1879 __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1881 __ bne(&builtin_call, cr0);
1884 Label not_create_implicit_receiver;
1885 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(r5);
1889 ¬_create_implicit_receiver);
1890 NewImplicitReceiver(masm);
1891 __ bind(¬_create_implicit_receiver);
1907 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1913 Label use_receiver, do_throw, leave_and_return, check_receiver;
1916 __ JumpIfNotRoot(r3, RootIndex::kUndefinedValue, &check_receiver);
1923 __ bind(&use_receiver);
1927 __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
1929 __ bind(&leave_and_return);
1931 __ LeaveFrame(StackFrame::CONSTRUCT);
1934 __ bind(&check_receiver);
1936 __ JumpIfSmi(r3, &use_receiver);
1940 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
1941 __ CompareObjectType(r3, r7, r8, FIRST_JS_RECEIVER_TYPE);
1942 __ bge(&leave_and_return);
1943 __ b(&use_receiver);
1945 __ bind(&builtin_call);
1948 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1955 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1958 __ bind(&stack_overflow);
1960 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1966 __ bind(&non_constructor);
1967 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1973 Label builtin_trampoline, trampoline_loaded;
1975 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1985 __ LoadTrustedPointerField(
1986 r5,
FieldMemOperand(r5, SharedFunctionInfo::kTrustedFunctionDataOffset),
1990 __ bne(&builtin_trampoline);
1992 __ LoadCodePointerField(
1993 r5,
FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset),
1995 __ LoadCodeInstructionStart(r5, r5);
1996 __ b(&trampoline_loaded);
1998 __ bind(&builtin_trampoline);
1999 __ Move(r5, ExternalReference::
2000 address_of_interpreter_entry_trampoline_instruction_start(
2004 __ bind(&trampoline_loaded);
2005 __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset.value()));
2011 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
2021 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
2024 BYTECODE_ARRAY_TYPE);
2026 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
2045 UseScratchRegisterScope temps(masm);
2046 Register scratch = temps.Acquire();
2055void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
2063 Label enter_bytecode, function_entry_bytecode;
2067 __ beq(&function_entry_bytecode);
2079 __ bind(&enter_bytecode);
2087 __ bind(&function_entry_bytecode);
2094 __ b(&enter_bytecode);
2097 __ bind(&if_return);
2098 __ Abort(AbortReason::kInvalidBytecodeAdvance);
2101void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
2106void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2107 bool javascript_builtin,
2110 int allocatable_register_count = config->num_allocatable_general_registers();
2113 if (javascript_builtin) {
2120 sp, config->num_allocatable_general_registers() *
2125 for (
int i = allocatable_register_count - 1;
i >= 0; --
i) {
2126 int code = config->GetAllocatableGeneralCode(
i);
2132 if (javascript_builtin && with_result) {
2136 constexpr int return_value_offset =
2139 __ addi(r3, r3, Operand(return_value_offset));
2143 __ subi(r3, r3, Operand(return_value_offset));
2150 UseScratchRegisterScope temps(masm);
2151 Register builtin = temps.Acquire();
2157 __ LoadEntryFromBuiltinIndex(builtin, builtin);
2162void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2163 Generate_ContinueToBuiltinHelper(masm,
false,
false);
2166void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2167 MacroAssembler* masm) {
2168 Generate_ContinueToBuiltinHelper(masm,
false,
true);
2171void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2172 Generate_ContinueToBuiltinHelper(masm,
true,
false);
2175void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2176 MacroAssembler* masm) {
2177 Generate_ContinueToBuiltinHelper(masm,
true,
true);
2180void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2182 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2183 __ CallRuntime(Runtime::kNotifyDeoptimized);
2192void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2193 using D = OnStackReplacementDescriptor;
2194 static_assert(D::kParameterCount == 2);
2195 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
2196 D::MaybeTargetCodeRegister(),
2197 D::ExpectedParameterCountRegister());
2200void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
2201 using D = OnStackReplacementDescriptor;
2202 static_assert(D::kParameterCount == 2);
2206 OnStackReplacement(masm, OsrSourceTier::kBaseline,
2207 D::MaybeTargetCodeRegister(),
2208 D::ExpectedParameterCountRegister());
2212void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2224 __ LoadRoot(r8, RootIndex::kUndefinedValue);
2237 __ DropArgumentsAndPushNewReceiver(r3, r8);
2252 __ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
2253 __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);
2256 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2260 __ bind(&no_arguments);
2268void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2278 __ PushRoot(RootIndex::kUndefinedValue);
2279 __ addi(r3, r3, Operand(1));
2284 __ subi(r3, r3, Operand(1));
2290void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2303 __ LoadRoot(r4, RootIndex::kUndefinedValue);
2319 __ DropArgumentsAndPushNewReceiver(r3, r8);
2333 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2336void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2350 __ LoadRoot(r4, RootIndex::kUndefinedValue);
2366 __ DropArgumentsAndPushNewReceiver(r3, r7);
2385 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2394void Generate_AllocateSpaceAndShiftExistingArguments(
2395 MacroAssembler* masm, Register count, Register argc_in_out,
2396 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2403 __ AllocateStackSpace(new_space);
2405 Register dest = pointer_to_new_space_out;
2408 __ mr(r0, argc_in_out);
2419 __ add(argc_in_out, argc_in_out, count);
2442 __ AssertNotSmi(r5);
2446 __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
2448 __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
2454 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2460 Label stack_overflow;
2461 __ StackOverflowCheck(r7, scratch, &stack_overflow);
2468 Generate_AllocateSpaceAndShiftExistingArguments(masm, r7, r3, r8, ip, r9);
2472 Label loop, no_args, skip;
2482 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
2484 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
2493 __ TailCallBuiltin(target_builtin);
2495 __ bind(&stack_overflow);
2496 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2501 CallOrConstructMode mode,
2513 Label new_target_constructor, new_target_not_constructor;
2514 __ JumpIfSmi(r6, &new_target_not_constructor);
2518 __ TestBit(scratch, Map::Bits1::IsConstructorBit::kShift, r0);
2519 __ bne(&new_target_constructor, cr0);
2520 __ bind(&new_target_not_constructor);
2523 __ EnterFrame(StackFrame::INTERNAL);
2525 __ CallRuntime(Runtime::kThrowNotConstructor);
2528 __ bind(&new_target_constructor);
2531 Label stack_done, stack_overflow;
2535 __ ble(&stack_done, cr0);
2547 __ StackOverflowCheck(r8, scratch, &stack_overflow);
2555 __ add(r7, r7, scratch);
2562 Generate_AllocateSpaceAndShiftExistingArguments(masm, r8, r3, r5, scratch,
2572 __ subi(r8, r8, Operand(1));
2581 __ bind(&stack_done);
2583 __ TailCallBuiltin(target_builtin);
2585 __ bind(&stack_overflow);
2586 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2596 __ AssertCallableFunction(r4);
2609 Operand(SharedFunctionInfo::IsStrictBit::kMask |
2610 SharedFunctionInfo::IsNativeBit::kMask));
2611 __ bne(&done_convert, cr0);
2622 __ LoadGlobalProxy(r6);
2624 Label convert_to_object, convert_receiver;
2625 __ LoadReceiver(r6);
2626 __ JumpIfSmi(r6, &convert_to_object);
2627 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
2628 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
2629 __ bge(&done_convert);
2631 Label convert_global_proxy;
2632 __ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
2633 __ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
2634 __ bind(&convert_global_proxy);
2637 __ LoadGlobalProxy(r6);
2639 __ b(&convert_receiver);
2641 __ bind(&convert_to_object);
2646 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2651 __ CallBuiltin(Builtin::kToObject);
2659 __ bind(&convert_receiver);
2661 __ StoreReceiver(r6);
2663 __ bind(&done_convert);
2673 r5,
FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
2679void Generate_PushBoundArguments(MacroAssembler* masm) {
2687 Label no_bound_arguments;
2692 __ beq(&no_bound_arguments, cr0);
2707 __ sub(r0, sp, r10);
2713 __ CmpU64(r0, scratch);
2718 __ EnterFrame(StackFrame::INTERNAL);
2719 __ CallRuntime(Runtime::kThrowStackOverflow);
2736 __ subi(r7, r7, Operand(1));
2738 __ add(scratch, scratch, r5);
2748 __ bind(&no_bound_arguments);
2759 __ AssertBoundFunction(r4);
2764 __ StoreReceiver(r6);
2767 Generate_PushBoundArguments(masm);
2771 r4,
FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2787 Label non_callable, class_constructor;
2788 __ JumpIfSmi(target, &non_callable);
2789 __ LoadMap(map, target);
2790 __ CompareInstanceTypeRange(map, instance_type, scratch,
2794 __ cmpi(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2795 __ TailCallBuiltin(Builtin::kCallBoundFunction,
eq);
2802 __ TestBit(flags, Map::Bits1::IsCallableBit::kShift, r0);
2803 __ beq(&non_callable, cr0);
2807 __ cmpi(instance_type, Operand(JS_PROXY_TYPE));
2808 __ TailCallBuiltin(Builtin::kCallProxy,
eq);
2812 __ cmpi(instance_type, Operand(JS_WRAPPED_FUNCTION_TYPE));
2813 __ TailCallBuiltin(Builtin::kCallWrappedFunction,
eq);
2817 __ cmpi(instance_type, Operand(JS_CLASS_CONSTRUCTOR_TYPE));
2818 __ beq(&class_constructor);
2823 __ StoreReceiver(target);
2825 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2830 __ bind(&non_callable);
2832 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2834 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2839 __ bind(&class_constructor);
2841 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2843 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2849void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2855 __ AssertConstructor(r4);
2856 __ AssertFunction(r4);
2860 __ LoadRoot(r5, RootIndex::kUndefinedValue);
2862 Label call_generic_stub;
2868 __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2870 __ beq(&call_generic_stub, cr0);
2872 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2874 __ bind(&call_generic_stub);
2875 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2879void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2885 __ AssertConstructor(r4);
2886 __ AssertBoundFunction(r4);
2889 Generate_PushBoundArguments(masm);
2893 __ CompareTagged(r4, r6);
2896 r6,
FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2901 r4,
FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset), r0);
2902 __ TailCallBuiltin(Builtin::kConstruct);
2906void Builtins::Generate_Construct(MacroAssembler* masm) {
2920 Label non_constructor, non_proxy;
2921 __ JumpIfSmi(target, &non_constructor);
2929 __ TestBit(flags, Map::Bits1::IsConstructorBit::kShift, r0);
2930 __ beq(&non_constructor, cr0);
2934 __ CompareInstanceTypeRange(map, instance_type, scratch,
2935 FIRST_JS_FUNCTION_TYPE, LAST_JS_FUNCTION_TYPE);
2936 __ TailCallBuiltin(Builtin::kConstructFunction,
le);
2940 __ cmpi(instance_type, Operand(JS_BOUND_FUNCTION_TYPE));
2941 __ TailCallBuiltin(Builtin::kConstructBoundFunction,
eq);
2944 __ cmpi(instance_type, Operand(JS_PROXY_TYPE));
2946 __ TailCallBuiltin(Builtin::kConstructProxy);
2949 __ bind(&non_proxy);
2952 __ StoreReceiver(target);
2954 __ LoadNativeContextSlot(target,
2955 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2961 __ bind(&non_constructor);
2962 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2965#if V8_ENABLE_WEBASSEMBLY
2967struct SaveWasmParamsScope {
2968 explicit SaveWasmParamsScope(MacroAssembler* masm) : masm(masm) {
2969 for (Register gp_param_reg : wasm::kGpParamRegisters) {
2970 gp_regs.set(gp_param_reg);
2972 for (DoubleRegister fp_param_reg : wasm::kFpParamRegisters) {
2973 fp_regs.set(fp_param_reg);
2979 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs + 1,
2981 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
2983 CHECK_EQ(WasmLiftoffSetupFrameConstants::kNumberOfSavedFpParamRegs,
2986 __ MultiPush(gp_regs);
2987 __ MultiPushF64AndV128(fp_regs, simd_regs, ip, r0);
2989 ~SaveWasmParamsScope() {
2990 __ MultiPopF64AndV128(fp_regs, simd_regs, ip, r0);
2991 __ MultiPop(gp_regs);
2998 MacroAssembler* masm;
3001void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3002 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3005 Label allocate_vector, done;
3010 WasmTrustedInstanceData::kFeedbackVectorsOffset),
3013 __ AddS64(vector, vector, scratch);
3014 __ LoadTaggedField(vector,
3017 __ JumpIfSmi(vector, &allocate_vector);
3023 __ bind(&allocate_vector);
3036 SaveWasmParamsScope save_params(masm);
3039 __ SmiTag(func_index);
3045 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3057void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3063 HardAbortScope hard_abort(masm);
3064 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
3067 SaveWasmParamsScope save_params(masm);
3076 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3089 WasmTrustedInstanceData::kJumpTableStartOffset),
3098void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3099 HardAbortScope hard_abort(masm);
3101 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3113 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3127void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
3132 __ JumpIfEqual(tmp, old_state, &ok);
3135 __ mov(tmp, Operand(new_state));
3140void SwitchStackPointer(MacroAssembler* masm, Register stack) {
3144void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* target,
3152 __ GetLabelAddress(tmp, target);
3157void LoadJumpBuffer(MacroAssembler* masm, Register stack,
bool load_pc,
3159 SwitchStackPointer(masm, stack);
3170void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3173 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3175 LoadJumpBuffer(masm, target_stack,
false, tmp, expected_state);
3179void SwitchStacks(MacroAssembler* masm, Register old_stack,
bool return_switch,
3180 const std::initializer_list<Register> keep) {
3181 using ER = ExternalReference;
3183 for (
auto reg : keep) {
3188 __ PrepareCallCFunction(2, r0);
3194 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3197 for (
auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3202void ReloadParentStack(MacroAssembler* masm, Register return_reg,
3203 Register return_value, Register context, Register tmp1,
3204 Register tmp2, Register tmp3) {
3206 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3212 UseScratchRegisterScope temps(masm);
3213 Register scratch = temps.Acquire();
3221 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3224 SwitchStacks(masm, active_stack,
true,
3225 {return_reg, return_value,
context, parent});
3229void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3231 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3233 suspender,
FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset),
3236 int32_t active_suspender_offset =
3238 RootIndex::kActiveSuspender);
3242void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3243 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset),
3244 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3247class RegisterAllocator {
3251 Scoped(RegisterAllocator* allocator, Register*
reg)
3262 void Ask(Register*
reg) {
3269 bool registerIsAvailable(
const Register&
reg) {
return available_.has(
reg); }
3271 void Pinned(
const Register& requested, Register*
reg) {
3272 DCHECK(registerIsAvailable(requested));
3286 void Reserve(
const Register&
reg) {
3294 void Reserve(
const Register& reg1,
const Register& reg2,
3295 const Register& reg3 =
no_reg,
const Register& reg4 =
no_reg,
3296 const Register& reg5 =
no_reg,
const Register& reg6 =
no_reg) {
3305 bool IsUsed(
const Register&
reg) {
3309 void ResetExcept(
const Register& reg1 =
no_reg,
const Register& reg2 =
no_reg,
3310 const Register& reg3 =
no_reg,
const Register& reg4 =
no_reg,
3311 const Register& reg5 =
no_reg,
3312 const Register& reg6 =
no_reg) {
3323 if (registerIsAvailable(**it)) {
3332 static RegisterAllocator WithAllocatableGeneralRegisters() {
3336 for (
int i = 0;
i < config->num_allocatable_general_registers(); ++
i) {
3337 int code = config->GetAllocatableGeneralCode(
i);
3339 list.set(candidate);
3341 return RegisterAllocator(list);
3350#define DEFINE_REG(Name) \
3351 Register Name = no_reg; \
3354#define DEFINE_REG_W(Name) \
3358#define ASSIGN_REG(Name) regs.Ask(&Name);
3360#define ASSIGN_REG_W(Name) \
3364#define DEFINE_PINNED(Name, Reg) \
3365 Register Name = no_reg; \
3366 regs.Pinned(Reg, &Name);
3368#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);
3370#define DEFINE_SCOPED(Name) \
3372 RegisterAllocator::Scoped scope_##Name(®s, &Name);
3374#define FREE_REG(Name) regs.Free(&Name);
3378void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3382 __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3387 data,
FieldMemOperand(data, WasmImportData::kNativeContextOffset), r0);
3392 FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset), r0);
3398void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3403 fp_regs.set(fp_param_reg);
3405 __ MultiPushDoubles(fp_regs);
3413 __ MultiPush(gp_regs);
3416 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3419void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3423void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3424 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3426 __ EnterFrame(StackFrame::STACK_SWITCH);
3435 ResetStackSwitchFrameStackSlots(masm);
3442 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3444 FillJumpBuffer(masm, stack, &resume, scratch);
3447 regs.ResetExcept(suspender, stack);
3450 __ LoadU64(suspender_stack,
3459 __ CmpS64(suspender_stack, stack);
3470 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3473 parent,
FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset),
3475 int32_t active_suspender_offset =
3477 RootIndex::kActiveSuspender);
3479 regs.ResetExcept(suspender, caller, stack);
3484 SwitchStacks(masm, stack,
false, {caller, suspender});
3490 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3491 __ Zero(GCScanSlotPlace);
3498 __ LeaveFrame(StackFrame::STACK_SWITCH);
3507void Generate_WasmResumeHelper(MacroAssembler* masm,
wasm::OnResume on_resume) {
3508 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3509 __ EnterFrame(StackFrame::STACK_SWITCH);
3517 ResetStackSwitchFrameStackSlots(masm);
3519 regs.ResetExcept(closure);
3538 FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset),
3541 suspender,
FieldMemOperand(resume_data, WasmResumeData::kSuspenderOffset),
3543 regs.ResetExcept(suspender);
3550 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3552 FillJumpBuffer(masm, active_stack, &suspend, scratch);
3560 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3561 __ StoreTaggedField(
3564 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3567 int32_t active_suspender_offset =
3569 RootIndex::kActiveSuspender);
3577 suspender = target_stack;
3578 __ LoadU64(target_stack,
3582 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3583 SwitchStacks(masm, active_stack,
false, {target_stack});
3584 regs.ResetExcept(target_stack);
3594 MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3595 __ Zero(GCScanSlotPlace);
3598 LoadJumpBuffer(masm, target_stack,
false, scratch,
3602 __ LeaveFrame(StackFrame::STACK_SWITCH);
3605 __ CallRuntime(Runtime::kThrow);
3608 LoadJumpBuffer(masm, target_stack,
true, scratch,
3615 __ LeaveFrame(StackFrame::STACK_SWITCH);
3622void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3626void Builtins::Generate_WasmReject(MacroAssembler* masm) {
3630void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
3636void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
3637 Register wasm_instance, Register wrapper_buffer,
3638 Register& original_fp, Register& new_wrapper_buffer,
3640 ResetStackSwitchFrameStackSlots(masm);
3643 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3648 FillJumpBuffer(masm, parent_stack, suspend, scratch);
3649 SwitchStacks(masm, parent_stack,
false, {wasm_instance, wrapper_buffer});
3654 regs.Pinned(r15, &original_fp);
3655 __ Move(original_fp, fp);
3656 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3657 LoadTargetJumpBuffer(masm, target_stack, scratch,
3665 __ EnterFrame(StackFrame::STACK_SWITCH);
3669 JSToWasmWrapperFrameConstants::kWrapperBufferSize,
3671 __ SubS64(sp, sp, Operand(stack_space));
3672 __ EnforceStackAlignment();
3676 __ Move(new_wrapper_buffer, sp);
3680 static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
3681 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
3686 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3690 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3695 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3700 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3703void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
3709 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3717 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3719 promise,
FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset),
3724 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3727 ReloadParentStack(masm, promise, return_value,
kContextRegister, tmp, tmp2,
3729 RestoreParentSuspender(masm, tmp);
3732 __ mov(tmp, Operand(1));
3734 tmp,
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3736 __ CallBuiltin(Builtin::kFulfillPromise);
3741 __ bind(return_promise);
3744void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3745 RegisterAllocator& regs,
3746 Label* return_promise) {
3748 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3755 thread_in_wasm_flag_addr = r5;
3759 thread_in_wasm_flag_addr,
3761 __ mov(r0, Operand(0));
3767 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3769 promise,
FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset),
3776 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3778 ReloadParentStack(masm, promise, reason,
kContextRegister, tmp, tmp2, tmp3);
3779 RestoreParentSuspender(masm, tmp);
3781 __ mov(tmp, Operand(1));
3783 tmp,
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3785 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3786 __ CallBuiltin(Builtin::kRejectPromise);
3790 __ b(return_promise);
3792 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3795void JSToWasmWrapperHelper(MacroAssembler* masm,
wasm::Promise mode) {
3797 auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
3799 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3800 : StackFrame::JS_TO_WASM);
3802 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3807 __ LoadU64(implicit_arg,
3808 MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3817 SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
3818 original_fp, new_wrapper_buffer, &suspend);
3821 new_wrapper_buffer = wrapper_buffer;
3824 regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
3825 new_wrapper_buffer);
3830 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3834 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3839 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3842 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3849 MemOperand(wrapper_buffer, JSToWasmWrapperFrameConstants::
3850 kWrapperBufferStackReturnBufferSize));
3852 __ SubS64(sp, sp, r0);
3859 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart));
3871 int stack_params_offset =
3880 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3887 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3890 __ AddS64(last_stack_param, params_start, Operand(stack_params_offset));
3892 __ bind(&loop_start);
3894 Label finish_stack_params;
3895 __ CmpS64(last_stack_param, params_end);
3896 __ bge(&finish_stack_params);
3904 __ jmp(&loop_start);
3906 __ bind(&finish_stack_params);
3909 size_t next_offset = 0;
3925 DCHECK_EQ(next_offset, stack_params_offset);
3930 __ LoadU64(thread_in_wasm_flag_addr,
3934 __ mov(scratch, Operand(1));
3938 __ Zero(
MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));
3941 __ LoadWasmCodePointer(
3944 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3945 __ CallWasmCodePointer(call_target);
3954 __ LoadU64(thread_in_wasm_flag_addr,
3957 __ mov(r0, Operand(0));
3963 MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3969 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
3974 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
3979 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
3984 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));
3991 MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
3993 MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
3997 MemOperand(fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3999 r3,
MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
4002 GetContextFromImplicitArg(masm, r3, scratch);
4004 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
4006 Label return_promise;
4008 SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);
4012 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
4013 : StackFrame::JS_TO_WASM);
4024 GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
4029void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
4033void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
4037void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
4043static constexpr Register kOldSPRegister = r16;
4045void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
4046 Register target_input,
4047 Register argv_input) {
4048 using ER = ExternalReference;
4052 __ mov(kOldSPRegister, Operand(0));
4057 ER on_central_stack_flag_loc = ER::Create(
4058 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4059 __ Move(ip, on_central_stack_flag_loc);
4062 Label do_not_need_to_switch;
4063 __ CmpU32(ip, Operand(0), r0);
4064 __ bne(&do_not_need_to_switch);
4068 __ Move(kOldSPRegister, sp);
4071 DCHECK(!
AreAliased(central_stack_sp, argc_input, argv_input, target_input));
4073 __ Push(argc_input);
4074 __ Push(target_input);
4075 __ Push(argv_input);
4076 __ PrepareCallCFunction(2, r0);
4079 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4083 __ Pop(target_input);
4089 __ SubS64(sp, central_stack_sp, Operand(kReturnAddressSlotOffset +
kPadding));
4090 __ EnforceStackAlignment();
4095 __ AddS64(central_stack_sp, sp,
4099 __ bind(&do_not_need_to_switch);
4102void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
4103 using ER = ExternalReference;
4105 Label no_stack_change;
4107 __ CmpU64(kOldSPRegister, Operand(0), r0);
4108 __ beq(&no_stack_change);
4109 __ Move(sp, kOldSPRegister);
4113 __ PrepareCallCFunction(1, r0);
4115 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4120 __ bind(&no_stack_change);
4128 ArgvMode argv_mode,
bool builtin_exit_frame,
4129 bool switch_to_central_stack) {
4140 using ER = ExternalReference;
4143 static constexpr Register argc_input = r3;
4144 static constexpr Register target_fun = r15;
4145 static constexpr Register argv = r4;
4146 static constexpr Register scratch = ip;
4147 static constexpr Register argc_sav = r14;
4149 __ mr(target_fun, argv);
4157 __ add(argv, argv, sp);
4164 int arg_stack_space = 0;
4167 bool needs_return_buffer =
4169 if (needs_return_buffer) {
4170 arg_stack_space += result_size;
4174 scratch, arg_stack_space,
4175 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
4178 __ mr(argc_sav, argc_input);
4188 if (needs_return_buffer) {
4199#if V8_ENABLE_WEBASSEMBLY
4200 if (switch_to_central_stack) {
4201 SwitchToTheCentralStackIfNeeded(masm, argc_input, target_fun, argv);
4206 __ Move(isolate_reg, ER::isolate_address());
4207 __ StoreReturnAddressAndCall(target_fun);
4210 if (needs_return_buffer) {
4216 Label exception_returned;
4217 __ CompareRoot(r3, RootIndex::kException);
4218 __ beq(&exception_returned);
4220#if V8_ENABLE_WEBASSEMBLY
4221 if (switch_to_central_stack) {
4222 SwitchFromTheCentralStackIfNeeded(masm);
4230 ER exception_address =
4231 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4233 __ ExternalReferenceAsOperand(exception_address,
no_reg));
4234 __ LoadRoot(r0, RootIndex::kTheHoleValue);
4235 __ CompareTagged(r0, scratch);
4247 __ LeaveExitFrame(scratch);
4251 __ AddS64(sp, sp, scratch);
4257 __ bind(&exception_returned);
4259 ER pending_handler_context_address = ER::Create(
4260 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4261 ER pending_handler_entrypoint_address = ER::Create(
4262 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4263 ER pending_handler_constant_pool_address = ER::Create(
4264 IsolateAddressId::kPendingHandlerConstantPoolAddress, masm->isolate());
4265 ER pending_handler_fp_address =
4266 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4267 ER pending_handler_sp_address =
4268 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4274 __ PrepareCallCFunction(3, 0, r3);
4278 __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,
4283 __ Move(
cp, pending_handler_context_address);
4285 __ Move(sp, pending_handler_sp_address);
4287 __ Move(fp, pending_handler_fp_address);
4299 ER c_entry_fp_address =
4300 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4302 __ StoreU64(scratch,
4303 __ ExternalReferenceAsOperand(c_entry_fp_address,
no_reg));
4306 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
4309 __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
no_reg),
4318#if V8_ENABLE_WEBASSEMBLY
4319void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4320 using ER = ExternalReference;
4321 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4330 FrameScope scope(masm, StackFrame::INTERNAL);
4332 __ PrepareCallCFunction(5, r0);
4334 __ CallCFunction(ER::wasm_grow_stack(), 5);
4341 __ beq(&call_runtime);
4344 __ SubS64(fp, fp, sp);
4348 UseScratchRegisterScope temps(masm);
4349 Register scratch = temps.Acquire();
4356 __ bind(&call_runtime);
4361 MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
4365 WasmTrustedInstanceData::kNativeContextOffset),
4368 __ EnterFrame(StackFrame::INTERNAL);
4371 __ CallRuntime(Runtime::kWasmStackGuard);
4372 __ LeaveFrame(StackFrame::INTERNAL);
4378void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4379 Label out_of_range, only_low, negate, done, fastpath_done;
4382 HardAbortScope hard_abort(masm);
4391 __ Push(result_reg, scratch);
4396 __ lfd(double_scratch,
MemOperand(sp, argument_offset));
4399 __ ConvertDoubleToInt64(double_scratch,
4403 __ TestIfInt32(result_reg, r0);
4404 __ beq(&fastpath_done);
4406 __ Push(scratch_high, scratch_low);
4410 __ lwz(scratch_high,
4424 __ cmpi(scratch, Operand(83));
4425 __ bge(&out_of_range);
4432 __ subfic(scratch, scratch, Operand(51));
4437 __ srw(scratch_low, scratch_low, scratch);
4441 __ subfic(scratch, scratch, Operand(32));
4445 __ oris(result_reg, result_reg,
4447 __ ShiftLeftU32(r0, result_reg, scratch);
4448 __ orx(result_reg, scratch_low, r0);
4451 __ bind(&out_of_range);
4458 __ neg(scratch, scratch);
4459 __ ShiftLeftU32(result_reg, scratch_low, scratch);
4468 __ srawi(r0, scratch_high, 31);
4469 __ srdi(r0, r0, Operand(32));
4470 __ xor_(result_reg, result_reg, r0);
4471 __ srwi(r0, scratch_high, Operand(31));
4472 __ add(result_reg, result_reg, r0);
4475 __ Pop(scratch_high, scratch_low);
4479 __ bind(&fastpath_done);
4480 __ StoreU64(result_reg,
MemOperand(sp, argument_offset));
4481 __ Pop(result_reg, scratch);
4511 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4523 api_function_address =
4524 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4530 DCHECK(!
AreAliased(api_function_address, topmost_script_having_context, argc,
4531 func_templ, scratch));
4533 using FCA = FunctionCallbackArguments;
4534 using ER = ExternalReference;
4535 using FC = ApiCallbackExitFrameConstants;
4537 static_assert(FCA::kArgsLength == 6);
4538 static_assert(FCA::kNewTargetIndex == 5);
4539 static_assert(FCA::kTargetIndex == 4);
4540 static_assert(FCA::kReturnValueIndex == 3);
4541 static_assert(FCA::kContextIndex == 2);
4542 static_assert(FCA::kIsolateIndex == 1);
4543 static_assert(FCA::kUnusedIndex == 0);
4557 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4558 topmost_script_having_context);
4568 __ Move(scratch, ER::isolate_address());
4575 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
4576 __ StoreU64(scratch,
4580 __ StoreU64(func_templ,
4584 __ StoreU64(scratch,
4592 __ LoadExternalPointerField(
4593 api_function_address,
4595 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
4598 __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4599 StackFrame::API_CALLBACK_EXIT);
4607 __ StoreU64(argc, argc_operand);
4610 __ AddS64(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
4611 __ StoreU64(scratch,
MemOperand(fp, FC::kFCIImplicitArgsOffset));
4614 __ AddS64(scratch, fp, Operand(FC::kFirstArgumentOffset));
4615 __ StoreU64(scratch,
MemOperand(fp, FC::kFCIValuesOffset));
4618 __ RecordComment(
"v8::FunctionCallback's argument");
4619 __ AddS64(function_callback_info_arg, fp,
4620 Operand(FC::kFunctionCallbackInfoOffset));
4624 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4628 static constexpr int kSlotsToDropOnReturn =
4631 const bool with_profiling =
4634 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4635 &argc_operand, return_value_operand);
4638void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4648 using PCA = PropertyCallbackArguments;
4649 using ER = ExternalReference;
4650 using FC = ApiAccessorExitFrameConstants;
4652 static_assert(PCA::kPropertyKeyIndex == 0);
4653 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4654 static_assert(PCA::kHolderIndex == 2);
4655 static_assert(PCA::kIsolateIndex == 3);
4656 static_assert(PCA::kHolderV2Index == 4);
4657 static_assert(PCA::kReturnValueIndex == 5);
4658 static_assert(PCA::kDataIndex == 6);
4659 static_assert(PCA::kThisIndex == 7);
4660 static_assert(PCA::kArgsLength == 8);
  Register api_function_address = r5;

  // Build the PropertyCallbackArguments array on the stack, pushing pairs
  // from the highest index (kThisIndex) down.
  __ LoadTaggedField(scratch,
                     FieldMemOperand(callback, AccessorInfo::kDataOffset), r0);
  __ Move(smi_zero, Smi::zero());
  __ Push(receiver, scratch);  // kThisIndex, kDataIndex
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  __ Push(scratch, smi_zero);  // kReturnValueIndex, kHolderV2Index
  __ Move(scratch, ER::isolate_address());
  __ Push(scratch, holder);  // kIsolateIndex, kHolderIndex
  __ LoadTaggedField(name_arg,
                     FieldMemOperand(callback, AccessorInfo::kNameOffset), r0);
  __ Push(smi_zero, name_arg);  // kShouldThrowOnErrorIndex, kPropertyKeyIndex

  __ RecordComment("Load api_function_address");
  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset),
      kAccessorInfoGetterTag);

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_ACCESSOR_EXIT);
  __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
  // property_callback_info_arg = v8::PropertyCallbackInfo&
  __ AddS64(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));

  DCHECK(!AreAliased(api_function_address, property_callback_info_arg,
                     name_arg, callback, scratch));

#ifdef V8_ENABLE_DIRECT_HANDLE
  // name_arg = Local<Name>(name): the name value was pushed onto the GC-ed
  // stack space above, so name_arg is already initialized.
#else
  // name_arg = Local<Name>(&name) = &args_array[kPropertyKeyIndex].
  static_assert(PCA::kPropertyKeyIndex == 0);
  __ mr(name_arg, property_callback_info_arg);
#endif

  ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
  // Pass the AccessorInfo to the thunk wrapper in case profiling or
  // side-effect checking is enabled.
  Register thunk_arg = callback;

  static constexpr int kSlotsToDropOnReturn =
      FC::kPropertyCallbackInfoArgsLength;
  MemOperand* const kUseStackSpaceConstant = nullptr;

  const bool with_profiling = true;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
                           kUseStackSpaceConstant, return_value_operand);
}
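// Similarly, a minimal embedder-side named-property getter dispatched via
// this builtin (public V8 API, assumes #include <v8.h>; names are
// illustrative):
void MyNamedGetter(v8::Local<v8::Name> name,
                   const v8::PropertyCallbackInfo<v8::Value>& info) {
  // info wraps the PCA array pushed above; the return-value slot was preset
  // to undefined and is overwritten here.
  info.GetReturnValue().Set(
      v8::String::NewFromUtf8Literal(info.GetIsolate(), "value"));
}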
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);
void Generate_DeoptimizationEntry(MacroAssembler* masm,
                                  DeoptimizeKind deopt_kind) {
  Isolate* isolate = masm->isolate();

  RegList restored_regs = kJSCallerSaved | kCalleeSaved;
  RegList saved_regs = restored_regs | sp;

  // Save all allocatable double registers before messing with them.
  __ subi(sp, sp, Operand(kDoubleRegsSize));
  const RegisterConfiguration* config = RegisterConfiguration::Default();
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    // (store double register `code` into its slot in the reserved area)
  }

  // Push saved_regs, leaving gaps so FrameDescription::registers_ can be
  // indexed by register code.
  for (int16_t i = kNumberOfRegisters - 1; i >= 0; i--) {
    if ((saved_regs.bits() & (1 << i)) != 0) {
      __ StoreU64(ToRegister(i), MemOperand(sp, kSystemPointerSize * i));
    }
  }
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    __ Move(scratch, ExternalReference::Create(
                         IsolateAddressId::kCEntryFPAddress, isolate));
    // (record fp in the c_entry_fp slot)
  }

  const int kSavedRegistersAreaSize =
      (kNumberOfRegisters * kSystemPointerSize) + kDoubleRegsSize;

  // r6 = sp + kSavedRegistersAreaSize; used below to compute the
  // fp-to-sp delta passed to the deoptimizer.
  __ addi(r6, sp, Operand(kSavedRegistersAreaSize));

  // Allocate a new deoptimizer object.
  __ PrepareCallCFunction(5, r8);
  Label context_check;
  __ JumpIfSmi(r4, &context_check);
  // (otherwise load the function's context into r4)
  __ bind(&context_check);
  __ li(r4, Operand(static_cast<int>(deopt_kind)));
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
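  // The five arguments prepared above correspond, as a sketch, to the C++
  // entry point behind new_deoptimizer_function. The parameter order is
  // assumed from the register setup above (r3..r7 being the first PPC C
  // argument registers):
  //
  //   Deoptimizer* Deoptimizer::New(Address raw_function,   // r3
  //                                 DeoptimizeKind kind,    // r4
  //                                 Address from,           // r5
  //                                 int fp_to_sp_delta,     // r6
  //                                 Isolate* isolate);      // r7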
  // Copy the saved double registers into the input FrameDescription.
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int dst_offset = code * kSimd128Size + simd128_regs_offset;
    // (copy the saved value from the stack to dst_offset)
  }

  // Mark the stack as not iterable for in-process stack walkers.
  {
    UseScratchRegisterScope temps(masm);
    Register is_iterable = temps.Acquire();
    __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
    __ li(zero, Operand(0));
    // (store the zero byte at *is_iterable)
  }

  // Remove the saved registers from the stack.
  __ addi(sp, sp, Operand(kSavedRegistersAreaSize));
  // Unwind the stack down to the unwinding limit, copying the frame's
  // contents into the input FrameDescription as we go.
  Label pop_loop_header;
  __ b(&pop_loop_header);
  // (pop loop body: pop one word and store it into the FrameDescription)
  __ bind(&pop_loop_header);
  // (branch back while sp is below the unwinding limit)

  // Compute the output frames in the deoptimizer object.
  __ PrepareCallCFunction(1, r4);
  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);

  // Replace the current (input) frame with the output frames.
  Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
  __ b(&outer_loop_header);

  __ bind(&outer_push_loop);
  // (load the next output FrameDescription; r6 = its frame_size)
  __ b(&inner_loop_header);

  __ bind(&inner_push_loop);
  __ addi(r6, r6, Operand(-sizeof(intptr_t)));
  // (push frame_content(r6) onto the stack)
  __ bind(&inner_loop_header);
  __ bne(&inner_push_loop);

  __ bind(&outer_loop_header);
  __ blt(&outer_push_loop);
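  // The nested loops above perform, in effect, the following copy (a plain
  // C++ sketch; the FrameDescription accessors are named after the offsets
  // used here, with output/output_count coming from the Deoptimizer object):
  //
  //   for (int frame = 0; frame < output_count; ++frame) {    // outer loop
  //     FrameDescription* desc = output[frame];
  //     for (intptr_t off = desc->frame_size(); off > 0;
  //          off -= sizeof(intptr_t)) {                       // inner loop
  //       push(desc->frame_content(off - sizeof(intptr_t)));
  //     }
  //   }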
  // Restore the double registers from the output frame description.
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int src_offset = code * kSimd128Size + simd128_regs_offset;
    // (load double register `code` from src_offset)
  }

  // Restore the general-purpose registers from the output frame description.
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    DCHECK(!restored_regs.has(scratch));
    // (use scratch to hold the FrameDescription pointer while restoring)
    for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
      if ((restored_regs.bits() & (1 << i)) != 0) {
        // (load register i from its FrameDescription::registers_ slot)
      }
    }
  }

  // Mark the stack as iterable again.
  {
    UseScratchRegisterScope temps(masm);
    Register is_iterable = temps.Acquire();
    __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
    __ li(one, Operand(1));
    // (store the one byte at *is_iterable)
  }

  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();
    // (load the continuation address and jump to it)
  }
void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}
void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::INTERPRETED);