// NOTE(review): this file is an elided extraction of V8's ia32 builtin stubs;
// the leading numbers fused into each line are original file line numbers from
// the extraction tool, not code. Interior lines of every definition are missing.
30#if V8_ENABLE_WEBASSEMBLY
40#define __ ACCESS_MASM(masm)
43 int formal_parameter_count,
Address address) {
// Distinguishes raw tagged values from handles (pointers to tagged values)
// when pushing an argument array — see the kHandle branch below.
54enum class ArgumentsElementType {
// Pushes `argc` arguments from `array` onto the stack. For kHandle elements,
// each slot is dereferenced once (via scratch2) before being pushed.
59void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
60 Register scratch1, Register scratch2,
61 ArgumentsElementType element_type) {
69 if (element_type == ArgumentsElementType::kHandle) {
// Load the handle, then re-point `value` at the tagged object it refers to.
71 __ mov(scratch2, value);
72 value = Operand(scratch2, 0);
// Construct-stub path for builtin constructors: checks for stack overflow,
// sets up a CONSTRUCT frame, pushes the arguments (raw) plus a hole for the
// receiver, and drops the arguments on return. (Body elided in this view.)
80void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// eax holds the argument count here; ecx is a scratch register.
90 __ StackOverflowCheck(eax, ecx, &stack_overflow);
94 FrameScope scope(masm, StackFrame::CONSTRUCT);
111 Generate_PushArguments(masm, esi, eax, ecx,
no_reg,
112 ArgumentsElementType::kRaw);
// The hole marks the (not yet created) receiver slot.
114 __ PushRoot(RootIndex::kTheHoleValue);
132 __ DropArguments(edx, ecx);
// Out-of-line stack-overflow path: throws and never returns.
135 __ bind(&stack_overflow);
137 FrameScope scope(masm, StackFrame::INTERNAL);
138 __ CallRuntime(Runtime::kThrowStackOverflow);
// Generic [[Construct]] stub: creates the implicit receiver when the function
// kind requires one (via FastNewObject), invokes the constructor, and applies
// the usual construct-result semantics (use explicit JSReceiver result,
// otherwise the receiver; throw if a non-object is returned).
// (Body elided in this view.)
// FIX(review): `¬_create_implicit_receiver` below was HTML-entity mojibake —
// `&not` decoded to `¬` — restored to `&not_create_implicit_receiver`,
// matching the Label declared a few lines above.
146void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
157 __ EnterFrame(StackFrame::CONSTRUCT);
159 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
165 __ PushRoot(RootIndex::kTheHoleValue);
// Load the SharedFunctionInfo to inspect the function kind.
176 __ mov(eax,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
178 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(eax);
// Allocate the implicit receiver object.
185 __ CallBuiltin(Builtin::kFastNewObject);
189 __ bind(&not_create_implicit_receiver);
// Derived constructors get the hole instead of an implicit receiver.
190 __ LoadRoot(eax, RootIndex::kTheHoleValue);
201 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
203 __ bind(&post_instantiation_deopt_entry);
227 Label stack_overflow;
228 __ StackOverflowCheck(eax, ecx, &stack_overflow);
238 Generate_PushArguments(masm, edi, eax, ecx,
no_reg,
239 ArgumentsElementType::kRaw);
// Result handling: undefined result -> use the stored receiver;
// otherwise the result must be a JSReceiver.
253 Label check_result, use_receiver, do_throw, leave_and_return;
255 __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
260 __ bind(&use_receiver);
// A hole receiver here means the constructor never produced one — throw.
262 __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
264 __ bind(&leave_and_return);
267 __ LeaveFrame(StackFrame::CONSTRUCT);
270 __ DropArguments(edx, ecx);
275 __ bind(&check_result);
// Everything at or above FIRST_JS_RECEIVER_TYPE is a JSReceiver.
282 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
283 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
290 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
294 __ bind(&stack_overflow);
297 __ CallRuntime(Runtime::kThrowStackOverflow);
// Thin wrapper: the builtins construct stub just delegates to the helper above.
302void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
303 Generate_JSBuiltinsConstructStubHelper(masm);
// Called when something non-constructable is used with `new`: throws.
306void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
307 FrameScope scope(masm, StackFrame::INTERNAL);
309 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
// Interior of Generate_JSEntryVariant (signature elided above this view):
// the C++ -> JS entry stub. Saves/clears per-isolate entry state
// (c_entry_fp, fast C-call caller FP/PC), tracks the outermost JS entry via
// js_entry_sp, installs a stack handler around the trampoline call, and
// restores everything on the way out.
// FIX(review): `¬_outermost_js` / `¬_outermost_js_2` below were HTML-entity
// mojibake (`&not` decoded to `¬`) — restored to match the Labels declared
// at original line 326.
325 Label
invoke, handler_entry, exit;
326 Label not_outermost_js, not_outermost_js_2;
// The root register is not yet set up at entry from C++.
329 NoRootArrayScope uninitialized_root_register(masm);
353 IsolateAddressId::kCEntryFPAddress, masm->isolate());
// Save the callee-saved isolate entry state on the machine stack...
354 __ push(
__ ExternalReferenceAsOperand(c_entry_fp, edi));
356 __ push(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP));
358 __ push(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC));
// ...then clear it so nested JS cannot observe stale values.
364 __ mov(
__ ExternalReferenceAsOperand(c_entry_fp, edi), Immediate(0));
365 __ mov(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP),
367 __ mov(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC),
372 IsolateAddressId::kContextAddress, masm->isolate());
373 __ mov(edi,
__ ExternalReferenceAsOperand(context_address, edi));
375 __ mov(Operand(ebp, kOffsetToContextSlot), edi);
// js_entry_sp == 0 means this is the outermost JS entry; record our frame.
379 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
380 __ cmp(
__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
382 __ mov(
__ ExternalReferenceAsOperand(js_entry_sp, edi), ebp);
385 __ bind(&not_outermost_js);
// Exceptions thrown during the JS call unwind to handler_entry.
391 __ bind(&handler_entry);
395 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
// Store the pending exception and return the exception sentinel.
400 IsolateAddressId::kExceptionAddress, masm->isolate());
401 __ mov(
__ ExternalReferenceAsOperand(exception, edi), eax);
403 __ Move(eax, masm->isolate()->factory()->exception());
408 __ PushStackHandler(edi);
412 __ CallBuiltin(entry_trampoline);
415 __ PopStackHandler(edi);
// If we were the outermost entry, clear js_entry_sp again.
423 __ mov(
__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
424 __ bind(&not_outermost_js_2);
// Restore the saved isolate entry state in reverse push order.
427 __ pop(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC));
428 __ pop(
__ ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP));
429 __ pop(
__ ExternalReferenceAsOperand(c_entry_fp, edi));
// The three public JS entry points are thin wrappers selecting the frame type
// and the trampoline builtin for Generate_JSEntryVariant.
444void Builtins::Generate_JSEntry(MacroAssembler* masm) {
445 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
448void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
449 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
450 Builtin::kJSConstructEntryTrampoline);
453void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
454 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
455 Builtin::kRunMicrotasksTrampoline);
// Trampoline helper (signature elided): sets up the context, checks for stack
// overflow, pushes the (handle) arguments and calls the target builtin.
461 FrameScope scope(masm, StackFrame::INTERNAL);
468 IsolateAddressId::kContextAddress, masm->isolate());
469 __ mov(esi,
__ ExternalReferenceAsOperand(context_address, scratch1));
472 __ mov(scratch1, Operand(ebp, 0));
483 Label enough_stack_space, stack_overflow;
484 __ StackOverflowCheck(eax, ecx, &stack_overflow);
485 __ jmp(&enough_stack_space);
486 __ bind(&stack_overflow);  // NOTE(review): original numbering elided here
487 __ bind(&stack_overflow);
488 __ CallRuntime(Runtime::kThrowStackOverflow);
492 __ bind(&enough_stack_space);
// Arguments arrive as an array of handles from C++.
497 Generate_PushArguments(masm, scratch1, eax, ecx, scratch2,
498 ArgumentsElementType::kHandle);
501 __ mov(scratch2, Operand(ebp, 0));
512 __ CallBuiltin(builtin);
521void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
525void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
529void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
535 __ TailCallBuiltin(Builtin::kRunMicrotasks);
// Helpers that extract the bytecode array (or detect baseline code) from a
// SharedFunctionInfo's trusted function-data slot. (Bodies elided.)
// FIX(review): `¬_baseline` below was HTML-entity mojibake (`&not` -> `¬`);
// restored to `&not_baseline`, consistent with the other `not_*` labels in
// this file.
538static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
// If the data is an InterpreterData wrapper, unwrap to its bytecode array.
543 __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
546 FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
// Check whether a Code object is baseline code by its kind field.
556 __ DecodeField<Code::KindField>(scratch);
557 __ cmp(scratch, Immediate(
static_cast<int>(CodeKind::BASELINE)));
562 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
563 Label* is_baseline, Label* is_unavailable) {
569 FieldOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset));
571 __ LoadMap(scratch1, data);
// Code object -> possibly baseline; otherwise fall through to bytecode checks.
574 __ CmpInstanceType(scratch1, CODE_TYPE);
580 __ bind(&not_baseline);
586 __ CmpInstanceType(scratch1, BYTECODE_ARRAY_TYPE);
589 __ CmpInstanceType(scratch1, INTERPRETER_DATA_TYPE);
591 __ mov(data,
FieldOperand(data, InterpreterData::kBytecodeArrayOffset));
// Resumes a suspended JSGeneratorObject (in edx) with the input value in eax:
// stores the input (with write barrier), handles debugger stepping hooks,
// copies the parked parameters/registers back onto the stack, and jumps into
// the generator's function. (Body elided in this view.)
597void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Store the resume input into the generator and record the write.
604 __ mov(
FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
607 __ RecordWriteField(
object, JSGeneratorObject::kInputOrDebugPosOffset, eax,
611 __ AssertGeneratorObject(edx);
614 __ mov(edi,
FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
// Debugger hooks: step-in on call, or a registered suspended generator.
618 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
619 Label stepping_prepared;
620 ExternalReference debug_hook =
621 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
622 __ cmpb(
__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
626 ExternalReference debug_suspended_generator =
627 ExternalReference::debug_suspended_generator_address(masm->isolate());
628 __ cmp(edx,
__ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
629 __ j(
equal, &prepare_step_in_suspended_generator);
630 __ bind(&stepping_prepared);
634 Label stack_overflow;
639 __ PopReturnAddressTo(eax);
// Formal parameter count determines how many slots to restore.
653 __ mov(ecx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
655 ecx, SharedFunctionInfo::kFormalParameterCountOffset));
658 FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
660 Label done_loop, loop;
674 __ mov(edi,
FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
// The generator function must have bytecode (or baseline code) to resume.
680 Label is_baseline, is_unavailable, ok;
681 __ mov(ecx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
688 __ bind(&is_unavailable);
689 __ Abort(AbortReason::kMissingBytecodeArray);
691 __ bind(&is_baseline);
693 __ CmpObjectType(ecx, CODE_TYPE, ecx);
701 __ PushReturnAddressFrom(eax);
702 __ mov(eax,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
704 eax, SharedFunctionInfo::kFormalParameterCountOffset));
// Tail-jump into the generator's JSFunction.
708 __ JumpJSFunction(edi);
// Out-of-line paths: call into the runtime, then retry from the top.
711 __ bind(&prepare_step_in_if_stepping);
713 FrameScope scope(masm, StackFrame::INTERNAL);
717 __ PushRoot(RootIndex::kTheHoleValue);
718 __ CallRuntime(Runtime::kDebugOnFunctionCall);
720 __ mov(edi,
FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
722 __ jmp(&stepping_prepared);
724 __ bind(&prepare_step_in_suspended_generator);
726 FrameScope scope(masm, StackFrame::INTERNAL);
728 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
730 __ mov(edi,
FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
732 __ jmp(&stepping_prepared);
734 __ bind(&stack_overflow);
736 FrameScope scope(masm, StackFrame::INTERNAL);
737 __ CallRuntime(Runtime::kThrowStackOverflow);
// Fragment (enclosing helper's header elided): drops max(declared parameter
// count, actual argument count) slots when leaving an interpreter frame.
749 __ movzx_w(params_size,
750 FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));
752 Register actual_params_size = scratch2;
// Use the larger of declared vs. actual so extra arguments are dropped too.
758 __ cmp(params_size, actual_params_size);
759 __ cmov(
kLessThan, params_size, actual_params_size);
765 __ DropArguments(params_size, scratch2);
// Fragment (helper's name/header elided): advances bytecode_offset past the
// current bytecode, handling Wide/ExtraWide prefixes via per-scale size
// tables, jumping to if_return for return bytecodes, and restoring the
// original offset for JumpLoop (so OSR sees the loop header).
// FIX(review): `¬_jump_loop` below was HTML-entity mojibake (`&not` -> `¬`);
// restored to `&not_jump_loop`, matching the Label declared at original
// line 835.
774 Register bytecode_array,
775 Register bytecode_offset,
776 Register scratch1, Register scratch2,
777 Register scratch3, Label* if_return) {
779 Register bytecode_size_table = scratch1;
786 Register original_bytecode_offset = scratch3;
788 bytecode, original_bytecode_offset));
789 __ Move(bytecode_size_table,
790 Immediate(ExternalReference::bytecode_size_table_address()));
// Load the current bytecode and remember the pre-advance offset.
793 __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset,
times_1, 0));
794 __ Move(original_bytecode_offset, bytecode_offset);
// Bytecodes 0..3 are the scaling prefixes; dispatch on parity below.
797 Label process_bytecode, extra_wide;
798 static_assert(0 ==
static_cast<int>(interpreter::Bytecode::kWide));
799 static_assert(1 ==
static_cast<int>(interpreter::Bytecode::kExtraWide));
800 static_assert(2 ==
static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
802 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
803 __ cmp(bytecode, Immediate(0x3));
// Skip the prefix byte and reload the real bytecode.
808 __ inc(bytecode_offset);
809 __ test(bytecode, Immediate(0x1));
810 __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset,
times_1, 0));
// Select the (extra-)wide size table by offsetting the table pointer.
814 __ add(bytecode_size_table,
818 __ bind(&extra_wide);
820 __ add(bytecode_size_table,
823 __ bind(&process_bytecode);
// Jump to if_return for each return bytecode.
826#define JUMP_IF_EQUAL(NAME) \
828 Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
829 __ j(equal, if_return);
835 Label
end, not_jump_loop;
837 Immediate(
static_cast<int>(interpreter::Bytecode::kJumpLoop)));
// JumpLoop: keep the original offset (back edge handled elsewhere).
842 __ Move(bytecode_offset, original_bytecode_offset);
845 __ bind(&not_jump_loop);
// Otherwise advance by the bytecode's size from the selected table.
847 __ movzx_b(bytecode_size_table,
848 Operand(bytecode_size_table, bytecode,
times_1, 0));
849 __ add(bytecode_offset, bytecode_size_table);
// Resets the SFI age counter to 0 (used by the bytecode flushing mechanism).
856void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {
857 __ mov_w(
FieldOperand(sfi, SharedFunctionInfo::kAgeOffset), Immediate(0));
// Convenience wrapper: loads the function's SFI into `scratch` and resets it.
860void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
862 const Register shared_function_info(scratch);
863 __ Move(shared_function_info,
864 FieldOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
865 ResetSharedFunctionInfoAge(masm, shared_function_info);
// Clears the OSR-urgency bits in the feedback vector's OSR state byte,
// preserving the remaining bits.
868void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
869 Register feedback_vector, Register scratch) {
871 FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
872 __ and_(scratch, Immediate(~FeedbackVector::OsrUrgencyBits::kMask));
873 __ mov_b(
FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
// Interpreter entry trampoline fragment (function name elided; takes an
// InterpreterEntryTrampolineMode). Loads the feedback vector, optionally
// processes tiering flags (non-leaptiering builds), sets up the interpreter
// frame, allocates the register file, and dispatches on the first bytecode.
894 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
897 __ mov(ecx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
901 Label is_baseline, compile_lazy;
905 Label push_stack_frame;
909 __ LoadFeedbackVector(feedback_vector, closure, scratch, &push_stack_frame,
// Without leaptiering, tiering decisions are driven by feedback-vector flags.
913#ifndef V8_ENABLE_LEAPTIERING
917 Label flags_need_processing;
// Feedback vector is parked in an XMM register to free up a GP register.
919 XMMRegister saved_feedback_vector = xmm1;
920 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
921 flags, saved_feedback_vector, CodeKind::INTERPRETED_FUNCTION,
922 &flags_need_processing);
925 __ movd(feedback_vector, saved_feedback_vector);
928 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, scratch);
931 __ inc(
FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
943 __ bind(&push_stack_frame);
954 __ mov(eax,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
955 ResetSharedFunctionInfoAge(masm, eax);
957 FieldOperand(eax, SharedFunctionInfo::kTrustedFunctionDataOffset));
967 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
// Reserve and zero-initialize the interpreter register file.
977 Label stack_overflow;
982 BytecodeArray::kFrameSizeOffset));
986 __ sub(eax, frame_size);
995 __ bind(&loop_header);
999 __ bind(&loop_check);
// If the bytecode declares an incoming new.target/generator register, store it.
1006 Label no_incoming_new_target_or_generator_register;
1009 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1011 __ j(zero, &no_incoming_new_target_or_generator_register);
1013 __ bind(&no_incoming_new_target_or_generator_register);
1017 Label stack_check_interrupt, after_stack_check_interrupt;
1019 __ j(
below, &stack_check_interrupt);
1020 __ bind(&after_stack_check_interrupt);
// Main dispatch: jump through the dispatch table indexed by the bytecode.
1030 __ bind(&do_dispatch);
1032 Immediate(ExternalReference::interpreter_dispatch_table_address(
1041 __ RecordComment(
"--- InterpreterEntryReturnPC point ---");
1043 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1050 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1072 __ jmp(&do_dispatch);
1074 __ bind(&do_return);
// Out-of-line: stack-guard interrupt handling, then resume dispatch.
1080 __ bind(&stack_check_interrupt);
1086 __ CallRuntime(Runtime::kStackGuard);
1103 __ jmp(&after_stack_check_interrupt);
1106#ifndef V8_ENABLE_LEAPTIERING
1107 __ bind(&flags_need_processing);
1111 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, xmm1);
1115 __ bind(&compile_lazy);
1118 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
// Baseline code exists: install it on the closure (or compile it lazily).
1120 __ bind(&is_baseline);
1122#ifndef V8_ENABLE_LEAPTIERING
1125 __ mov(feedback_vector,
1126 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1127 __ mov(feedback_vector,
1128 FieldOperand(feedback_vector, FeedbackCell::kValueOffset));
1130 Label install_baseline_code;
1133 __ LoadMap(eax, feedback_vector);
1134 __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
1138 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1139 flags, xmm1, CodeKind::BASELINE, &flags_need_processing);
1147 __ ReplaceClosureCodeWithOptimizedCode(ecx, closure, eax, ecx);
1151 __ JumpCodeObject(ecx);
1153 __ bind(&install_baseline_code);
1157 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1161 __ bind(&stack_overflow);
1162 __ CallRuntime(Runtime::kThrowStackOverflow);
// Fragment (helper header elided): pushes arguments from start_address down
// to array_limit (arguments grow downwards in the interpreter's arg area).
1167 Register array_limit,
1168 Register start_address) {
1175 Label loop_header, loop_check;
1176 __ jmp(&loop_check);
1177 __ bind(&loop_header);
1178 __ Push(Operand(array_limit, 0));
1179 __ bind(&loop_check);
1181 __ cmp(array_limit, start_address);
// InterpreterPushArgsThenCall-style impl fragment: checks stack space,
// pushes args (plus optional undefined receiver) and tail-calls the target.
1201 Label stack_overflow;
1208 __ StackOverflowCheck(eax, scratch, &stack_overflow,
true);
1212 __ mov(scratch, eax);
1218 __ PopReturnAddressTo(eax);
1223 __ add(scratch, argv);
// Spill the spread argument pointer to xmm1 (no free GP registers on ia32).
1226 __ movd(xmm1, scratch);
1230 __ mov(ecx, Operand(ecx, 0));
1237 __ PushRoot(RootIndex::kUndefinedValue);
1240 __ PushReturnAddressFrom(eax);
1245 __ TailCallBuiltin(Builtin::kCallWithSpread);
1250 __ bind(&stack_overflow);
1252 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Pushes a zero slot, the interpreter arguments, and the return address,
// shifting existing stack slots as needed (num_slots_to_move) so the
// construct builtins see the expected layout. (Body elided.)
1263void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1264 MacroAssembler* masm, Register num_args, Register start_addr,
1265 Register scratch1, Register scratch2,
int num_slots_to_move,
1266 Label* stack_overflow) {
1283 __ StackOverflowCheck(num_args, scratch1, stack_overflow,
true);
1288 __ AllocateStackSpace(scratch1);
// Relocate the slots that must stay on top (return address etc.).
1294 for (
int i = 0;
i < num_slots_to_move + 1;
i++) {
// Zero the receiver slot, then copy arguments in a down-counting loop.
1305 __ mov(scratch1, Immediate(0));
1307 Label loop_header, loop_check;
1308 __ jmp(&loop_check);
1309 __ bind(&loop_header);
1310 __ mov(scratch2, Operand(start_addr, 0));
1315 __ bind(&loop_check);
1317 __ cmp(scratch1, eax);
// InterpreterPushArgsThenConstruct impl fragment: pushes args then
// tail-calls the appropriate construct builtin variant.
1336 Label stack_overflow;
1346 Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1347 masm, eax, ecx, edx, edi,
1359 __ PopReturnAddressTo(eax);
1363 __ PushReturnAddressFrom(eax);
1369 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1372 __ PopReturnAddressTo(eax);
// Dereference the spread argument slot.
1377 __ mov(ecx, Operand(ecx, 0));
1378 __ PushReturnAddressFrom(eax);
1380 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1383 __ PopReturnAddressTo(ecx);
1387 __ PushReturnAddressFrom(ecx);
1389 __ TailCallBuiltin(Builtin::kConstruct);
1392 __ bind(&stack_overflow);
1393 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Loads the frame pointer of the selected frame (current or parent) into `to`.
1398void LoadFramePointer(MacroAssembler* masm, Register to,
1400 switch (which_frame) {
// ConstructForwardAllArgs impl fragment: copies all arguments from the chosen
// frame onto the current stack and tail-calls Construct.
1413 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1419 Label stack_overflow;
1422 LoadFramePointer(masm, ecx, which_frame);
1445 __ StackOverflowCheck(eax, ecx, &stack_overflow,
true);
1447 __ AllocateStackSpace(ecx);
// Keep constructor/new.target spilled at a known esp offset during the copy.
1462 spilledConstructorAndNewTargetOffset));
1463 __ mov(Operand(esp, spilledConstructorAndNewTargetOffset), edx);
1476 LoadFramePointer(masm, ecx, which_frame);
1487 __ mov(counter, eax);
1508 __ TailCallBuiltin(Builtin::kConstruct);
1510 __ bind(&stack_overflow);
1512 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Allocates the implicit receiver via FastNewObject and stores it both in a
// register and into the receiver slot on the stack. (Body elided.)
1519void NewImplicitReceiver(MacroAssembler* masm) {
1540 __ CallBuiltin(Builtin::kFastNewObject);
1542 __ mov(implicit_receiver, eax);
1550 __ mov(Operand(esp, 0 ), implicit_receiver);
// Fast-path construct for interpreter calls: verifies the target is a
// constructor JSFunction, pushes arguments, sets up a FAST_CONSTRUCT frame,
// creates the implicit receiver when needed, invokes the constructor, and
// applies the standard construct-result rules. (Body elided in this view.)
// FIX(review): `¬_create_implicit_receiver` below was HTML-entity mojibake
// (`&not` decoded to `¬`) — restored to match the Label declared at
// original line 1631.
1562void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1563 MacroAssembler* masm) {
1578 __ AssertFunction(edi, edx);
// Non-constructors (e.g. arrow functions) bail out to a throwing builtin.
1581 Label non_constructor;
1583 __ LoadMap(edx, edi);
1585 Immediate(Map::Bits1::IsConstructorBit::kMask));
1586 __ j(zero, &non_constructor);
1589 Label stack_overflow;
1590 __ StackOverflowCheck(eax, edx, &stack_overflow,
true);
1599 __ PopReturnAddressTo(eax);
1601 __ PushReturnAddressFrom(eax);
1605 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
// Hole placeholders for context/receiver slots in the fast-construct frame.
1608 __ PushRoot(RootIndex::kTheHoleValue);
1618 __ PushRoot(RootIndex::kTheHoleValue);
1625 __ mov(ecx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1627 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
// Only function kinds that need an implicit receiver allocate one.
1631 Label not_create_implicit_receiver;
1632 __ mov(ecx,
FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset));
1633 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(ecx);
1638 NewImplicitReceiver(masm);
1639 __ bind(&not_create_implicit_receiver);
1655 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
// Construct-result handling, mirroring Generate_JSConstructStubGeneric.
1662 Label check_result, use_receiver, do_throw, leave_and_return;
1664 __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
1669 __ bind(&use_receiver);
1671 __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
1673 __ bind(&leave_and_return);
1674 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1679 __ bind(&check_result);
1686 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
1687 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1694 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1698 __ bind(&builtin_call);
1700 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1705 __ bind(&non_constructor);
1706 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1709 __ bind(&stack_overflow);
1710 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Fragment (helper header elided): computes the interpreter-entry return
// address — either from a custom InterpreterData trampoline or the default
// entry trampoline — then re-enters dispatch at the saved bytecode offset.
1718 Label builtin_trampoline, trampoline_loaded;
1720 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1723 static constexpr Register scratch = ecx;
1730 __ mov(scratch,
FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
1732 FieldOperand(scratch, SharedFunctionInfo::kTrustedFunctionDataOffset));
// Functions with InterpreterData carry their own trampoline Code object.
1734 __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
1738 FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
1739 __ LoadCodeInstructionStart(scratch, scratch);
1742 __ bind(&builtin_trampoline);
1744 __ ExternalReferenceAsOperand(
1746 address_of_interpreter_entry_trampoline_instruction_start(
1750 __ bind(&trampoline_loaded);
1752 __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
1757 Immediate(ExternalReference::interpreter_dispatch_table_address(
1771 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
// Advances past the current bytecode before re-entering (handles the
// function-entry sentinel offset specially), aborting on a return bytecode.
1797void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1805 Label enter_bytecode, function_entry_bytecode;
1809 __ j(
equal, &function_entry_bytecode);
1819 __ bind(&enter_bytecode);
1827 __ bind(&function_entry_bytecode);
1834 __ jmp(&enter_bytecode);
1837 __ bind(&if_return);
1839 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1842void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
// Out-of-line prologue for baseline (Sparkplug) code. ia32 is register-starved,
// so incoming descriptor registers are parked in XMM registers while the
// BASELINE frame is built; handles tiering flags and the stack guard.
1847void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1850 Register arg_count = descriptor.GetRegisterParameter(
1851 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
1852 Register frame_size = descriptor.GetRegisterParameter(
1853 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
// XMM registers serve as spill slots to free up GP registers.
1856 XMMRegister saved_arg_count = xmm0;
1857 XMMRegister saved_bytecode_array = xmm1;
1858 XMMRegister saved_frame_size = xmm2;
1859 XMMRegister saved_feedback_cell = xmm3;
1860 XMMRegister saved_feedback_vector = xmm4;
1861 __ movd(saved_arg_count, arg_count);
1862 __ movd(saved_frame_size, frame_size);
1868 Register closure = descriptor.GetRegisterParameter(
1869 BaselineOutOfLinePrologueDescriptor::kClosure);
1871 __ mov(feedback_cell,
FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1872 __ movd(saved_feedback_cell, feedback_cell);
1874 __ mov(feedback_vector,
1875 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1876 __ AssertFeedbackVector(feedback_vector, scratch);
1879#ifdef V8_ENABLE_LEAPTIERING
1880 __ movd(saved_feedback_vector, feedback_vector);
1884 Label flags_need_processing;
1886 __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1887 flags, saved_feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1890 __ movd(feedback_vector, saved_feedback_vector);
1895 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, eax);
1896 __ movd(arg_count, saved_arg_count);
1900 __ inc(
FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
// Stash the return address so the frame can be built beneath it.
1902 XMMRegister return_address = xmm5;
1905 __ PopReturnAddressTo(return_address, scratch);
1907 __ Pop(saved_bytecode_array, scratch);
1911 __ EnterFrame(StackFrame::BASELINE);
1913 __ Push(descriptor.GetRegisterParameter(
1914 BaselineOutOfLinePrologueDescriptor::kCalleeContext));
1916 Register callee_js_function = descriptor.GetRegisterParameter(
1917 BaselineOutOfLinePrologueDescriptor::kClosure);
1920 ResetJSFunctionAge(masm, callee_js_function, scratch);
1921 __ Push(callee_js_function);
1922 __ Push(saved_arg_count, scratch);
1926 __ Push(saved_bytecode_array, scratch);
1927 __ Push(saved_feedback_cell, scratch);
1928 __ Push(saved_feedback_vector, scratch);
// Interrupt-budget / stack-limit check with the full frame size.
1931 Label call_stack_guard;
1942 __ movd(frame_size, saved_frame_size);
1943 __ Move(scratch, esp);
1945 __ sub(scratch, frame_size);
1947 __ j(
below, &call_stack_guard);
1951 __ PushReturnAddressFrom(return_address, scratch);
1956#ifndef V8_ENABLE_LEAPTIERING
1957 __ bind(&flags_need_processing);
1965 __ movd(arg_count, saved_arg_count);
1966 __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, saved_feedback_vector);
1971 __ bind(&call_stack_guard);
1977 __ PushReturnAddressFrom(return_address, scratch);
1978 FrameScope manual_frame_scope(masm, StackFrame::INTERNAL);
1981 __ SmiTag(frame_size);
1982 __ Push(frame_size);
1983 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
// Deopt path out of the baseline prologue: tear down the BASELINE frame and
// re-enter through the interpreter entry trampoline.
1994void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
2014 __ LeaveFrame(StackFrame::BASELINE);
2017 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
// Deoptimizer continuation: restores the allocatable registers saved in the
// continuation frame, resolves the builtin entry stored on the stack, and
// resumes it; optionally threads the deopt result into eax. (Body elided.)
2021void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2022 bool javascript_builtin,
2025 int allocatable_register_count = config->num_allocatable_general_registers();
2027 if (javascript_builtin) {
2034 Operand(esp, config->num_allocatable_general_registers() *
// Convert the stored builtin index into an entry address in place.
2045 __ mov(eax, Operand(esp, offset_to_builtin_index));
2046 __ LoadEntryFromBuiltinIndex(eax, eax);
2047 __ mov(Operand(esp, offset_to_builtin_index), eax);
// Restore allocatable registers in reverse allocation order.
2049 for (
int i = allocatable_register_count - 1;
i >= 0; --
i) {
2050 int code = config->GetAllocatableGeneralCode(
i);
2056 if (with_result && javascript_builtin) {
2068 const int offsetToPC =
2071 __ pop(Operand(esp, offsetToPC));
// Four public wrappers: JS-vs-codestub builtin, with/without a result.
2077void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2078 Generate_ContinueToBuiltinHelper(masm,
false,
false);
2081void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2082 MacroAssembler* masm) {
2083 Generate_ContinueToBuiltinHelper(masm,
false,
true);
2086void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2087 Generate_ContinueToBuiltinHelper(masm,
true,
false);
2090void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2091 MacroAssembler* masm) {
2092 Generate_ContinueToBuiltinHelper(masm,
true,
true);
// Notifies the runtime that deoptimized code was entered.
2095void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2097 FrameScope scope(masm, StackFrame::INTERNAL);
2098 __ CallRuntime(Runtime::kNotifyDeoptimized);
// Function.prototype.apply: extracts thisArg/argArray from the stack,
// rewrites the frame to (receiver=thisArg), then calls with the array-like
// arguments (or no arguments when argArray is null/undefined). (Body elided.)
2108void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2121 Label no_arg_array, no_this_arg;
2122 StackArgumentsAccessor
args(eax);
2124 __ movd(xmm0,
args.GetReceiverOperand());
2126 __ LoadRoot(edx, RootIndex::kUndefinedValue);
2135 __ bind(&no_arg_array);
2137 __ bind(&no_this_arg);
2138 __ DropArgumentsAndPushNewReceiver(eax, edi, ecx);
// null/undefined argArray means "call with zero arguments".
2157 __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments,
Label::kNear);
2158 __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments,
Label::kNear);
2161 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2165 __ bind(&no_arguments);
// Function.prototype.call: shifts arguments down by one so the explicit
// thisArg becomes the receiver, then performs an ordinary call.
2173void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2185 StackArgumentsAccessor
args(eax);
2186 __ mov(edi,
args.GetReceiverOperand());
2190 __ PopReturnAddressTo(edx);
2198 __ PushRoot(RootIndex::kUndefinedValue);
2205 __ PushReturnAddressFrom(edx);
// Reflect.apply(target, thisArgument, argumentsList).
2212void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2227 StackArgumentsAccessor
args(eax);
2228 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2244 __ DropArgumentsAndPushNewReceiver(eax, ecx, edx);
2262 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
// Reflect.construct(target, argumentsList[, newTarget]).
2265void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2281 StackArgumentsAccessor
args(eax);
2282 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2299 __ DropArgumentsAndPushNewReceiver(
2300 eax, masm->RootAsOperand(RootIndex::kUndefinedValue), ecx);
2323 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
// Allocates `count` new stack slots and shifts the existing arguments down so
// the new slots open up between receiver and arguments; returns (via
// pointer_to_new_space_out) where to write the new arguments, and bumps
// argc_in_out by count. (Body elided.)
2332void Generate_AllocateSpaceAndShiftExistingArguments(
2333 MacroAssembler* masm, Register count, Register argc_in_out,
2334 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2339 Register old_esp = pointer_to_new_space_out;
2341 __ mov(old_esp, esp);
2344 __ AllocateStackSpace(new_space);
2357 __ cmp(current, argc_in_out);
2363 pointer_to_new_space_out,
2366 __ add(argc_in_out, count);
// CallOrConstructVarargs fragment (header elided): validates the arguments
// FixedArray, checks stack space, copies elements (holes -> undefined), and
// tail-calls the target builtin.
2389 const Register kArgumentsList = esi;
2392 __ PopReturnAddressTo(edx);
2393 __ pop(kArgumentsList);
2394 __ PushReturnAddressFrom(edx);
// Debug-mode validation that the list is a FixedArray (or empty double array).
2400 __ AssertNotSmi(kArgumentsList);
2402 __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
2404 __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
2406 __ cmp(kArgumentsLength, 0);
2410 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2417 Label stack_overflow;
2418 __ StackOverflowCheck(kArgumentsLength, edx, &stack_overflow);
// Spill the arguments list to xmm4 across the stack-shift helper call.
2420 __ movd(xmm4, kArgumentsList);
2426 Generate_AllocateSpaceAndShiftExistingArguments(masm, kArgumentsLength, eax,
2428 __ movd(kArgumentsList, xmm4);
2433 __ Move(eax, Immediate(0));
2434 Label done,
push, loop;
2436 __ cmp(eax, kArgumentsLength);
// The-hole elements are normalized to undefined before being pushed.
2441 __ CompareRoot(edi, RootIndex::kTheHoleValue);
2443 __ LoadRoot(edi, RootIndex::kUndefinedValue);
2445 __ mov(Operand(edx, 0), edi);
2459 __ TailCallBuiltin(target_builtin);
2461 __ bind(&stack_overflow);
2463 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// CallOrConstructForwardVarargs fragment: for Construct mode, first verifies
// new.target is a constructor; then forwards the caller's rest arguments.
2468 CallOrConstructMode mode,
2484 Label new_target_constructor, new_target_not_constructor;
2488 Immediate(Map::Bits1::IsConstructorBit::kMask));
2490 __ bind(&new_target_not_constructor);
2493 __ EnterFrame(StackFrame::INTERNAL);
2496 __ CallRuntime(Runtime::kThrowNotConstructor);
2498 __ bind(&new_target_constructor);
2503 Label stack_done, stack_overflow;
2521 __ StackOverflowCheck(edx, edi, &stack_overflow);
2531 Generate_AllocateSpaceAndShiftExistingArguments(masm, edx, eax, esi, ebx,
2544 Register src = ecx, dest = esi, num = edx;
2556 __ bind(&stack_done);
2562 __ TailCallBuiltin(target_builtin);
2564 __ bind(&stack_overflow);
2567 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Generate_CallFunction fragment (header elided): performs the
// [[Call]] receiver conversion for sloppy-mode, non-native functions —
// null/undefined receivers become the global proxy, primitives go through
// ToObject — then invokes with the declared formal parameter count.
2577 StackArgumentsAccessor
args(eax);
2578 __ AssertCallableFunction(edi, edx);
2580 __ mov(edx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
// Native or strict functions take the receiver as-is.
2589 Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2590 SharedFunctionInfo::IsStrictBit::kMask));
2602 __ LoadGlobalProxy(ecx);
2604 Label convert_to_object, convert_receiver;
2605 __ mov(ecx,
args.GetReceiverOperand());
2607 static_assert(LAST_JS_RECEIVER_TYPE ==
LAST_TYPE);
2608 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
2611 __ mov(ecx,
args.GetReceiverOperand());
2613 Label convert_global_proxy;
2614 __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
2616 __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
2618 __ bind(&convert_global_proxy);
2621 __ LoadGlobalProxy(ecx);
2623 __ jmp(&convert_receiver);
// Primitive receiver: call ToObject in an internal frame.
2625 __ bind(&convert_to_object);
2630 FrameScope scope(masm, StackFrame::INTERNAL);
2636 __ CallBuiltin(Builtin::kToObject);
2643 __ mov(edx,
FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2644 __ bind(&convert_receiver);
2646 __ mov(
args.GetReceiverOperand(), ecx);
2648 __ bind(&done_convert);
2658 ecx,
FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
// Pushes a JSBoundFunction's bound arguments beneath the existing arguments,
// with a stack-overflow check. No-op when the bound-arguments list is empty.
2664void Generate_PushBoundArguments(MacroAssembler* masm) {
2673 Label no_bound_arguments;
2674 __ mov(ecx,
FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2678 __ j(zero, &no_bound_arguments);
2690 Label done, stack_overflow;
2691 __ StackOverflowCheck(edx, ecx, &stack_overflow);
2693 __ bind(&stack_overflow);
2696 __ EnterFrame(StackFrame::INTERNAL);
2697 __ CallRuntime(Runtime::kThrowStackOverflow);
2715 __ mov(ecx,
FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2740 __ bind(&no_bound_arguments);
// CallBoundFunction fragment (header elided): installs the bound `this`,
// pushes the bound arguments, then dispatches to the bound target function.
2752 __ AssertBoundFunction(edi);
2755 StackArgumentsAccessor
args(eax);
2756 __ mov(ecx,
FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2757 __ mov(
args.GetReceiverOperand(), ecx);
2760 Generate_PushBoundArguments(masm);
2763 __ mov(edi,
FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
// Generate_Call fragment (header elided): the generic [[Call]] dispatcher.
// Order of checks: JSFunction -> JSBoundFunction -> callable-bit -> JSProxy
// -> JSWrappedFunction -> class constructor (throws) -> call-as-function
// delegate for other callables; non-callables throw.
2779 StackArgumentsAccessor
args(argc);
2781 Label non_callable, non_smi, non_callable_jsfunction, non_jsboundfunction,
2782 non_proxy, non_wrapped_function, class_constructor;
2783 __ JumpIfSmi(target, &non_callable);
2785 __ LoadMap(map, target);
2786 __ CmpInstanceTypeRange(map, instance_type, map,
2789 __ j(
above, &non_callable_jsfunction);
2792 __ bind(&non_callable_jsfunction);
2793 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2795 __ TailCallBuiltin(Builtin::kCallBoundFunction);
2798 __ bind(&non_jsboundfunction);
2799 __ LoadMap(map, target);
2801 Immediate(Map::Bits1::IsCallableBit::kMask));
2802 __ j(zero, &non_callable);
2805 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2807 __ TailCallBuiltin(Builtin::kCallProxy);
2811 __ bind(&non_proxy);
2812 __ cmpw(instance_type, Immediate(JS_WRAPPED_FUNCTION_TYPE));
2814 __ TailCallBuiltin(Builtin::kCallWrappedFunction);
// Class constructors are callable but must not be [[Call]]ed.
2818 __ bind(&non_wrapped_function);
2819 __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
2820 __ j(
equal, &class_constructor);
// Exotic callable object: the object becomes the receiver and the
// call-as-function delegate becomes the target.
2825 __ mov(
args.GetReceiverOperand(), target);
2827 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2832 __ bind(&non_callable);
2834 FrameScope scope(masm, StackFrame::INTERNAL);
2836 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2841 __ bind(&class_constructor);
2843 FrameScope frame(masm, StackFrame::INTERNAL);
2845 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
// Construct dispatch for plain JSFunctions (edi = constructor): picks the
// builtins construct stub when the SharedFunctionInfo requests it, otherwise
// the generic construct stub. NOTE(review): listing is elided.
2851void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2857  __ AssertConstructor(edi);
2858  __ AssertFunction(edi, ecx);
2860  Label call_generic_stub;
  // Test the ConstructAsBuiltin bit on the SharedFunctionInfo.
2863  __ mov(ecx,
 FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2865         Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  // ecx is reused here as the new.target/extra argument slot (undefined).
2870  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2871  __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);
2873  __ bind(&call_generic_stub);
2876  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2877  __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
// Construct dispatch for JSBoundFunctions (edi = bound function): pushes the
// bound arguments, fixes up new.target, and re-dispatches Construct on the
// bound target. NOTE(review): listing is elided.
2881void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2887  __ AssertConstructor(edi);
2888  __ AssertBoundFunction(edi);
2891  Generate_PushBoundArguments(masm);
  // If new.target equals the bound function itself, replace it with the
  // bound target (guard condition is in elided code; edx holds new.target).
2898    __ mov(edx,
 FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  // Construct the bound target function instead of the bound function.
2903  __ mov(edi,
 FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2904  __ TailCallBuiltin(Builtin::kConstruct);
// Generic Construct builtin: classifies the target by map bits and instance
// type and tail-calls the matching specialized construct builtin, or throws
// for non-constructables. NOTE(review): listing is elided.
2908void Builtins::Generate_Construct(MacroAssembler* masm) {
2920  StackArgumentsAccessor
 args(argc);
2923  Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
  // Smis are never constructors.
2924  __ JumpIfSmi(target, &non_constructor);
  // Test the "is constructor" bit on the map.
2929         Immediate(Map::Bits1::IsConstructorBit::kMask));
2930  __ j(zero, &non_constructor);
  // Dispatch on instance type: JSFunction range first (common case).
2933  __ CmpInstanceTypeRange(map, map, map, FIRST_JS_FUNCTION_TYPE,
2934                          LAST_JS_FUNCTION_TYPE);
2936  __ TailCallBuiltin(Builtin::kConstructFunction);
2940  __ bind(&non_jsfunction);
2942  __ CmpInstanceType(map, JS_BOUND_FUNCTION_TYPE);
2944  __ TailCallBuiltin(Builtin::kConstructBoundFunction);
2947  __ bind(&non_jsboundfunction);
2948  __ CmpInstanceType(map, JS_PROXY_TYPE);
2950  __ TailCallBuiltin(Builtin::kConstructProxy);
2953  __ bind(&non_proxy);
  // Constructable non-function object: make it the receiver and construct
  // through the CALL_AS_CONSTRUCTOR delegate.
2956    __ mov(
 args.GetReceiverOperand(), target);
2958    __ LoadNativeContextSlot(target,
2959                             Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
  // Throws TypeError; does not return.
2965  __ bind(&non_constructor);
2966  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
// Jumps into optimized code at an OSR entry by overwriting the return
// address slot with entry_address. NOTE(review): body is elided; the
// return/jump sequence that consumes the patched slot is not visible.
2971void Generate_OSREntry(MacroAssembler* masm, Register entry_address) {
2974  __ mov(Operand(esp, 0), entry_address);
2980enum class OsrSourceTier {
// Common tail for interpreter/baseline on-stack replacement: compiles OSR
// code if needed, optionally logs the OSR entry, then jumps into the
// optimized code. NOTE(review): listing is elided.
2985void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
2986                        Register maybe_target_code,
2987                        Register expected_param_count) {
2988  Label jump_to_optimized_code;
  // Non-null maybe_target_code means OSR code is already available.
2994  __ cmp(maybe_target_code, Immediate(0));
3000    FrameScope scope(masm, StackFrame::INTERNAL);
  // Request OSR compilation; eax receives the resulting code (or null).
3001    __ CallRuntime(Runtime::kCompileOptimizedOSR);
3005  __ cmp(eax, Immediate(0));
3009  __ bind(&jump_to_optimized_code);
  // Only call the (slow) log/trace runtime when the flag byte is set.
3015  __ cmpb(
 __ ExternalReferenceAsOperand(
3016              ExternalReference::address_of_log_or_trace_osr(), ecx),
3021    FrameScope scope(masm, StackFrame::INTERNAL);
3023    __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
3030  if (source == OsrSourceTier::kInterpreter) {
  // Locate the deopt data to find the OSR entry offset (elided below).
3040  __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOrInterpreterDataOffset -
3049  __ LoadCodeInstructionStart(eax, eax);
3054  Generate_OSREntry(masm, eax);
// OSR entry point when tiering up from the interpreter: forwards the
// descriptor's registers to the shared OnStackReplacement helper.
3059void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3060  using D = OnStackReplacementDescriptor;
3061  static_assert(D::kParameterCount == 2);
3062  OnStackReplacement(masm, OsrSourceTier::kInterpreter,
3063                     D::MaybeTargetCodeRegister(),
3064                     D::ExpectedParameterCountRegister());
// OSR entry point when tiering up from baseline (Sparkplug) code; mirrors
// the interpreter variant above but with OsrSourceTier::kBaseline.
3067void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
3068  using D = OnStackReplacementDescriptor;
3069  static_assert(D::kParameterCount == 2);
3073  OnStackReplacement(masm, OsrSourceTier::kBaseline,
3074                     D::MaybeTargetCodeRegister(),
3075                     D::ExpectedParameterCountRegister());
3078#if V8_ENABLE_WEBASSEMBLY
// Saves all wasm parameter registers to the stack and returns the byte
// offset consumed. NOTE(review): body is almost entirely elided; only the
// frame-size static_assert messages are visible here.
3081int SaveWasmParams(MacroAssembler* masm) {
3087                "frame size mismatch");
3093                "frame size mismatch");
// Restores the wasm parameter registers previously saved by SaveWasmParams;
// `offset` is the value SaveWasmParams returned. NOTE(review): body elided.
3105void RestoreWasmParams(MacroAssembler* masm,
 int offset) {
// Sets up a Liftoff frame and ensures a feedback vector exists for the
// function, allocating one via the runtime when the instance-data slot holds
// a Smi. NOTE(review): listing is elided.
3125void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3126  constexpr Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
  // Swap the saved-ebp slot contents with ebp (tmp keeps the old value).
3135  __ mov(tmp, saved_ebp_slot);
3136  __ mov(saved_ebp_slot, ebp);
3145  Operand marker_slot = Operand(ebp, WasmFrameConstants::kFrameTypeOffset);
3146  Operand instance_data_slot =
3147      Operand(ebp, WasmFrameConstants::kWasmInstanceDataOffset);
  // Load the feedback vector for func_index (indexing code elided).
3151                              WasmTrustedInstanceData::kFeedbackVectorsOffset));
3154  Label allocate_vector;
  // A Smi here means "no vector allocated yet".
3155  __ JumpIfSmi(tmp, &allocate_vector);
3159  __ mov(tmp, instance_data_slot);
3162  __ mov(tmp, marker_slot);
3166  __ bind(&allocate_vector);
3180  __ mov(tmp, marker_slot);
  // Preserve the wasm parameter registers across the runtime call.
3184  int offset = SaveWasmParams(masm);
3188  __ SmiTag(func_index);
3189  __ Push(func_index);
3194  __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3198  RestoreWasmParams(masm,
 offset);
3208  __ mov(tmp, instance_data_slot);
// Trampoline for lazily compiling a wasm function: saves parameter
// registers, calls Runtime::kWasmCompileLazy, then jumps to the compiled
// code via the instance's jump table. NOTE(review): listing is elided.
3214void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // GC is not allowed while parameter registers live on the stack.
3219  HardAbortScope hard_abort(masm);
3220  FrameScope scope(masm, StackFrame::INTERNAL);
3221  int offset = SaveWasmParams(masm);
3229  __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3235  RestoreWasmParams(masm,
 offset);
  // Compute the jump-table target from the runtime's result (elided).
3240                        WasmTrustedInstanceData::kJumpTableStartOffset -
// Debug-break handler for wasm: spills FP registers around the
// Runtime::kWasmDebugBreak call so the debugger can inspect them.
// NOTE(review): listing is elided.
3248void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3249  HardAbortScope hard_abort(masm);
3251    FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3260    constexpr int kFpStackSize =
  // Reserve space for the FP register spills (stores are elided).
3262    __ AllocateStackSpace(kFpStackSize);
3263    int offset = kFpStackSize;
3273    __ CallRuntime(Runtime::kWasmDebugBreak, 0);
  // Release the FP spill area.
3280    __ add(esp, Immediate(kFpStackSize));
3292void SwitchStackState(MacroAssembler* masm, Register stack,
// Records the current sp/fp/pc into `stack`'s jump buffer so the stack can
// be resumed later at label `pc`. NOTE(review): body is mostly elided; only
// the pc capture is visible.
3303void FillJumpBuffer(MacroAssembler* masm, Register stack, Register scratch,
3311  __ LoadLabelAddress(scratch,
 pc);
3315void LoadJumpBuffer(MacroAssembler* masm, Register stack,
bool load_pc,
// Switches to target_stack's jump buffer without loading its pc (the caller
// continues linearly). Clears the GC scan-slot count first so the stack
// walker skips this frame's scan area during the switch.
3326void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3329      MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3330  __ Move(GCScanSlotPlace, Immediate(0));
3332  LoadJumpBuffer(masm, target_stack,
 false, expected_state);
// Calls the C function that performs a wasm stack switch (or return-switch),
// preserving the registers in `keep` across the call by push/pop.
// NOTE(review): listing is elided.
3336void SwitchStacks(MacroAssembler* masm, Register old_stack,
 bool return_switch,
3337                  const std::initializer_list<Register> keep) {
3338  using ER = ExternalReference;
  // Save every live register the C call might clobber.
3339  for (
 auto reg : keep) {
3343  __ PrepareCallCFunction(2, eax);
3345          Immediate(ER::isolate_address(masm->isolate())));
3348      return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
  // Restore in reverse order to mirror the pushes above.
3349  for (
 auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
// Returns execution to the parent (suspending) stack: makes the parent the
// active stack and performs a return-switch, keeping the promise, return
// value and context registers alive. NOTE(review): listing is elided.
3354void ReloadParentStack(MacroAssembler* masm, Register promise,
3355                       Register return_value, Register context, Register tmp,
3358  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
  // Make the parent the isolate's active stack before switching.
3373  __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3377  SwitchStacks(masm, active_stack,
 true,
3378               {promise, return_value,
 context, parent});
// Loads the native context out of `data`, which is either a
// WasmTrustedInstanceData or a WasmImportData; the instance-type check picks
// the right field offset. NOTE(review): listing is elided.
3384void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
3387  __ CmpInstanceType(scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);
  // WasmImportData path.
3391    __ Move(data,
 FieldOperand(data, WasmImportData::kNativeContextOffset));
  // WasmTrustedInstanceData path.
3395            FieldOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
// Pops the active suspender: replaces RootIndex::kActiveSuspender with its
// parent. The CompareRoot against undefined presumably guards a debug check
// in elided code — TODO confirm.
3399void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3401  __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3403          FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3404  __ CompareRoot(suspender, RootIndex::kUndefinedValue);
3405  __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
// Zeroes the implicit-arg and result-array spill slots of a STACK_SWITCH
// frame so the GC never scans stale values.
3408void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3409  __ mov(
 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset),
3411  __ mov(
 MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset),
// Switches from the parent stack onto a freshly-activated wasm stack and
// re-creates the STACK_SWITCH frame plus a copy of the wrapper buffer on the
// new stack. NOTE(review): listing is elided.
3415void SwitchToAllocatedStack(MacroAssembler* masm, Register wrapper_buffer,
3416                            Register original_fp, Register new_wrapper_buffer,
3417                            Register scratch, Register scratch2,
3419  ResetStackSwitchFrameStackSlots(masm);
3420  Register parent_stack = new_wrapper_buffer;
  // Save the resume point on the parent stack, then switch away from it.
3421  __ LoadRootRelative(parent_stack, IsolateData::active_stack_offset());
3423  FillJumpBuffer(masm, parent_stack, scratch, suspend);
3424  SwitchStacks(masm, parent_stack,
 false, {wrapper_buffer});
3427  __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
  // Keep the old fp so parameters can still be read from the parent frame.
3430  __ mov(original_fp, ebp);
3435  __ Push(Immediate(0));
3439  __ EnterFrame(StackFrame::STACK_SWITCH);
  // Copy the wrapper buffer onto the new stack (field copies elided below).
3442      JSToWasmWrapperFrameConstants::kWrapperBufferSize;
3443  __ AllocateStackSpace(stack_space);
3444  __ AlignStackPointer();
3445  __ mov(new_wrapper_buffer, esp);
3449          JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3451              JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount),
3456          JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount));
3459              JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount),
3465          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3469          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray),
3475          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
3480          JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray +
// After a suspending wasm call returns: switches back to the parent stack
// and fulfills the suspender's promise with the return value.
// NOTE(review): listing is elided.
3485void SwitchBackAndReturnPromise(MacroAssembler* masm, Register tmp,
3487                                Label* return_promise) {
  // Register assignments must match the FulfillPromise builtin's interface.
3492  static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3495  Register promise = desc.GetRegisterParameter(0);
3496  Register return_value = desc.GetRegisterParameter(1);
3500  __ LoadRoot(promise, RootIndex::kActiveSuspender);
3502          FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3505         MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3508  ReloadParentStack(masm, promise, return_value,
 kContextRegister, tmp, tmp2);
3509  RestoreParentSuspender(masm, tmp);
  // GC scan-slot count for the builtin call below (value in elided code).
3512    __ Move(
 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3515    __ CallBuiltin(Builtin::kFulfillPromise);
3519  __ bind(return_promise);
// Catch handler for suspending JS-to-wasm calls: on an escaping wasm
// exception, clears the thread-in-wasm flag, switches back to the parent
// stack and rejects the suspender's promise with the exception.
// NOTE(review): listing is elided.
3522void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3523                                         Label* return_promise) {
  // Restore esp to the frame's spill area (unwinding left it arbitrary).
3527  __ lea(esp,
 MemOperand(ebp, StackSwitchFrameConstants::kLastSpillOffset));
3530  Register thread_in_wasm_flag_addr = ecx;
3532      thread_in_wasm_flag_addr,
  // We are back in JS: clear the thread-in-wasm flag.
3534  __ mov(
 MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3535  thread_in_wasm_flag_addr =
 no_reg;
  // Register assignments must match the RejectPromise builtin's interface.
3539  static const Builtin_RejectPromise_InterfaceDescriptor desc;
3540  constexpr Register promise = desc.GetRegisterParameter(0);
3541  constexpr Register reason = desc.GetRegisterParameter(1);
3546  __ LoadRoot(promise, RootIndex::kActiveSuspender);
3547  __ Move(promise,
 FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3550         MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
  // Compile-time checks that the temps do not alias the builtin's inputs.
3552  static_assert(tmp1 != promise && tmp1 != reason && tmp1 !=
 kContextRegister);
3554  static_assert(tmp2 != promise && tmp2 != reason && tmp2 !=
 kContextRegister);
3557  RestoreParentSuspender(masm, edi);
3559  __ Move(
 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
  // debug_event = true so DevTools still reports the rejection.
3562  Register debug_event = desc.GetRegisterParameter(2);
3563  __ LoadRoot(debug_event, RootIndex::kTrueValue);
3564  __ CallBuiltin(Builtin::kRejectPromise);
  // Rejoin the normal return path.
3569  __ jmp(return_promise);
3571  masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
// Shared body of the JS-to-wasm wrappers. `mode` selects plain calls vs the
// stack-switching (promise/JSPI) variants; stack_switch presumably derives
// from it in elided code — TODO confirm. Marshals arguments from the
// wrapper buffer, calls the wasm code, then converts results back via
// kJSToWasmHandleReturns. NOTE(review): listing is elided.
3574void JSToWasmWrapperHelper(MacroAssembler* masm,
 wasm::Promise mode) {
3576  __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3577                             : StackFrame::JS_TO_WASM);
3579  constexpr int kNumSpillSlots = StackSwitchFrameConstants::kNumSpillSlots;
3582  ResetStackSwitchFrameStackSlots(masm);
  // When switching stacks, the parent frame's fp/buffer live in registers.
3587  Register original_fp = stack_switch ? esi : ebp;
3588  Register new_wrapper_buffer = stack_switch ? ecx : wrapper_buffer;
3592    SwitchToAllocatedStack(masm, wrapper_buffer, original_fp,
3593                           new_wrapper_buffer, eax, edx, &suspend);
3595  __ mov(
 MemOperand(ebp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset),
3596         new_wrapper_buffer);
  // Spill the implicit arg (instance data / import data) into the frame.
3600                     JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3601    __ mov(
 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset), eax);
3604    __ mov(result_array,
3606                      JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3607    __ mov(
 MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset),
3615      MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3616  __ Move(GCScanSlotPlace, Immediate(0));
  // Reserve the out-of-line return buffer requested by the wrapper buffer.
3622          JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferSize));
3624  __ sub(esp, result_size);
3628              JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart),
3632  new_wrapper_buffer =
 no_reg;
  // Copy stack parameters from the wrapper buffer onto the stack.
3636  __ mov(params_start,
3638                    JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3642                    JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3648  int stack_params_offset =
3653  stack_params_offset += param_padding;
3654  __ lea(last_stack_param,
 MemOperand(params_start, stack_params_offset));
3657  __ bind(&loop_start);
3659  Label finish_stack_params;
  // Push parameters back-to-front until last_stack_param is reached.
3660  __ cmp(last_stack_param, params_end);
3666  __ jmp(&loop_start);
3668  __ bind(&finish_stack_params);
3670  int next_offset = stack_params_offset;
3680  Register thread_in_wasm_flag_addr = ecx;
3682      thread_in_wasm_flag_addr,
  // Entering wasm: set the thread-in-wasm flag for the trap handler.
3684  __ mov(
 MemOperand(thread_in_wasm_flag_addr, 0), Immediate(1));
3686  next_offset -= param_padding;
3699           MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3702           MemOperand(ebp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3708                    JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3710  __ Move(
 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
  // The actual call into wasm code.
3713  __ CallWasmCodePointer(call_target);
3716      thread_in_wasm_flag_addr,
  // Back from wasm: clear the thread-in-wasm flag.
3718  __ mov(
 MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3719  thread_in_wasm_flag_addr =
 no_reg;
3721  wrapper_buffer = esi;
3722  __ mov(wrapper_buffer,
3723         MemOperand(ebp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
  // Spill all four potential return registers into the wrapper buffer.
3727              JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1),
3731              JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2),
3735              JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1),
3739              JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2),
  // Reload implicit arg and result array for the returns-handling builtin.
3747    __ mov(eax,
 MemOperand(ebp, StackSwitchFrameConstants::kImplicitArgOffset));
3748    __ mov(ecx,
 MemOperand(ebp, StackSwitchFrameConstants::kResultArrayOffset));
3751           MemOperand(ebp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3754                      JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3757  GetContextFromImplicitArg(masm, eax, scratch);
3758  __ mov(edx, wrapper_buffer);
3759  __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3761  Label return_promise;
3764    SwitchBackAndReturnPromise(masm, edx, edi, mode, &return_promise);
3768  __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3769                             : StackFrame::JS_TO_WASM);
  // Out-of-line catch handler for escaping wasm exceptions (JSPI).
3775    GenerateExceptionHandlingLandingPad(masm, &return_promise);
3780void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3784void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3788void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
// Assembly prologue of the wasm-to-JS wrapper: reserves stack space (for
// register spills, per elided code) and tail-calls the CSA implementation.
3792void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3800  __ sub(esp, Immediate(required_stack_space));
3820  __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3823void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
// Implements the JSPI suspend operation: saves the current stack's state,
// pops the active suspender, and switches back to the caller's stack.
// NOTE(review): listing is elided.
3827void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3829  __ EnterFrame(StackFrame::STACK_SWITCH);
3833  __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3836  ResetStackSwitchFrameStackSlots(masm);
  // Record the resume point on the current stack.
3843  __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3844  FillJumpBuffer(masm, stack, ecx, &resume);
3849  __ Move(suspender_stack,
3850          FieldOperand(suspender, WasmSuspenderObject::kStackOffset));
  // Sanity comparison of the suspender's stack against the active stack;
  // the branch consuming the flags is in elided code.
3858  __ cmp(suspender_stack, stack);
  // Make the caller's stack active and pop the suspender chain.
3870  __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3872  __ Move(parent,
 FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3873  __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), parent);
3879  SwitchStacks(masm, stack,
 false, {caller, suspender});
3881          FieldOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3883      MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3884  __ Move(GCScanSlotPlace, Immediate(0));
3888  __ LeaveFrame(StackFrame::STACK_SWITCH);
// Shared body of WasmResume/WasmReject: re-activates a suspended stack and
// either resumes it normally or rethrows the rejection value, depending on
// `on_resume`. NOTE(review): listing is elided.
3897void Generate_WasmResumeHelper(MacroAssembler* masm,
 wasm::OnResume on_resume) {
3898  __ EnterFrame(StackFrame::STACK_SWITCH);
3902  __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3905  ResetStackSwitchFrameStackSlots(masm);
  // Extract the suspender from the resume function's WasmResumeData.
3917  __ Move(function_data,
3918          FieldOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
3923          FieldOperand(function_data, WasmResumeData::kSuspenderOffset));
  // Save the resume point of the current (soon-to-be-parent) stack.
3933  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3934  FillJumpBuffer(masm, active_stack, edx, &suspend);
  // Link the target suspender under the current active suspender and make
  // it the new active suspender (with a GC write barrier).
3947  __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
3948  __ mov(
 FieldOperand(suspender, WasmSuspenderObject::kParentOffset),
3950  __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
3952  __ mov(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
3954  active_suspender =
 no_reg;
3957  __ Move(target_stack,
3958          FieldOperand(suspender, WasmSuspenderObject::kStackOffset));
3961  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3962  __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
3963  SwitchStacks(masm, active_stack,
 false, {target_stack});
3970  __ Move(
 MemOperand(ebp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
  // Reject path: rethrow the rejection value on the resumed stack.
3977    __ LeaveFrame(StackFrame::STACK_SWITCH);
3981    __ CallRuntime(Runtime::kThrow);
3988  __ LeaveFrame(StackFrame::STACK_SWITCH);
3994void Builtins::Generate_WasmResume(MacroAssembler* masm) {
3998void Builtins::Generate_WasmReject(MacroAssembler* masm) {
4002void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
4008static constexpr Register kOldSPRegister = esi;
// When running on a wasm secondary stack, switches esp to the isolate's
// central stack before a runtime/C call. kOldSPRegister is left zero when no
// switch happened (checked by the matching Switch-From helper).
// NOTE(review): listing is elided.
4010void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm,
 int edi_slot_index) {
4011  using ER = ExternalReference;
  // Zero means "no switch performed".
4018  __ Move(kOldSPRegister, 0);
4022  ER on_central_stack_flag = ER::Create(
4023      IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4025  Label do_not_need_to_switch;
4026  __ cmpb(
 __ ExternalReferenceAsOperand(on_central_stack_flag, ecx),
  // Remember the secondary-stack sp so it can be restored on the way out.
4031  __ mov(kOldSPRegister, esp);
4041  __ PrepareCallCFunction(2, ecx);
4044          Immediate(ER::isolate_address()));
4047  __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
  // Leave room for the return address on the central stack.
4056  __ sub(central_stack_sp, Immediate(kReturnAddressSlotOffset));
4057  __ mov(esp, central_stack_sp);
4061  __ AlignStackPointer();
4072  __ bind(&do_not_need_to_switch);
4075  __ bind(&exitLabel);
// Counterpart of SwitchToTheCentralStackIfNeeded: if a switch happened
// (kOldSPRegister is non-zero), restores esp and notifies the C side.
4078void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
4079  using ER = ExternalReference;
4081  Label no_stack_change;
  // Zero sentinel means we never left the central stack.
4082  __ cmp(kOldSPRegister, Immediate(0));
4083  __ j(
 equal, &no_stack_change);
4084  __ mov(esp, kOldSPRegister);
4091  __ PrepareCallCFunction(1, ecx);
4093          Immediate(ER::isolate_address()));
4094  __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4101  __ bind(&no_stack_change);
    // Interior of the CEntry generator (its leading parameters are elided):
    // builds an exit frame, calls the C++ runtime function, then either
    // returns the result or unwinds to the pending exception handler.
4109                               ArgvMode argv_mode,
 bool builtin_exit_frame,
4110                               bool switch_to_central_stack) {
4111  CHECK(result_size == 1 || result_size == 2);
4113  using ER = ExternalReference;
  // One extra reserved slot is needed to save edi across a stack switch.
4135  const int kSwitchToTheCentralStackSlots = switch_to_central_stack ? 1 : 0;
4136  const int kReservedStackSlots = 3 + kSwitchToTheCentralStackSlots;
4138#if V8_ENABLE_WEBASSEMBLY
4139  const int kEdiSlot = kReservedStackSlots - 1;
4143      kReservedStackSlots,
4144      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT, edi);
4148  static constexpr Register kArgvRegister = edi;
4150  __ mov(kArgvRegister, ecx);
4163#if V8_ENABLE_WEBASSEMBLY
4164  if (switch_to_central_stack) {
4165    SwitchToTheCentralStackIfNeeded(masm, kEdiSlot);
4172    __ CheckStackAlignment();
  // C calling convention: isolate pointer is the last argument.
4177  __ Move(ecx, Immediate(ER::isolate_address()));
4184  Label exception_returned;
  // The runtime signals failure by returning the exception sentinel.
4185  __ CompareRoot(eax, RootIndex::kException);
4186  __ j(
 equal, &exception_returned);
  // Debug-mode check that no exception is pending on the success path.
4192    __ LoadRoot(edx, RootIndex::kTheHoleValue);
4194    ER exception_address =
4195        ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4196    __ cmp(edx,
 __ ExternalReferenceAsOperand(exception_address, ecx));
4204#if V8_ENABLE_WEBASSEMBLY
4205  if (switch_to_central_stack) {
4206    SwitchFromTheCentralStackIfNeeded(masm);
4210  __ LeaveExitFrame(esi);
  // Drop the arguments (and receiver) before returning to JS.
4214    __ PopReturnAddressTo(ecx);
4215    __ lea(esp, Operand(kArgvRegister, kReceiverOnStackSize));
4216    __ PushReturnAddressFrom(ecx);
  // --- Exception path: find the handler and jump to it. ---
4221  __ bind(&exception_returned);
4223  ER pending_handler_context_address = ER::Create(
4224      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4225  ER pending_handler_entrypoint_address = ER::Create(
4226      IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4227  ER pending_handler_fp_address =
4228      ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4229  ER pending_handler_sp_address =
4230      ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4234  ER find_handler = ER::Create(Runtime::kUnwindAndFindExceptionHandler);
4237    __ PrepareCallCFunction(3, eax);
4240    __ Move(esi, Immediate(ER::isolate_address()));
  // Adopt the sp/fp/context computed by the unwinder.
4246  __ mov(esp,
 __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
4247  __ mov(ebp,
 __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
4249         __ ExternalReferenceAsOperand(pending_handler_context_address, esi));
  // Clear c_entry_fp: the handler frame is not a C entry frame.
4260  ER c_entry_fp_address =
4261      ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4262  __ mov(
 __ ExternalReferenceAsOperand(c_entry_fp_address, esi), Immediate(0));
4265  __ mov(edi,
 __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
4270#if V8_ENABLE_WEBASSEMBLY
// Attempts to grow the wasm stack via C; if growing fails, falls back to
// Runtime::kWasmStackGuard (which may throw a RangeError).
// NOTE(review): listing is elided.
4271void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4272  using ER = ExternalReference;
4274      WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4278  __ mov(original_fp, ebp);
4279  __ mov(original_sp, esp);
  // frame_base -= esp: size of the frame segment to be copied when growing.
4281  __ sub(frame_base, esp);
4285    FrameScope scope(masm, StackFrame::INTERNAL);
4287    __ PrepareCallCFunction(5, scratch);
4294    __ CallCFunction(ER::wasm_grow_stack(), 5);
  // Slow path: growing failed, defer to the runtime stack guard.
4316  __ bind(&call_runtime);
4318         MemOperand(ebp, WasmFrameConstants::kWasmInstanceDataOffset));
4321                       WasmTrustedInstanceData::kNativeContextOffset));
4323    __ EnterFrame(StackFrame::INTERNAL);
4326    __ CallRuntime(Runtime::kWasmStackGuard);
4327    __ LeaveFrame(StackFrame::INTERNAL);
// Converts a heap double to an int32 with ECMAScript ToInt32 truncation
// semantics, handling exponents too large for a simple cvt instruction.
// Uses SSE3 fisttp when available, otherwise manual mantissa shifting.
// NOTE(review): listing is elided.
4333void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
4334  Label check_negative, process_64_bits, done;
  // The result is written back over the mantissa input slot.
4344  MemOperand return_operand = mantissa_operand;
4359  __ mov(scratch1, mantissa_operand);
4361    CpuFeatureScope scope(masm, SSE3);
  // Load the double onto the x87 stack for fisttp.
4363    __ fld_d(mantissa_operand);
4365  __ mov(ecx, exponent_operand);
  // Small exponents take the 64-bit truncation path.
4371  __ j(
 below, &process_64_bits);
  // Exponent >= 63: only the mantissa shifted left can contribute bits.
4379  __ sub(ecx, Immediate(delta));
4380  __ xor_(result_reg, result_reg);
4381  __ cmp(ecx, Immediate(31));
4383  __ shl_cl(scratch1);
4384  __ jmp(&check_negative);
4386  __ bind(&process_64_bits);
4388    CpuFeatureScope scope(masm, SSE3);
  // fisttp truncates regardless of the current rounding mode.
4392    __ fisttp_d(Operand(esp, 0));
4393    __ mov(result_reg, Operand(esp, 0));
  // Non-SSE3 path: reconstruct the integer by double-shifting the mantissa.
4398    __ sub(ecx, Immediate(delta));
4400    __ mov(result_reg, exponent_operand);
4406    __ shrd_cl(scratch1, result_reg);
4407    __ shr_cl(result_reg);
  // Shift counts >= 32 need the high word selected instead.
4408    __ test(ecx, Immediate(32));
4413  __ bind(&check_negative);
4414  __ mov(result_reg, scratch1);
  // Negate the result when the input double was negative.
4416  __ cmp(exponent_operand, Immediate(0));
4421  __ mov(return_operand, result_reg);
      // Interior of CallApiCallbackImpl (header elided): builds the
      // FunctionCallbackArguments block and an API_CALLBACK_EXIT frame, then
      // invokes the v8::FunctionCallback through the instrumented thunk.
4453      argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4465      api_function_address =
4466          CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4472  DCHECK(!
 AreAliased(api_function_address, topmost_script_having_context, argc,
4473                     func_templ, scratch));
4475  using FCA = FunctionCallbackArguments;
4476  using ER = ExternalReference;
4477  using FC = ApiCallbackExitFrameConstants;
  // Layout of the implicit-args block pushed below; keep in sync with FCA.
4479  static_assert(FCA::kArgsLength == 6);
4480  static_assert(FCA::kNewTargetIndex == 5);
4481  static_assert(FCA::kTargetIndex == 4);
4482  static_assert(FCA::kReturnValueIndex == 3);
4483  static_assert(FCA::kContextIndex == 2);
4484  static_assert(FCA::kIsolateIndex == 1);
4485  static_assert(FCA::kUnusedIndex == 0);
4503  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4504                       topmost_script_having_context);
  // Stash argc in xmm0; all GP registers are needed for the pushes below.
4511  __ movd(xmm0, argc);
4513  __ PopReturnAddressTo(argc);
  // Push the FCA implicit args (undefined = new target / return value).
4514  __ PushRoot(RootIndex::kUndefinedValue);
4515  __ Push(func_templ);
4516  __ PushRoot(RootIndex::kUndefinedValue);
4520  __ Push(Immediate(ER::isolate_address()));
4521  __ PushRoot(RootIndex::kUndefinedValue);
4525  static constexpr int kApiArgc = 1;
4529  __ mov(api_function_address,
4531                      FunctionTemplateInfo::kMaybeRedirectedCallbackOffset));
4534  __ PushReturnAddressFrom(argc);
4538  constexpr int extra_slots =
4539      FC::getExtraSlotsCountFrom<ExitFrameConstants>() + kApiArgc;
4540  __ EnterExitFrame(extra_slots, StackFrame::API_CALLBACK_EXIT,
4541                    api_function_address);
  // Recover argc now that the exit frame is set up.
4548  __ movd(argc, xmm0);
4550  Operand argc_operand = Operand(ebp, FC::kFCIArgcOffset);
4556    __ mov(argc_operand, argc);
  // FunctionCallbackInfo::implicit_args_ and values_ pointers.
4559    __ lea(scratch, Operand(ebp, FC::kImplicitArgsArrayOffset));
4560    __ mov(Operand(ebp, FC::kFCIImplicitArgsOffset), scratch);
4563    __ lea(scratch, Operand(ebp, FC::kFirstArgumentOffset));
4564    __ mov(Operand(ebp, FC::kFCIValuesOffset), scratch);
4567  __ RecordComment(
 "v8::FunctionCallback's argument.");
4568  __ lea(scratch, Operand(ebp, FC::kFunctionCallbackInfoOffset));
4571  ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4574  Operand return_value_operand = Operand(ebp, FC::kReturnValueOffset);
4575  static constexpr int kSlotsToDropOnReturn =
4578  const bool with_profiling =
4581                           thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4582                           &argc_operand, return_value_operand);
// Invokes a v8::AccessorNameGetterCallback: builds the
// PropertyCallbackArguments block and an API_ACCESSOR_EXIT frame, then calls
// the getter through the instrumented thunk. NOTE(review): listing is
// elided.
4585void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4602  using PCA = PropertyCallbackArguments;
4603  using ER = ExternalReference;
4604  using FC = ApiAccessorExitFrameConstants;
  // Layout of the args block pushed below; keep in sync with PCA.
4606  static_assert(PCA::kPropertyKeyIndex == 0);
4607  static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4608  static_assert(PCA::kHolderIndex == 2);
4609  static_assert(PCA::kIsolateIndex == 3);
4610  static_assert(PCA::kHolderV2Index == 4);
4611  static_assert(PCA::kReturnValueIndex == 5);
4612  static_assert(PCA::kDataIndex == 6);
4613  static_assert(PCA::kThisIndex == 7);
4614  static_assert(PCA::kArgsLength == 8);
4631  __ PopReturnAddressTo(scratch);
4634  __ PushRoot(RootIndex::kUndefinedValue);
4637  __ LoadAddress(isolate_reg, ER::isolate_address());
4645  __ PushReturnAddressFrom(scratch);
4649  static constexpr int kApiArgc = 2;
4654  __ RecordComment(
 "Load function_address");
4655  __ mov(api_function_address,
4658  __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>() + kApiArgc,
4659                    StackFrame::API_ACCESSOR_EXIT, api_function_address);
4664  __ RecordComment(
 "Create v8::PropertyCallbackInfo object on the stack.");
  // The PropertyCallbackInfo is just a pointer to the args array.
4667  __ lea(property_callback_info_arg, Operand(ebp, FC::kArgsArrayOffset));
4672  __ RecordComment(
 "Local<Name>");
4673#ifdef V8_ENABLE_DIRECT_HANDLE
  // With direct handles the property key is passed by value, hence the
  // reliance on it being the first args-array slot.
4678  static_assert(PCA::kPropertyKeyIndex == 0);
4682  __ RecordComment(
 "v8::PropertyCallbackInfo<T>&");
4685  ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4690  Operand return_value_operand = Operand(ebp, FC::kReturnValueOffset);
4691  static constexpr int kSlotsToDropOnReturn =
4692      FC::kPropertyCallbackInfoArgsLength;
4693  Operand*
 const kUseStackSpaceConstant =
 nullptr;
4695  const bool with_profiling =
 true;
4697                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4698                           kUseStackSpaceConstant, return_value_operand);
4701void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4707enum Direction { FORWARD, BACKWARD };
4708enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
// Emits the 64-bytes-per-iteration core of MemMove, then tails that handle
// the remaining 32/16-byte chunks, branching to move_last_15 for the final
// sub-16-byte remainder. `direction` selects forward vs backward copying;
// `alignment` selects aligned vs unaligned 16-byte loads.
// NOTE(review): listing is elided.
4715void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
4716                         Direction
 direction, Alignment alignment) {
4722  Label loop, move_last_31, move_last_63;
4723  __ cmp(loop_count, 0);
  // Main loop: move 64 bytes per iteration through xmm0-xmm3.
4727  if (
 direction == BACKWARD)
 __ sub(src, Immediate(0x40));
4728  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
4729  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
4730  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
4731  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
4732  if (
 direction == FORWARD)
 __ add(src, Immediate(0x40));
4733  if (
 direction == BACKWARD)
 __ sub(dst, Immediate(0x40));
  // Destination is always 16-byte aligned here, so plain movdqa stores.
4734  __ movdqa(Operand(dst, 0x00), xmm0);
4735  __ movdqa(Operand(dst, 0x10), xmm1);
4736  __ movdqa(Operand(dst, 0x20), xmm2);
4737  __ movdqa(Operand(dst, 0x30), xmm3);
4738  if (
 direction == FORWARD)
 __ add(dst, Immediate(0x40));
  // At most 63 bytes remain: optionally one 32-byte chunk...
4742  __ bind(&move_last_63);
4743  __ test(count, Immediate(0x20));
4744  __ j(zero, &move_last_31);
4745  if (
 direction == BACKWARD)
 __ sub(src, Immediate(0x20));
4746  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
4747  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
4748  if (
 direction == FORWARD)
 __ add(src, Immediate(0x20));
4749  if (
 direction == BACKWARD)
 __ sub(dst, Immediate(0x20));
4750  __ movdqa(Operand(dst, 0x00), xmm0);
4751  __ movdqa(Operand(dst, 0x10), xmm1);
4752  if (
 direction == FORWARD)
 __ add(dst, Immediate(0x20));
  // ...then optionally one 16-byte chunk, leaving <16 bytes for the caller.
4754  __ bind(&move_last_31);
4755  __ test(count, Immediate(0x10));
4756  __ j(zero, move_last_15);
4757  if (
 direction == BACKWARD)
 __ sub(src, Immediate(0x10));
4758  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
4759  if (
 direction == FORWARD)
 __ add(src, Immediate(0x10));
4760  if (
 direction == BACKWARD)
 __ sub(dst, Immediate(0x10));
4761  __ movdqa(Operand(dst, 0), xmm0);
4762  if (
 direction == FORWARD)
 __ add(dst, Immediate(0x10));
4765void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
// memmove replacement taking (dest, src, size) on the stack, C calling
// convention. Handles overlap in both directions, with dedicated fast paths
// for small (<=8), medium (<=63) and large copies. NOTE(review): listing is
// elided; several label bindings and dispatch jumps are not visible.
4773void Builtins::Generate_MemMove(MacroAssembler* masm) {
  // Size-class thresholds for the three dispatch tiers.
4791  const size_t kSmallCopySize = 8;
4793  const size_t kMediumCopySize = 63;
  // Overlap closer than this forces the careful "much overlap" paths.
4796  const size_t kMinMoveDistance = 16;
4800  int stack_offset = 0;
4802  Label backward, backward_much_overlap;
4803  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  // Load the three C arguments.
4811  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
4812  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
4813  __ mov(count, Operand(esp, stack_offset + kSizeOffset));
4818  __ prefetch(Operand(src, 0), 1);
  // Dispatch on copy size.
4819  __ cmp(count, kSmallCopySize);
4821  __ cmp(count, kMediumCopySize);
  // --- Large forward copy. ---
4828    Label unaligned_source, move_last_15, skip_last_move;
4831    __ cmp(eax, kMinMoveDistance);
4832    __ j(
 below, &forward_much_overlap);
  // Copy the first 16 bytes unconditionally, then align dst to 16 bytes.
4834    __ movdqu(xmm0, Operand(src, 0));
4835    __ movdqu(Operand(dst, 0), xmm0);
4840    __ add(edx, Immediate(16));
4845    __ mov(loop_count, count);
4846    __ shr(loop_count, 6);
  // Pick aligned vs unaligned main loop based on src alignment.
4848    __ test(src, Immediate(0xF));
4851    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_ALIGNED);
  // Last <16 bytes: one unaligned 16-byte move ending at the buffer end.
4853    __ bind(&move_last_15);
4854    __ and_(count, 0xF);
4856    __ movdqu(xmm0, Operand(src, count,
 times_1, -0x10));
4857    __ movdqu(Operand(dst, count,
 times_1, -0x10), xmm0);
4858    __ bind(&skip_last_move);
4859    MemMoveEmitPopAndReturn(masm);
4862    __ bind(&unaligned_source);
4863    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
4864    __ jmp(&move_last_15);
  // --- Forward copy with dst/src closer than 16 bytes. ---
4867    Label loop_until_aligned, last_15_much_overlap;
4868    __ bind(&loop_until_aligned);
  // Byte-by-byte until dst is 16-byte aligned.
4869    __ mov_b(eax, Operand(src, 0));
4871    __ mov_b(Operand(dst, 0), eax);
4874    __ bind(&forward_much_overlap);
4875    __ test(dst, Immediate(0xF));
4878    __ mov(loop_count, count);
4879    __ shr(loop_count, 6);
4880    MemMoveEmitMainLoop(masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
  // Remainder reuses the small/medium tail dispatch.
4881    __ bind(&last_15_much_overlap);
4882    __ and_(count, 0xF);
4883    __ j(zero, &pop_and_return);
4884    __ cmp(count, kSmallCopySize);
4886    __ jmp(&medium_size);
  // --- Large backward copy (dst > src, overlapping). ---
4891    Label unaligned_source, move_first_15, skip_last_move;
4898    __ cmp(eax, kMinMoveDistance);
4899    __ j(
 below, &backward_much_overlap);
  // Copy the last 16 bytes unconditionally, then align the end of dst.
4901    __ movdqu(xmm0, Operand(src, -0x10));
4902    __ movdqu(Operand(dst, -0x10), xmm0);
4910    __ mov(loop_count, count);
4911    __ shr(loop_count, 6);
4913    __ test(src, Immediate(0xF));
4916    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
  // First <16 bytes: one unaligned move at the start of the buffer.
4918    __ bind(&move_first_15);
4919    __ and_(count, 0xF);
4923    __ movdqu(xmm0, Operand(src, 0));
4924    __ movdqu(Operand(dst, 0), xmm0);
4925    __ bind(&skip_last_move);
4926    MemMoveEmitPopAndReturn(masm);
4929    __ bind(&unaligned_source);
4930    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
4931    __ jmp(&move_first_15);
  // --- Backward copy with dst/src closer than 16 bytes. ---
4934    Label loop_until_aligned, first_15_much_overlap;
4935    __ bind(&loop_until_aligned);
4938    __ mov_b(eax, Operand(src, 0));
4939    __ mov_b(Operand(dst, 0), eax);
4941    __ bind(&backward_much_overlap);
4942    __ test(dst, Immediate(0xF));
4945    __ mov(loop_count, count);
4946    __ shr(loop_count, 6);
4947    MemMoveEmitMainLoop(masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
4948    __ bind(&first_15_much_overlap);
4949    __ and_(count, 0xF);
4950    __ j(zero, &pop_and_return);
4954    __ cmp(count, kSmallCopySize);
4956    __ jmp(&medium_size);
  // --- Medium copies: overlapping head+tail moves, no loop. ---
4962    Label f9_16, f17_32, f33_48, f49_63;
  // 9..16 bytes: two possibly-overlapping 8-byte moves.
4965    __ movsd(xmm0, Operand(src, 0));
4966    __ movsd(xmm1, Operand(src, count,
 times_1, -8));
4967    __ movsd(Operand(dst, 0), xmm0);
4968    __ movsd(Operand(dst, count,
 times_1, -8), xmm1);
4969    MemMoveEmitPopAndReturn(masm);
  // 17..32 bytes.
4972    __ movdqu(xmm0, Operand(src, 0));
4973    __ movdqu(xmm1, Operand(src, count,
 times_1, -0x10));
4974    __ movdqu(Operand(dst, 0x00), xmm0);
4975    __ movdqu(Operand(dst, count,
 times_1, -0x10), xmm1);
4976    MemMoveEmitPopAndReturn(masm);
  // 33..48 bytes.
4979    __ movdqu(xmm0, Operand(src, 0x00));
4980    __ movdqu(xmm1, Operand(src, 0x10));
4981    __ movdqu(xmm2, Operand(src, count,
 times_1, -0x10));
4982    __ movdqu(Operand(dst, 0x00), xmm0);
4983    __ movdqu(Operand(dst, 0x10), xmm1);
4984    __ movdqu(Operand(dst, count,
 times_1, -0x10), xmm2);
4985    MemMoveEmitPopAndReturn(masm);
  // 49..63 bytes.
4988    __ movdqu(xmm0, Operand(src, 0x00));
4989    __ movdqu(xmm1, Operand(src, 0x10));
4990    __ movdqu(xmm2, Operand(src, 0x20));
4991    __ movdqu(xmm3, Operand(src, count,
 times_1, -0x10));
4992    __ movdqu(Operand(dst, 0x00), xmm0);
4993    __ movdqu(Operand(dst, 0x10), xmm1);
4994    __ movdqu(Operand(dst, 0x20), xmm2);
4995    __ movdqu(Operand(dst, count,
 times_1, -0x10), xmm3);
4996    MemMoveEmitPopAndReturn(masm);
4998    __ bind(&medium_size);
5011    Label eax_is_2_or_3;
5018    __ bind(&eax_is_2_or_3);
  // --- Small copies (0..8 bytes): fully unrolled per exact size. ---
5025    Label f0, f1, f2, f3, f4, f5_8;
5027    MemMoveEmitPopAndReturn(masm);
  // 1 byte.
5030    __ mov_b(eax, Operand(src, 0));
5031    __ mov_b(Operand(dst, 0), eax);
5032    MemMoveEmitPopAndReturn(masm);
  // 2 bytes.
5035    __ mov_w(eax, Operand(src, 0));
5036    __ mov_w(Operand(dst, 0), eax);
5037    MemMoveEmitPopAndReturn(masm);
  // 3 bytes.
5040    __ mov_w(eax, Operand(src, 0));
5041    __ mov_b(edx, Operand(src, 2));
5042    __ mov_w(Operand(dst, 0), eax);
5043    __ mov_b(Operand(dst, 2), edx);
5044    MemMoveEmitPopAndReturn(masm);
  // 4 bytes.
5047    __ mov(eax, Operand(src, 0));
5048    __ mov(Operand(dst, 0), eax);
5049    MemMoveEmitPopAndReturn(masm);
  // 5..8 bytes: two possibly-overlapping 4-byte moves.
5052    __ mov(eax, Operand(src, 0));
5053    __ mov(edx, Operand(src, count,
 times_1, -4));
5054    __ mov(Operand(dst, 0), eax);
5055    __ mov(Operand(dst, count,
 times_1, -4), edx);
5056    MemMoveEmitPopAndReturn(masm);
5058    __ bind(&small_size);
5068    Label count_is_above_3, count_is_2_or_3;
5078    __ bind(&count_is_2_or_3);
5083    __ bind(&count_is_above_3);
5089  __ bind(&pop_and_return);
5090  MemMoveEmitPopAndReturn(masm);
5095void Generate_DeoptimizationEntry(MacroAssembler* masm,
5097 Isolate* isolate = masm->isolate();
5103 __ AllocateStackSpace(kXmmRegsSize);
5106 config->num_allocatable_simd128_registers());
5107 DCHECK_EQ(config->num_allocatable_simd128_registers(),
5108 config->num_allocatable_double_registers());
5109 for (
int i = 0;
i < config->num_allocatable_simd128_registers(); ++
i) {
5110 int code = config->GetAllocatableSimd128Code(
i);
5113 __ movdqu(Operand(esp,
offset), xmm_reg);
5118 ExternalReference c_entry_fp_address =
5120 __ mov(masm->ExternalReferenceAsOperand(c_entry_fp_address, esi), ebp);
5122 const int kSavedRegistersAreaSize =
5127 __ mov(ecx, Operand(esp, kSavedRegistersAreaSize));
5134 __ PrepareCallCFunction(5, eax);
5135 __ mov(eax, Immediate(0));
5136 Label context_check;
5138 __ JumpIfSmi(edi, &context_check);
5140 __ bind(&context_check);
5143 Immediate(
static_cast<int>(deopt_kind)));
5150 AllowExternalCallThatCantCauseGC scope(masm);
5151 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
5167 for (
int i = 0;
i < config->num_allocatable_simd128_registers(); ++
i) {
5168 int code = config->GetAllocatableSimd128Code(
i);
5169 int dst_offset = code *
kSimd128Size + simd128_regs_offset;
5171 __ movdqu(xmm0, Operand(esp, src_offset));
5172 __ movdqu(Operand(esi, dst_offset), xmm0);
5182 __ mov_b(
__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5197 Label pop_loop_header;
5198 __ jmp(&pop_loop_header);
5201 __ pop(Operand(edx, 0));
5202 __ add(edx, Immediate(
sizeof(uint32_t)));
5203 __ bind(&pop_loop_header);
5209 __ PrepareCallCFunction(1, esi);
5212 AllowExternalCallThatCantCauseGC scope(masm);
5213 __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
5220 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
5226 __ jmp(&outer_loop_header);
5227 __ bind(&outer_push_loop);
5230 __ mov(esi, Operand(eax, 0));
5232 __ jmp(&inner_loop_header);
5233 __ bind(&inner_push_loop);
5234 __ sub(ecx, Immediate(
sizeof(uint32_t)));
5236 __ bind(&inner_loop_header);
5240 __ bind(&outer_loop_header);
5242 __ j(
below, &outer_push_loop);
5245 for (
int i = 0;
i < config->num_allocatable_simd128_registers(); ++
i) {
5246 int code = config->GetAllocatableSimd128Code(
i);
5248 int src_offset = code *
kSimd128Size + simd128_regs_offset;
5249 __ movdqu(xmm_reg, Operand(esi, src_offset));
5257 Label push_registers;
5259 __ j(zero, &push_registers);
5261 __ bind(&push_registers);
5270 __ mov_b(
__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5276 __ InitializeRootRegister();
5284void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5288void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
5295void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
5296 MacroAssembler* masm) {
5311 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
5314 FieldOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset));
5326 __ mov(feedback_cell,
FieldOperand(closure, JSFunction::kFeedbackCellOffset));
5328 __ mov(feedback_vector,
5329 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
5331 Label install_baseline_code;
5334 __ CmpObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE,
5353 feedback_vector =
no_reg;
5357 __ LoadAddress(get_baseline_pc,
5358 ExternalReference::baseline_pc_for_next_executed_bytecode());
5367 FrameScope scope(masm, StackFrame::INTERNAL);
5368 __ PrepareCallCFunction(3, eax);
5374 __ CallCFunction(get_baseline_pc, 3);
5376 __ LoadCodeInstructionStart(code_obj, code_obj);
5383 ResetJSFunctionAge(masm, closure, closure);
5384 Generate_OSREntry(masm, code_obj);
5387 __ bind(&install_baseline_code);
5395 FrameScope scope(masm, StackFrame::INTERNAL);
5401 __ CallRuntime(Runtime::kInstallBaselineCode, 1);
5408void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5417 __ LeaveFrame(StackFrame::INTERPRETED);
#define Assert(condition)
#define JUMP_IF_EQUAL(NAME)
interpreter::Bytecode bytecode
#define RETURN_BYTECODE_LIST(V)
static constexpr uint64_t kSignificandMask
static constexpr int kPhysicalSignificandSize
static constexpr uint64_t kHiddenBit
static constexpr Register HolderRegister()
static constexpr Register CallbackRegister()
static constexpr int kFeedbackCellFromFp
static void Generate_InterpreterPushArgsThenConstructImpl(MacroAssembler *masm, InterpreterPushArgsMode mode)
static void Generate_CallOrConstructForwardVarargs(MacroAssembler *masm, CallOrConstructMode mode, Builtin target_builtin)
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static void Generate_InterpreterEntryTrampoline(MacroAssembler *masm, InterpreterEntryTrampolineMode mode)
static void Generate_Adaptor(MacroAssembler *masm, int formal_parameter_count, Address builtin_address)
static void Generate_CEntry(MacroAssembler *masm, int result_size, ArgvMode argv_mode, bool builtin_exit_frame, bool switch_to_central_stack)
static constexpr Builtin CallFunction(ConvertReceiverMode=ConvertReceiverMode::kAny)
static constexpr Builtin AdaptorWithBuiltinExitFrame(int formal_parameter_count)
static void Generate_Call(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallFunction(MacroAssembler *masm, ConvertReceiverMode mode)
static void Generate_CallOrConstructVarargs(MacroAssembler *masm, Builtin target_builtin)
static void Generate_CallApiCallbackImpl(MacroAssembler *masm, CallApiCallbackMode mode)
static constexpr Builtin Call(ConvertReceiverMode=ConvertReceiverMode::kAny)
static void Generate_CallBoundFunctionImpl(MacroAssembler *masm)
static void Generate_ConstructForwardAllArgsImpl(MacroAssembler *masm, ForwardWhichFrame which_frame)
static void Generate_InterpreterPushArgsThenCallImpl(MacroAssembler *masm, ConvertReceiverMode receiver_mode, InterpreterPushArgsMode mode)
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kActualArgumentsCount, kTopmostScriptHavingContext, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register TopmostScriptHavingContextRegister()
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kApiFunctionAddress, kActualArgumentsCount, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register ActualArgumentsCountRegister()
int GetStackParameterCount() const
static constexpr int kContextOrFrameTypeOffset
static constexpr int kCallerFPOffset
static constexpr int kFixedSlotCountAboveFp
static constexpr int kFixedFrameSizeAboveFp
static constexpr int kConstructorOffset
static constexpr int kLengthOffset
static constexpr int kContextOffset
static bool IsSupported(CpuFeature f)
static const int kOsrPcOffsetIndex
static int caller_frame_top_offset()
static int output_offset()
static int input_offset()
static int output_count_offset()
static constexpr int kRootRegisterValueOffset
static constexpr int kFunctionArgOffset
static constexpr int kMicrotaskQueueArgOffset
static constexpr int kArgvOffset
static constexpr int kArgcOffset
static constexpr int kReceiverArgOffset
static constexpr int kNewTargetArgOffset
static constexpr int kSPOffset
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr int kImplicitReceiverOffset
static constexpr int kContextOffset
static constexpr int simd128_registers_offset()
static int frame_size_offset()
static int continuation_offset()
static int frame_content_offset()
static int registers_offset()
static const int kMantissaBits
static const uint32_t kExponentMask
static const int kExponentBias
static const int kExponentShift
static constexpr int kHeaderSize
static constexpr int kMapOffset
static constexpr int kBytecodeOffsetFromFp
static constexpr uint32_t thread_in_wasm_flag_address_offset()
static constexpr int8_t kNumRegisters
static constexpr XMMRegister from_code(int8_t code)
constexpr int8_t code() const
static const RegisterConfiguration * Default()
static constexpr Register from_code(int code)
static constexpr Register MicrotaskQueueRegister()
static constexpr Tagged< Smi > FromInt(int value)
static constexpr Tagged< Smi > zero()
static constexpr int32_t TypeToMarker(Type type)
@ OUTERMOST_JSENTRY_FRAME
static constexpr int kContextOffset
static constexpr int kArgCOffset
static constexpr int kFunctionOffset
static constexpr int OffsetOfElementAt(int index)
static constexpr int kFixedFrameSize
static constexpr int kFixedFrameSizeFromFp
static constexpr int kFrameTypeOffset
static constexpr int kFeedbackVectorFromFp
static constexpr int kBytecodeArrayFromFp
static constexpr int kNumPushedFpRegisters
static constexpr RegList kPushedGpRegs
static constexpr DoubleRegList kPushedFpRegs
static constexpr Register GapRegister()
static constexpr Register WrapperBufferRegister()
static constexpr int kNumberOfSavedGpParamRegs
static constexpr int kNumberOfSavedFpParamRegs
static constexpr Register ObjectRegister()
static constexpr Register SlotAddressRegister()
static const int kBytecodeCount
static constexpr int SharedFunctionInfoOffsetInTaggedJSFunction()
#define ASM_CODE_COMMENT_STRING(asm,...)
#define ASM_CODE_COMMENT(asm)
#define V8_ENABLE_SANDBOX_BOOL
base::Vector< const DirectHandle< Object > > args
ArrayReduceDirection direction
V8_INLINE Dest bit_cast(Source const &source)
ApiCallbackExitFrameConstants FC
FunctionCallbackArguments FCA
int invoke(const char *params)
void push(LiftoffAssembler *assm, LiftoffRegister reg, ValueKind kind, int padding=0)
constexpr int kStackStateOffset
constexpr DoubleRegister kFpReturnRegisters[]
constexpr int kStackSpOffset
constexpr int kStackFpOffset
constexpr Register kGpParamRegisters[]
constexpr DoubleRegister kFpParamRegisters[]
constexpr int kStackParentOffset
constexpr Register kGpReturnRegisters[]
constexpr int kStackLimitOffset
constexpr int kStackPcOffset
constexpr Register no_reg
constexpr Register kRootRegister
constexpr int kFunctionEntryBytecodeOffset
constexpr Register kRuntimeCallFunctionRegister
constexpr int kSimd128Size
DwVfpRegister DoubleRegister
constexpr Register kRuntimeCallArgvRegister
static void Generate_InterpreterEnterBytecode(MacroAssembler *masm)
constexpr Register kJavaScriptCallTargetRegister
constexpr int kNumberOfRegisters
Operand FieldOperand(Register object, int offset)
constexpr uint16_t kDontAdaptArgumentsSentinel
constexpr Register kJavaScriptCallArgCountRegister
constexpr Register kInterpreterAccumulatorRegister
constexpr int kSystemPointerSizeLog2
constexpr int kJSArgcReceiverSlots
static void GenerateInterpreterPushArgs(MacroAssembler *masm, Register num_args, Register start_address, Register scratch)
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler *masm, Register bytecode_array, Register bytecode_offset, Register bytecode, Register scratch1, Register scratch2, Register scratch3, Label *if_return)
constexpr int kSystemPointerSize
static void LeaveInterpreterFrame(MacroAssembler *masm, Register scratch1, Register scratch2)
constexpr Register kReturnRegister1
constexpr uint32_t kZapValue
constexpr Register kReturnRegister0
@ LAST_CALLABLE_JS_FUNCTION_TYPE
@ FIRST_CALLABLE_JS_FUNCTION_TYPE
constexpr Register kWasmImplicitArgRegister
constexpr Register kContextRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister ®1, const CPURegister ®2, const CPURegister ®3=NoReg, const CPURegister ®4=NoReg, const CPURegister ®5=NoReg, const CPURegister ®6=NoReg, const CPURegister ®7=NoReg, const CPURegister ®8=NoReg)
constexpr Register kRuntimeCallArgCountRegister
constexpr Register kInterpreterDispatchTableRegister
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr Register kJavaScriptCallExtraArg1Register
constexpr int JSParameterCount(int param_count_without_receiver)
constexpr Register kJavaScriptCallCodeStartRegister
Register ReassignRegister(Register &source)
constexpr Register kWasmCompileLazyFuncIndexRegister
static void AssertCodeIsBaseline(MacroAssembler *masm, Register code, Register scratch)
static void Generate_JSEntryTrampolineHelper(MacroAssembler *masm, bool is_construct)
void CallApiFunctionAndReturn(MacroAssembler *masm, bool with_profiling, Register function_address, ExternalReference thunk_ref, Register thunk_arg, int slots_to_drop_on_return, MemOperand *argc_operand, MemOperand return_value_operand)
@ kDefaultDerivedConstructor
@ times_system_pointer_size
constexpr int kDoubleSize
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler *masm, Register sfi, Register bytecode, Register scratch1, Label *is_baseline, Label *is_unavailable)
constexpr Register kInterpreterBytecodeOffsetRegister
constexpr Register kJavaScriptCallNewTargetRegister
constexpr Register kJSFunctionRegister
MemOperand ExitFrameStackSlotOperand(int offset)
constexpr Register kInterpreterBytecodeArrayRegister
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define OFFSET_OF_DATA_START(Type)