#if V8_TARGET_ARCH_ARM64

#if V8_ENABLE_WEBASSEMBLY

#define __ ACCESS_MASM(masm)
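// Throughout this file `__` expands to ACCESS_MASM(masm); every
// `__ Op(...)` line emits arm64 code through the MacroAssembler passed
// into the generator function being defined.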
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                int formal_parameter_count, Address address) {
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  __ StackOverflowCheck(x0, &stack_overflow);

  FrameScope scope(masm, StackFrame::CONSTRUCT);
  Label already_aligned;

  __ Check(eq, AbortReason::kUnexpectedValue);

  Register slot_count_without_rounding = x12;
  __ Add(slot_count_without_rounding, argc, 1);
  __ Bic(slot_count, slot_count_without_rounding, 1);
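  // Add(argc, 1) followed by Bic(..., 1) rounds argc up to the next even
  // slot count: each slot is 8 bytes but the arm64 ABI requires sp to stay
  // 16-byte aligned. When argc is odd the extra slot is padding (filled
  // with the hole value below); the Tbnz skips that store when argc was
  // already even.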
  __ LoadRoot(x4, RootIndex::kTheHoleValue);

  __ SlotAddress(x2, argc);

  __ Tbnz(slot_count_without_rounding, 0, &already_aligned);
  __ Bind(&already_aligned);

  __ SlotAddress(dst, 0);
  __ CopyDoubleWords(dst, src, count);

  __ DropArguments(x1);

  __ Bind(&stack_overflow);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_JSConstructStubGeneric");

  __ EnterFrame(StackFrame::CONSTRUCT);
  Label post_instantiation_deopt_entry, not_create_implicit_receiver;

  __ Check(eq, AbortReason::kUnexpectedValue);

  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(w4);
      &not_create_implicit_receiver);

  __ CallBuiltin(Builtin::kFastNewObject);

  __ B(&post_instantiation_deopt_entry);

  __ Bind(&not_create_implicit_receiver);
  __ LoadRoot(x0, RootIndex::kTheHoleValue);

  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
      masm->pc_offset());
  __ Bind(&post_instantiation_deopt_entry);

  Register argc_without_receiver = x11;

  Label stack_overflow;
  __ StackOverflowCheck(x10, &stack_overflow);

  __ Mov(count, argc_without_receiver);
  __ SlotAddress(dst, 1);
  __ CopyDoubleWords(dst, src, count);

  Label use_receiver, do_throw, leave_and_return, check_receiver;

  __ CompareRoot(x0, RootIndex::kUndefinedValue);
  __ B(ne, &check_receiver);

  __ Bind(&use_receiver);
  __ CompareRoot(x0, RootIndex::kTheHoleValue);

  __ Bind(&leave_and_return);

  __ LeaveFrame(StackFrame::CONSTRUCT);

  __ DropArguments(x1);

  __ bind(&check_receiver);

  __ JumpIfSmi(x0, &use_receiver);

  __ JumpIfJSAnyIsNotPrimitive(x0, x4, &leave_and_return);

  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ Bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
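// Epilogue protocol shared by the construct stubs: a constructor that
// returns undefined keeps the implicit receiver, any other primitive
// result is likewise replaced by the receiver, and only a JSReceiver
// result is returned as-is. A derived constructor that ends up with the
// hole instead of a receiver throws via
// Runtime::kThrowConstructorReturnedNonObject.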
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
static void AssertCodeIsBaselineAllowClobber(MacroAssembler* masm,
                                             Register code, Register scratch) {
  __ DecodeField<Code::KindField>(scratch);
  __ Cmp(scratch, Operand(static_cast<int>(CodeKind::BASELINE)));
  __ Assert(eq, AbortReason::kExpectedBaselineData);
}

static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
                                 Register scratch) {
  return AssertCodeIsBaselineAllowClobber(masm, code, scratch);
}
static void CheckSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
                                                      Register data,
                                                      Register scratch,
                                                      Label* is_baseline,
                                                      Label* is_bytecode) {
#if V8_STATIC_ROOTS_BOOL
  __ IsObjectTypeFast(data, scratch, CODE_TYPE);
#else
  __ CompareObjectType(data, scratch, scratch, CODE_TYPE);
#endif

  __ B(ne, &not_baseline);
  __ B(eq, is_baseline);
  __ Bind(&not_baseline);
  __ B(eq, is_baseline);

#if V8_STATIC_ROOTS_BOOL
      INTERPRETER_DATA_TYPE);
#else
  __ Cmp(scratch, INTERPRETER_DATA_TYPE);
#endif
  __ B(ne, is_bytecode);
static void GetSharedFunctionInfoBytecodeOrBaseline(
    MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
    Label* is_baseline, Label* is_unavailable) {
  __ LoadTrustedPointerField(

  __ IsObjectType(data, scratch1, scratch1, INTERPRETER_DATA_TYPE);

  CheckSharedFunctionInfoBytecodeOrBaseline(masm, data, scratch1, is_baseline,

  __ LoadProtectedPointerField(
      bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset));

  __ IsObjectType(bytecode, scratch1, scratch1, BYTECODE_ARRAY_TYPE);
  __ B(ne, is_unavailable);
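// Common tiering dispatch: the SFI's trusted function data is inspected and
// control branches to is_baseline when it is already baseline Code, to
// is_unavailable when no bytecode exists yet, and otherwise falls through
// with the BytecodeArray (unwrapped from InterpreterData when present) in
// `bytecode`.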
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0,

  __ AssertGeneratorObject(x1);

  __ LoadTaggedField(x5,

  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;

  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Mov(x10, debug_hook);
  __ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping);

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Mov(x10, debug_suspended_generator);
  __ CompareAndBranch(x10, Operand(x1), eq,
                      &prepare_step_in_suspended_generator);
  __ Bind(&stepping_prepared);

  Label stack_overflow;
  __ B(lo, &stack_overflow);

#if V8_ENABLE_LEAPTIERING
  __ Ldr(dispatch_handle.W(),
  __ LoadEntrypointAndParameterCountFromJSDispatchTable(
      code, argc, dispatch_handle, scratch);

             argc, SharedFunctionInfo::kFormalParameterCountOffset));

  __ Add(x11, argc, 1);

  __ LoadTaggedField(x6,
  __ Poke(x6, __ ReceiverOperand());

      FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset));
  __ SlotAddress(x12, x10);

  Label ok, is_baseline, is_unavailable;
      &is_baseline, &is_unavailable);

  __ Bind(&is_unavailable);
  __ Abort(AbortReason::kMissingBytecodeArray);

  __ Bind(&is_baseline);
  __ IsObjectType(bytecode, scratch, scratch, CODE_TYPE);
  __ Assert(eq, AbortReason::kMissingBytecodeArray);

#if V8_ENABLE_LEAPTIERING
  __ JumpJSFunction(x1);

  __ Bind(&prepare_step_in_if_stepping);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ LoadRoot(x6, RootIndex::kTheHoleValue);
  __ CallRuntime(Runtime::kDebugOnFunctionCall);
  __ LoadTaggedField(x5,
  __ B(&stepping_prepared);

  __ Bind(&prepare_step_in_suspended_generator);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
  __ LoadTaggedField(x5,
  __ B(&stepping_prepared);

  __ bind(&stack_overflow);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kThrowStackOverflow);
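// Before jumping into the generator's code, the trampoline consults two
// isolate-global debug addresses: a non-zero debug-hook flag diverts to
// Runtime::kDebugOnFunctionCall, and a match against the "suspended
// generator" slot diverts to Runtime::kDebugPrepareStepInSuspendedGenerator.
// Both detours reload the function and rejoin at stepping_prepared.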
  Label invoke, handler_entry, exit;

  NoRootArrayScope no_root_array(masm);

#if defined(V8_OS_WIN)
  win64_unwindinfo::XdataEncoder* xdata_encoder = masm->GetXdataEncoder();
  xdata_encoder->onFramePointerAdjustment(

  __ PushCalleeSavedRegisters();

  __ Fmov(fp_zero, 0.0);

#ifdef V8_COMPRESS_POINTERS
                     IsolateData::cage_base_offset());

  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ Mov(x12, js_entry_sp);

  __ LoadIsolateField(x9, IsolateFieldId::kFastCCallCallerFP);
  __ LoadIsolateField(x9, IsolateFieldId::kFastCCallCallerPC);
  __ Push(x10, x11, x7, x8);

  Assembler::BlockPoolsScope block_pools(masm);

  __ BindExceptionHandler(&handler_entry);
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  __ LoadRoot(x0, RootIndex::kException);

                "Unexpected offset for StackHandlerConstants::kSize");
                "Unexpected offset for StackHandlerConstants::kNextOffset");

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();

  __ CallBuiltin(entry_trampoline);

                "Unexpected offset for StackHandlerConstants::kNextOffset");

  __ LoadIsolateField(x8, IsolateFieldId::kFastCCallCallerPC);
  __ LoadIsolateField(x9, IsolateFieldId::kFastCCallCallerFP);

  Label non_outermost_js_2;
  __ PeekPair(x10, c_entry_fp, 0);
  __ B(ne, &non_outermost_js_2);
  __ Mov(x12, js_entry_sp);
  __ Bind(&non_outermost_js_2);

                "Size of entry frame is not a multiple of 16 bytes");

  __ PopCalleeSavedRegisters();
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtin::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtin::kRunMicrotasksTrampoline);
}
  FrameScope scope(masm, StackFrame::INTERNAL);

  constexpr int additional_slots = 2;
  __ Add(slots_to_claim, argc, additional_slots);
  __ Bic(slots_to_claim, slots_to_claim, 1);

  Label enough_stack_space, stack_overflow;
  __ StackOverflowCheck(slots_to_claim, &stack_overflow);
  __ B(&enough_stack_space);

  __ Bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ Bind(&enough_stack_space);
  __ Claim(slots_to_claim);

  __ SlotAddress(scratch, slots_to_claim);
  __ SlotAddress(scratch, argc);
  __ SlotAddress(x0, 1);

  __ Mov(x1, function);
  __ LoadRoot(x19, RootIndex::kUndefinedValue);

#ifndef V8_COMPRESS_POINTERS

  __ CallBuiltin(builtin);
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kRunMicrotasks);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register params_size = scratch1;
  __ Ldrh(params_size.W(),

  Register actual_params_size = scratch2;
  __ Ldr(actual_params_size,

  __ Cmp(params_size, actual_params_size);
  __ Csel(params_size, actual_params_size, params_size, kLessThan);

  __ LeaveFrame(StackFrame::INTERPRETED);

  __ DropArguments(params_size);
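// The frame may hold more actual arguments than the formal parameter count
// when the caller passed extras. The Csel above keeps the larger of the two
// sizes, so DropArguments always removes every stack-passed argument.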
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;

  Register original_bytecode_offset = scratch2;
                     bytecode, original_bytecode_offset));

  __ Mov(bytecode_size_table, ExternalReference::bytecode_size_table_address());
  __ Mov(original_bytecode_offset, bytecode_offset);

  Label process_bytecode, extra_wide;
  static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
  static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  static_assert(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Cmp(bytecode, Operand(0x3));
  __ B(hi, &process_bytecode);

  __ Tst(bytecode, Operand(0x1));

  __ Add(bytecode_offset, bytecode_offset, Operand(1));
  __ Ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ B(ne, &extra_wide);

  __ Add(bytecode_size_table, bytecode_size_table,
  __ B(&process_bytecode);

  __ Bind(&extra_wide);

  __ Add(bytecode_size_table, bytecode_size_table,

  __ Bind(&process_bytecode);

#define JUMP_IF_EQUAL(NAME)                                              \
  __ Cmp(x1, Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ B(if_return, eq);

  Label end, not_jump_loop;
  __ Cmp(bytecode, Operand(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ B(ne, &not_jump_loop);

  __ Mov(bytecode_offset, original_bytecode_offset);

  __ bind(&not_jump_loop);

  __ Ldrb(scratch1.W(), MemOperand(bytecode_size_table, bytecode));
  __ Add(bytecode_offset, bytecode_offset, scratch1);
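// Bytecodes 0-3 are the Wide/ExtraWide prefixes (and their DebugBreak
// variants), which is what the static_asserts pin down. On a prefix, the
// offset advances one byte to the real opcode and the matching size table
// (selected by bit 0: Wide vs. ExtraWide) is used, so the final Ldrb reads
// the size of the whole prefixed instruction. JumpLoop is special-cased:
// the offset is restored to its original pre-prefix value instead of being
// advanced.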
void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {

void ResetJSFunctionAge(MacroAssembler* masm, Register js_function,
                        Register scratch) {
  const Register shared_function_info(scratch);
  __ LoadTaggedField(
      shared_function_info,
      FieldMemOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, shared_function_info);
}

void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
                                   Register feedback_vector, Register scratch) {
  __ And(scratch, scratch, Operand(~FeedbackVector::OsrUrgencyBits::kMask));
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);

  Register closure = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kClosure);

  Register feedback_cell = temps.AcquireX();
  Register feedback_vector = temps.AcquireX();
  Register scratch = temps.AcquireX();
  __ LoadTaggedField(feedback_cell,
  __ AssertFeedbackVector(feedback_vector, scratch);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);
#endif

  UseScratchRegisterScope temps(masm);
  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, temps.AcquireW());

  UseScratchRegisterScope temps(masm);
  Register invocation_count = temps.AcquireW();
  __ Ldr(invocation_count,
         FieldMemOperand(feedback_vector,
                         FeedbackVector::kInvocationCountOffset));
  __ Add(invocation_count, invocation_count, Operand(1));
  __ Str(invocation_count,
         FieldMemOperand(feedback_vector,
                         FeedbackVector::kInvocationCountOffset));

  Register callee_context = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kCalleeContext);
  Register callee_js_function = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kClosure);

  UseScratchRegisterScope temps(masm);
  ResetJSFunctionAge(masm, callee_js_function, temps.AcquireX());

  __ Push(callee_context, callee_js_function);

  Register argc = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);

  Register bytecode_array = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
  __ Push(argc, bytecode_array, feedback_cell, feedback_vector);
  __ AssertFeedbackVector(feedback_vector, scratch);

  Label call_stack_guard;
  Register frame_size = descriptor.GetRegisterParameter(
      BaselineOutOfLinePrologueDescriptor::kStackFrameSize);

  UseScratchRegisterScope temps(masm);

  Register sp_minus_frame_size = temps.AcquireX();
  __ Sub(sp_minus_frame_size, sp, frame_size);
  Register interrupt_limit = temps.AcquireX();

  __ Cmp(sp_minus_frame_size, interrupt_limit);
  __ B(lo, &call_stack_guard);

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);

  __ Pop<MacroAssembler::kAuthLR>(fp, lr);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
#endif

  __ bind(&call_stack_guard);
      BaselineOutOfLinePrologueDescriptor::kJavaScriptCallNewTarget);

  FrameScope frame_scope(masm, StackFrame::INTERNAL);

  static_assert(kJSDispatchHandleShift > 0);

  __ SmiTag(frame_size);
  __ PushArgument(frame_size);
  __ CallRuntime(Runtime::kStackGuardWithGap);
void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
  __ LeaveFrame(StackFrame::BASELINE);

  __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
void Builtins::Generate_InterpreterEntryTrampoline(
    MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
  __ LoadTaggedField(
      sfi, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  ResetSharedFunctionInfoAge(masm, sfi);

  Label is_baseline, compile_lazy;
      x11, &is_baseline, &compile_lazy);

#ifdef V8_ENABLE_SANDBOX
  __ LoadParameterCountFromJSDispatchTable(x6, dispatch_handle, x7);
          BytecodeArray::kParameterSizeOffset));
  __ SbxCheck(eq, AbortReason::kJSSignatureMismatch);
#endif

  Label push_stack_frame;
  __ LoadFeedbackVector(feedback_vector, closure, x7, &push_stack_frame);

#ifndef V8_ENABLE_LEAPTIERING
  Label flags_need_processing;
  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::INTERPRETED_FUNCTION,
      &flags_need_processing);
#endif

  ResetFeedbackVectorOsrUrgency(masm, feedback_vector, w7);

  __ Ldr(w10, FieldMemOperand(feedback_vector,
                              FeedbackVector::kInvocationCountOffset));
  __ Add(w10, w10, Operand(1));
  __ Str(w10, FieldMemOperand(feedback_vector,
                              FeedbackVector::kInvocationCountOffset));

  __ Bind(&push_stack_frame);

  __ Push<MacroAssembler::kSignLR>(lr, fp);

  __ Push(cp, closure);

  __ Push(x6, feedback_vector);

  Label stack_overflow;

          BytecodeArray::kFrameSizeOffset));
          BytecodeArray::kMaxArgumentsOffset));

  __ Sub(x10, sp, Operand(x12));

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();

  __ Cmp(x10, scratch);

  __ B(lo, &stack_overflow);

  __ Add(x11, x11, 1);
  __ Bic(x11, x11, 1);

  __ Bind(&loop_header);

  Label no_incoming_new_target_or_generator_register;

          BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Cbz(x10, &no_incoming_new_target_or_generator_register);

  __ Bind(&no_incoming_new_target_or_generator_register);

  Label stack_check_interrupt, after_stack_check_interrupt;
  __ B(lo, &stack_check_interrupt);
  __ Bind(&after_stack_check_interrupt);

  __ bind(&do_dispatch);
  __ Mov(kInterpreterDispatchTableRegister,
         ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  __ RecordComment("--- InterpreterEntryReturnPC point ---");

  if (mode == InterpreterEntryTrampolineMode::kDefault) {
    masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
        masm->pc_offset());
  } else {
    CHECK_EQ(
        masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
        masm->pc_offset());
  }

  __ bind(&do_return);

  __ bind(&stack_check_interrupt);

  __ CallRuntime(Runtime::kStackGuard);

  __ jmp(&after_stack_check_interrupt);

#ifndef V8_ENABLE_LEAPTIERING
  __ bind(&flags_need_processing);
  __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
#endif

  __ bind(&is_baseline);

#ifndef V8_ENABLE_LEAPTIERING
  Label install_baseline_code;

  __ Cmp(x7, FEEDBACK_VECTOR_TYPE);
  __ B(ne, &install_baseline_code);

  __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing(
      flags, feedback_vector, CodeKind::BASELINE, &flags_need_processing);

  __ ReplaceClosureCodeWithOptimizedCode(x2, closure);

  __ bind(&install_baseline_code);
#endif
  __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);

  __ bind(&compile_lazy);
  __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
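// Tiering summary for the trampoline above: the closure's SFI yields either
// a BytecodeArray (dispatched through the interpreter dispatch table),
// baseline Code (entered through Runtime::kInstallBaselineCode), or nothing
// compiled yet, in which case Runtime::kCompileLazy is tail-called with the
// original arguments still on the stack.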
static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
                                        Register first_arg_index,
                                        Register spread_arg_out,
                                        ConvertReceiverMode receiver_mode,
                                        InterpreterPushArgsMode mode) {
                     slots_to_claim, slots_to_copy));

  DCHECK(!AreAliased(spread_arg_out, last_arg_addr, stack_addr, slots_to_claim,

  __ Sub(num_args, num_args, 1);

  __ Add(slots_to_claim, num_args, 1);
  __ Bic(slots_to_claim, slots_to_claim, 1);

  __ Claim(slots_to_claim);

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();
  __ Sub(scratch, slots_to_claim, 1);

  const bool skip_receiver =
      receiver_mode == ConvertReceiverMode::kNullOrUndefined;
  if (skip_receiver) {

  __ Mov(slots_to_copy, num_args);

  __ SlotAddress(stack_addr, skip_receiver ? 1 : 0);

  __ Sub(last_arg_addr, first_arg_index,

  __ CopyDoubleWords(stack_addr, last_arg_addr, slots_to_copy,

  __ LoadRoot(receiver, RootIndex::kUndefinedValue);

                              receiver_mode, mode);

  __ TailCallBuiltin(Builtin::kCallWithSpread);

  __ AssertUndefinedOrAllocationSite(x2);

  __ AssertFunction(x1);

  __ TailCallBuiltin(Builtin::kArrayConstructorImpl);

  __ TailCallBuiltin(Builtin::kConstructWithSpread);

  __ TailCallBuiltin(Builtin::kConstruct);
void Builtins::Generate_ConstructForwardAllArgsImpl(
    MacroAssembler* masm, ForwardWhichFrame which_frame) {
  Label stack_overflow;

  switch (which_frame) {

  Register argc_without_receiver = x13;

  __ Add(slots_to_claim, x0, 1);
  __ Bic(slots_to_claim, slots_to_claim, 1);

  __ StackOverflowCheck(slots_to_claim, &stack_overflow);

  __ Claim(slots_to_claim);

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();
  __ Sub(scratch, slots_to_claim, 1);

  __ SlotAddress(stack_addr, 1);
  __ CopyDoubleWords(stack_addr, x4, argc_without_receiver);

  __ Mov(x14, Operand(0));

  __ TailCallBuiltin(Builtin::kConstruct);

  __ Bind(&stack_overflow);

  __ TailCallRuntime(Runtime::kThrowStackOverflow);
void NewImplicitReceiver(MacroAssembler* masm) {
  __ CallBuiltin(Builtin::kFastNewObject);

  __ Mov(implicit_receiver, x0);

  __ Str(implicit_receiver,
void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
    MacroAssembler* masm) {
  __ AssertFunction(x1);

  Label non_constructor;
  __ TestAndBranchIfAllClear(x2, Map::Bits1::IsConstructorBit::kMask,

  __ EnterFrame(StackFrame::FAST_CONSTRUCT);

  __ Check(eq, AbortReason::kUnexpectedValue);

  __ LoadRoot(x2, RootIndex::kTheHoleValue);

  __ TestAndBranchIfAnySet(w2, SharedFunctionInfo::ConstructAsBuiltinBit::kMask,

  Label not_create_implicit_receiver;
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(w2);
      &not_create_implicit_receiver);
  NewImplicitReceiver(masm);
  __ bind(&not_create_implicit_receiver);

  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
      masm->pc_offset());

  Label use_receiver, do_throw, leave_and_return, check_receiver;

  __ CompareRoot(x0, RootIndex::kUndefinedValue);
  __ B(ne, &check_receiver);

  __ Bind(&use_receiver);

  __ CompareRoot(x0, RootIndex::kTheHoleValue);
  __ B(eq, &do_throw);

  __ Bind(&leave_and_return);

  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ bind(&check_receiver);

  __ JumpIfSmi(x0, &use_receiver);

  __ JumpIfJSAnyIsNotPrimitive(x0, x4, &leave_and_return);
  __ B(&use_receiver);

  __ bind(&builtin_call);

  __ LeaveFrame(StackFrame::FAST_CONSTRUCT);

  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
  __ Mov(kInterpreterDispatchTableRegister,
         ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
                       BYTECODE_ARRAY_TYPE);
  __ Assert(
      eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);

  UseScratchRegisterScope temps(masm);

  Label builtin_trampoline, trampoline_loaded;
  Tagged<Smi> interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());

  __ LoadTrustedPointerField(
      x1, FieldMemOperand(x1, SharedFunctionInfo::kTrustedFunctionDataOffset),

  __ B(ne, &builtin_trampoline);

  __ LoadProtectedPointerField(
      x1, FieldMemOperand(x1, InterpreterData::kInterpreterTrampolineOffset));

  __ B(&trampoline_loaded);

  __ Bind(&builtin_trampoline);
  __ Mov(x1, ExternalReference::
             address_of_interpreter_entry_trampoline_instruction_start(
                 masm->isolate()));

  __ Bind(&trampoline_loaded);

  UseScratchRegisterScope temps(masm);

  __ Add(x17, x1, Operand(interpreter_entry_return_pc_offset.value()));

void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
  Label enter_bytecode, function_entry_bytecode;

  __ B(eq, &function_entry_bytecode);

  __ bind(&enter_bytecode);

  __ bind(&function_entry_bytecode);

  __ B(&enter_bytecode);

  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
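// The function_entry_bytecode path handles the sentinel offset used for the
// implicit stack check at function entry: it is not a real bytecode offset,
// so instead of consulting the size table the code resets to the first
// bytecode and re-enters at enter_bytecode.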
void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {

void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool javascript_builtin,
                                      bool with_result) {
  int allocatable_register_count = config->num_allocatable_general_registers();
      (allocatable_register_count +
       BuiltinContinuationFrameConstants::PaddingSlotCount(
           allocatable_register_count)) *

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();

  __ Add(fp, sp, frame_size);

  if (javascript_builtin) {
    __ mov(scratch, x0);
  }

  for (int i = allocatable_register_count - 1; i > 0; i -= 2) {
    int code1 = config->GetAllocatableGeneralCode(i);
    int code2 = config->GetAllocatableGeneralCode(i - 1);

  if (allocatable_register_count % 2 != 0) {
    int code = config->GetAllocatableGeneralCode(0);

  if (javascript_builtin && with_result) {
    constexpr int return_offset =

    __ add(x0, x0, return_offset);

    __ sub(x0, x0, return_offset);

  __ Pop<MacroAssembler::kAuthLR>(fp, lr);

  __ LoadEntryFromBuiltinIndex(builtin, builtin);
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
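// The two booleans pick the four continuation flavours: javascript_builtin
// selects whether the JavaScript register state (including the argument
// count in x0) must be restored from the continuation frame, and
// with_result selects whether the builtin's return value is threaded back
// into that frame before resuming.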
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ CallRuntime(Runtime::kNotifyDeoptimized);

void Generate_OSREntry(MacroAssembler* masm, Register entry_address,
                       Operand offset = Operand(0)) {
  UseScratchRegisterScope temps(masm);

  __ Mov(x17, entry_address);
enum class OsrSourceTier {
  kInterpreter,
  kBaseline,
};

void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
                        Register maybe_target_code,
                        Register expected_param_count) {
  Label jump_to_optimized_code;

  __ CompareTaggedAndBranch(maybe_target_code, Smi::zero(), ne,
                            &jump_to_optimized_code);

  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(expected_param_count, padreg);
  __ CallRuntime(Runtime::kCompileOptimizedOSR);

  __ Pop(padreg, expected_param_count);

  __ CompareTaggedAndBranch(maybe_target_code, Smi::zero(), ne,
                            &jump_to_optimized_code);

  __ Bind(&jump_to_optimized_code);

  __ Mov(scratch, ExternalReference::address_of_log_or_trace_osr());

  __ Tst(scratch, 0xFF);

  FrameScope scope(masm, StackFrame::INTERNAL);

  __ Push(maybe_target_code, expected_param_count);
  __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
  __ Pop(expected_param_count, maybe_target_code);

  if (source == OsrSourceTier::kInterpreter) {

    __ LeaveFrame(StackFrame::STUB);

  __ SbxCheck(Condition::kNotEqual, AbortReason::kExpectedOsrCode);

  __ Ldrh(scratch.W(),

  __ Cmp(scratch.W(), expected_param_count.W());
  __ SbxCheck(Condition::kEqual, AbortReason::kOsrUnexpectedStackSize);

  __ LoadProtectedPointerField(
      scratch,
      FieldMemOperand(maybe_target_code,
                      Code::kDeoptimizationDataOrInterpreterDataOffset));

  __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code,

  Generate_OSREntry(masm, maybe_target_code, scratch);
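// Generate_OSREntry routes the jump through x17: with pointer
// authentication / BTI enabled, arm64 restricts indirect branches of this
// kind to x16/x17, so using x17 keeps the OSR code's landing pad a valid
// branch target.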
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);
  OnStackReplacement(masm, OsrSourceTier::kInterpreter,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}

void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
  using D = OnStackReplacementDescriptor;
  static_assert(D::kParameterCount == 2);

  OnStackReplacement(masm, OsrSourceTier::kBaseline,
                     D::MaybeTargetCodeRegister(),
                     D::ExpectedParameterCountRegister());
}
#ifdef V8_ENABLE_MAGLEV

void Builtins::Generate_MaglevFunctionEntryStackCheck(MacroAssembler* masm,
                                                      bool save_new_target) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  if (save_new_target) {
    __ AssertSmiOrHeapObjectInMainCompressionCage(

  __ PushArgument(x0);
  __ CallRuntime(Runtime::kStackGuardWithGap, 1);

  if (save_new_target) {
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
  __ LoadRoot(null_value, RootIndex::kNullValue);

  __ Mov(arg_array, undefined_value);

  __ DropArguments(argc);

  __ CmpTagged(arg_array, null_value);
  __ CcmpTagged(arg_array, undefined_value, ZFlag, ne);
  __ B(eq, &no_arguments);

  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

  __ Bind(&no_arguments);
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");

  __ Peek(function, __ ReceiverOperand());

  __ B(gt, &non_zero);
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);

  __ Poke(scratch, 0);

  Label arguments_ready;

  UseScratchRegisterScope temps(masm);
  Register argc_without_receiver = temps.AcquireX();

  __ Mov(count, argc_without_receiver);
  __ Tbz(argc_without_receiver, 0, &even);

  __ SlotAddress(copy_from, 1);

  __ CopyDoubleWords(copy_to, copy_from, count);

  __ B(&arguments_ready);

  __ SlotAddress(copy_from, count);

  __ CopyDoubleWords(copy_to, copy_from, count,

  __ Bind(&arguments_ready);
  __ Sub(argc, argc, 1);
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  __ Mov(target, undefined_value);
  __ Mov(this_argument, undefined_value);
  __ Mov(arguments_list, undefined_value);

  __ DropArguments(argc);
  __ PushArgument(this_argument);

  __ TailCallBuiltin(Builtin::kCallWithArrayLike);

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  __ Mov(target, undefined_value);
  __ Mov(arguments_list, undefined_value);

  __ DropArguments(argc);

  __ PushArgument(undefined_value);

  __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
void Generate_PrepareForCopyingVarargs(MacroAssembler* masm, Register argc,
                                       Register len) {
  Label exit, even, init;

  __ Mov(slots_to_copy, argc);
  __ Mov(slots_to_claim, len);
  __ Tbz(slots_to_claim, 0, &even);

  __ Add(slots_to_claim, len, 1);
  __ And(scratch, argc, 1);
  __ Sub(slots_to_claim, slots_to_claim, Operand(scratch, LSL, 1));

  __ Cbz(slots_to_claim, &exit);
  __ Claim(slots_to_claim);

  __ Cbz(slots_to_copy, &init);

  __ SlotAddress(src, slots_to_claim);
  __ SlotAddress(dst, 0);
  __ CopyDoubleWords(dst, src, slots_to_copy);

  __ Tbz(len, 0, &exit);
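// Parity bookkeeping for the varargs copy: an odd len claims len + 1 slots
// to keep sp 16-byte aligned, and the Sub of (argc & 1) << 1 accounts for a
// padding slot already present in the frame when argc is odd. For example,
// len = 3 with even argc claims 4 slots, while an odd argc reuses the
// existing padding slot and claims only 2.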
  __ AssertNotSmi(x2, AbortReason::kOperandIsNotAFixedArray);

  __ Cmp(x13, FIXED_ARRAY_TYPE);

  __ Cmp(x13, FIXED_DOUBLE_ARRAY_TYPE);

  __ Abort(AbortReason::kOperandIsNotAFixedArray);

  Label stack_overflow;
  __ StackOverflowCheck(len, &stack_overflow);

  Generate_PrepareForCopyingVarargs(masm, argc, len);

  __ Add(src, arguments_list,

#if !V8_STATIC_ROOTS_BOOL
  __ LoadTaggedRoot(the_hole_value, RootIndex::kTheHoleValue);
#endif

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  __ SlotAddress(dst, argc);
  __ Add(argc, argc, len);

  __ Sub(len, len, 1);

#if V8_STATIC_ROOTS_BOOL
  __ CompareRoot(scratch, RootIndex::kTheHoleValue);
#else
  __ CmpTagged(scratch, the_hole_value);
#endif

  __ Csel(scratch, scratch, undefined_value, ne);

  __ Cbnz(len, &loop);

  __ TailCallBuiltin(target_builtin);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Builtin target_builtin) {
  Label new_target_constructor, new_target_not_constructor;
  __ JumpIfSmi(x3, &new_target_not_constructor);

  __ TestAndBranchIfAnySet(x5, Map::Bits1::IsConstructorBit::kMask,
                           &new_target_constructor);
  __ Bind(&new_target_not_constructor);

  __ EnterFrame(StackFrame::INTERNAL);
  __ PushArgument(x3);
  __ CallRuntime(Runtime::kThrowNotConstructor);

  __ Bind(&new_target_constructor);

  Label stack_done, stack_overflow;

  __ Subs(len, len, start_index);
  __ B(le, &stack_done);

  __ StackOverflowCheck(len, &stack_overflow);

  Generate_PrepareForCopyingVarargs(masm, argc, len);

  __ Add(args_fp, args_fp, start_index);

  __ SlotAddress(dst, argc);

  __ Add(argc, argc, len);
  __ CopyDoubleWords(dst, args_fp, len);

  __ Bind(&stack_done);

  __ TailCallBuiltin(target_builtin);

  __ Bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ AssertCallableFunction(x1);

  __ TestAndBranchIfAnySet(w3,
                           SharedFunctionInfo::IsNativeBit::kMask |
                               SharedFunctionInfo::IsStrictBit::kMask,

  __ LoadGlobalProxy(x3);

  Label convert_to_object, convert_receiver;
  __ Peek(x3, __ ReceiverOperand());
  __ JumpIfSmi(x3, &convert_to_object);
  __ JumpIfJSAnyIsNotPrimitive(x3, x4, &done_convert);

  Label convert_global_proxy;
  __ JumpIfRoot(x3, RootIndex::kUndefinedValue, &convert_global_proxy);
  __ JumpIfNotRoot(x3, RootIndex::kNullValue, &convert_to_object);
  __ Bind(&convert_global_proxy);

  __ LoadGlobalProxy(x3);

  __ B(&convert_receiver);

  __ Bind(&convert_to_object);

  FrameScope scope(masm, StackFrame::INTERNAL);

  __ CallBuiltin(Builtin::kToObject);

  __ Bind(&convert_receiver);

  __ Poke(x3, __ ReceiverOperand());

  __ Bind(&done_convert);

#ifdef V8_ENABLE_LEAPTIERING

      FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
void Generate_PushBoundArguments(MacroAssembler* masm) {
  Label no_bound_arguments;
  __ LoadTaggedField(
      bound_argv, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntagField(bound_argc,
  __ Cbz(bound_argc, &no_bound_arguments);

  __ Sub(x10, sp, x10);

  __ TailCallRuntime(Runtime::kThrowStackOverflow);

  Label copy_bound_args;

  __ Add(total_argc, argc, bound_argc);

  __ Add(slots_to_claim, bound_argc, 1);
  __ Bic(slots_to_claim, slots_to_claim, 1);

  __ Tbz(bound_argc, 0, &copy_bound_args);

  __ Tbz(argc, 0, &argc_even);

  __ SlotAddress(copy_to, slots_to_claim);

  __ CopyDoubleWords(copy_to, copy_from, argc);

  __ Add(scratch, total_argc, 1);

  __ B(&copy_bound_args);

  __ Bind(&argc_even);

  __ SlotAddress(copy_to, total_argc);

  __ CopyDoubleWords(copy_to, copy_from, argc,

  __ Bind(&copy_bound_args);

  __ Add(bound_argv, bound_argv,

  __ SlotAddress(copy_to, 1);

  __ Sub(counter, counter, 1);
  __ LoadTaggedField(scratch,

  __ Cbnz(counter, &loop);

  __ Bind(&no_bound_arguments);
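// Net effect of Generate_PushBoundArguments: the arguments already on the
// stack are shifted up by bound_argc slots (the argc-parity cases above
// pick the copy direction and padding), and the [[BoundArguments]] elements
// are copied into the gap just above the receiver, so the callee sees
// receiver, bound arguments, then the call-site arguments.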
  __ AssertBoundFunction(x1);

  __ LoadTaggedField(x10,
  __ Poke(x10, __ ReceiverOperand());

  Generate_PushBoundArguments(masm);

  __ LoadTaggedField(
      x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));

  Label non_callable, class_constructor;
  __ JumpIfSmi(target, &non_callable);
  __ LoadMap(map, target);
  __ CompareInstanceTypeRange(map, instance_type,

  __ Cmp(instance_type, JS_BOUND_FUNCTION_TYPE);
  __ TailCallBuiltin(Builtin::kCallBoundFunction, eq);

  __ TestAndBranchIfAllClear(flags, Map::Bits1::IsCallableBit::kMask,

  __ Cmp(instance_type, JS_PROXY_TYPE);
  __ TailCallBuiltin(Builtin::kCallProxy, eq);

  __ Cmp(instance_type, JS_WRAPPED_FUNCTION_TYPE);
  __ TailCallBuiltin(Builtin::kCallWrappedFunction, eq);

  __ Cmp(instance_type, JS_CLASS_CONSTRUCTOR_TYPE);
  __ B(eq, &class_constructor);

  __ Poke(target, __ ReceiverOperand());

  __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);

  __ bind(&non_callable);
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ PushArgument(target);
  __ CallRuntime(Runtime::kThrowCalledNonCallable);

  __ bind(&class_constructor);
  FrameScope frame(masm, StackFrame::INTERNAL);
  __ PushArgument(target);
  __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  __ AssertConstructor(x1);
  __ AssertFunction(x1);

  __ LoadRoot(x2, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  __ TestAndBranchIfAllClear(
      w4, SharedFunctionInfo::ConstructAsBuiltinBit::kMask, &call_generic_stub);

  __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub);

  __ bind(&call_generic_stub);
  __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  __ AssertConstructor(x1);
  __ AssertBoundFunction(x1);

  Generate_PushBoundArguments(masm);

  __ CmpTagged(x1, x3);
  __ LoadTaggedField(
      x3, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));

  __ LoadTaggedField(
      x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ TailCallBuiltin(Builtin::kConstruct);
void Builtins::Generate_Construct(MacroAssembler* masm) {
  Label non_constructor, non_proxy;
  __ JumpIfSmi(target, &non_constructor);

  __ TestAndBranchIfAllClear(flags, Map::Bits1::IsConstructorBit::kMask,

  __ CompareInstanceTypeRange(map, instance_type, FIRST_JS_FUNCTION_TYPE,
                              LAST_JS_FUNCTION_TYPE);
  __ TailCallBuiltin(Builtin::kConstructFunction, ls);

  __ Cmp(instance_type, JS_BOUND_FUNCTION_TYPE);
  __ TailCallBuiltin(Builtin::kConstructBoundFunction, eq);

  __ Cmp(instance_type, JS_PROXY_TYPE);
  __ B(ne, &non_proxy);
  __ TailCallBuiltin(Builtin::kConstructProxy);

  __ bind(&non_proxy);

  __ Poke(target, __ ReceiverOperand());

  __ LoadNativeContextSlot(target,
                           Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);

  __ bind(&non_constructor);
  __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
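// Construct dispatch mirrors Call dispatch: the ls branch covers the whole
// FIRST_JS_FUNCTION_TYPE..LAST_JS_FUNCTION_TYPE range, bound functions
// unwrap through ConstructBoundFunction, proxies trap through
// ConstructProxy, and any other constructor goes through the
// CALL_AS_CONSTRUCTOR_DELEGATE. Non-constructors reach
// ConstructedNonConstructable.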
#if V8_ENABLE_WEBASSEMBLY

constexpr RegList kSavedGpRegs = ([]() constexpr {
    saved_gp_regs.set(gp_param_reg);

  CHECK_EQ(0, saved_gp_regs.Count() % 2);
           saved_gp_regs.Count());
  return saved_gp_regs;

    saved_fp_regs.set(fp_param_reg);
           saved_fp_regs.Count());
  return saved_fp_regs;
void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
  Register func_index = wasm::kLiftoffFrameSetupFunctionReg;

  Label allocate_vector, done;

          WasmTrustedInstanceData::kFeedbackVectorsOffset));
  __ LoadTaggedField(vector,
  __ JumpIfSmi(vector, &allocate_vector);

  __ Push(vector, xzr);

  __ bind(&allocate_vector);

  __ PushXRegList(kSavedGpRegs);
  __ PushQRegList(kSavedFpRegs);
  __ Push<MacroAssembler::kSignLR>(lr, xzr);

  __ SmiTag(func_index);

  __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);

  __ Pop<MacroAssembler::kAuthLR>(xzr, lr);
  __ PopQRegList(kSavedFpRegs);
  __ PopXRegList(kSavedGpRegs);
         MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  UseScratchRegisterScope temps(masm);

  HardAbortScope hard_abort(masm);
  FrameScope scope(masm, StackFrame::INTERNAL);

         MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));

  __ PushXRegList(kSavedGpRegs);
  __ PushQRegList(kSavedFpRegs);

  __ CallRuntime(Runtime::kWasmCompileLazy, 2);

  static_assert(!kSavedGpRegs.has(x17));

  __ PopQRegList(kSavedFpRegs);
  __ PopXRegList(kSavedGpRegs);
         MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));

  static_assert(!kSavedGpRegs.has(temp));
                              WasmTrustedInstanceData::kJumpTableStartOffset));
  __ add(x17, temp, Operand(x17));
void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);

  FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);

  __ CallRuntime(Runtime::kWasmDebugBreak, 0);
void SwitchStackState(MacroAssembler* masm, Register stack, Register tmp,
                      wasm::JumpBuffer::StackState old_state,
                      wasm::JumpBuffer::StackState new_state) {
#if V8_ENABLE_SANDBOX
  __ Cmp(tmp.W(), old_state);
#endif
  __ Mov(tmp.W(), new_state);

void SwitchStackPointerAndSimulatorStackLimit(MacroAssembler* masm,
                                              Register stack, Register tmp) {
  if (masm->options().enable_simulator_code) {
    UseScratchRegisterScope temps(masm);

void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* pc,
                    Register tmp) {

void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
                    Register tmp, wasm::JumpBuffer::StackState expected_state) {
  SwitchStackPointerAndSimulatorStackLimit(masm, stack, tmp);

void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
                          Register tmp,
                          wasm::JumpBuffer::StackState expected_state) {
         MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));

  LoadJumpBuffer(masm, target_stack, false, tmp, expected_state);
}

void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
                  const std::initializer_list<CPURegister> keep) {
  using ER = ExternalReference;

  for (size_t i = 0; i < (keep.size() & ~0x1); i += 2) {
    __ Push(keep.begin()[i], keep.begin()[i + 1]);
  }
  if (keep.size() % 2 == 1) {
    __ Push(*(keep.end() - 1), padreg);
  }

  __ CallCFunction(
      return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);

  if (keep.size() % 2 == 1) {

  for (size_t i = (keep.size() & ~0x1); i > 0; i -= 2) {
    __ Pop(keep.begin()[i - 1], keep.begin()[i - 2]);
  }
}

void ReloadParentStack(MacroAssembler* masm, Register return_reg,
                       Register return_value, Register context, Register tmp1,
                       Register tmp2, Register tmp3) {
  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();

  __ StoreRootRelative(IsolateData::active_stack_offset(), parent);

  SwitchStacks(masm, active_stack, true,
               {return_reg, return_value, context, parent});
}

void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
  __ LoadRoot(suspender, RootIndex::kActiveSuspender);

  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);
}

void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
  __ Str(xzr, MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
  __ Str(xzr, MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));
}
class RegisterAllocator {
  Scoped(RegisterAllocator* allocator, Register* reg):

  explicit RegisterAllocator(const CPURegList& registers)

  void Ask(Register* reg) {

  void Pinned(const Register& requested, Register* reg) {

  void Reserve(const Register& reg) {

  void Reserve(const Register& reg1, const Register& reg2,
               const Register& reg3 = NoReg, const Register& reg4 = NoReg,
               const Register& reg5 = NoReg, const Register& reg6 = NoReg) {

  bool IsUsed(const Register& reg) {

  void ResetExcept(const Register& reg1 = NoReg, const Register& reg2 = NoReg,
                   const Register& reg3 = NoReg, const Register& reg4 = NoReg,
                   const Register& reg5 = NoReg, const Register& reg6 = NoReg) {
    if (reg1 != NoReg) {
    if (reg5 != NoReg) {

  static RegisterAllocator WithAllocatableGeneralRegisters() {
    list.set_bits(config->allocatable_general_codes_mask());
    return RegisterAllocator(list);
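// RegisterAllocator is a small bookkeeping helper for the hand-written
// stack-switching code below: Ask() hands out any free allocatable
// register, Pinned() claims a specific one, and ResetExcept() frees
// everything not named. The macros defined next wrap these calls so
// generator code can declare registers near their use, e.g.
//   DEFINE_PINNED(suspender, x0);  // claim a fixed register
//   DEFINE_REG(scratch);           // claim any free register
// with FREE_REG returning a register to the pool.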
#define DEFINE_REG(Name)  \
  Register Name = no_reg; \
  regs.Ask(&Name);

#define DEFINE_REG_W(Name) \
  DEFINE_REG(Name);        \
  Name = Name.W();

#define ASSIGN_REG(Name) regs.Ask(&Name);

#define ASSIGN_REG_W(Name) \
  ASSIGN_REG(Name);        \
  Name = Name.W();

#define DEFINE_PINNED(Name, Reg) \
  Register Name = no_reg;        \
  regs.Pinned(Reg, &Name);

#define ASSIGN_PINNED(Name, Reg) regs.Pinned(Reg, &Name);

#define DEFINE_SCOPED(Name) \
  DEFINE_REG(Name)          \
  RegisterAllocator::Scoped scope_##Name(&regs, &Name);

#define FREE_REG(Name) regs.Free(&Name);
void GetContextFromImplicitArg(MacroAssembler* masm, Register data,
                               Register scratch) {
  __ CompareInstanceType(scratch, scratch, WASM_TRUSTED_INSTANCE_DATA_TYPE);

  __ B(eq, &instance);

      FieldMemOperand(data, WasmTrustedInstanceData::kNativeContextOffset));

void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
  __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
}

void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
      WasmFrameConstants::kProtectedInstructionReturnAddressOffset);
  __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap);
}
void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();

  __ EnterFrame(StackFrame::STACK_SWITCH);

  __ Sub(sp, sp,
         Immediate(StackSwitchFrameConstants::kNumSpillSlots *
                   kSystemPointerSize));

  ResetStackSwitchFrameStackSlots(masm);

  __ LoadRootRelative(stack, IsolateData::active_stack_offset());

  FillJumpBuffer(masm, stack, &resume, scratch);

  regs.ResetExcept(suspender, stack);

  __ LoadExternalPointerField(

  __ cmp(suspender_stack, stack);

  __ StoreRootRelative(IsolateData::active_stack_offset(), caller);

  __ LoadTaggedField(
      parent, FieldMemOperand(suspender, WasmSuspenderObject::kParentOffset));
  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);

  regs.ResetExcept(suspender, caller, stack);

  SwitchStacks(masm, stack, false, {caller, suspender});

  MemOperand GCScanSlotPlace =
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ Str(xzr, GCScanSlotPlace);

  __ LeaveFrame(StackFrame::STACK_SWITCH);
void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();
  __ EnterFrame(StackFrame::STACK_SWITCH);

  __ Sub(sp, sp,
         Immediate(StackSwitchFrameConstants::kNumSpillSlots *
                   kSystemPointerSize));

  ResetStackSwitchFrameStackSlots(masm);

  regs.ResetExcept(closure);

      FieldMemOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));

  regs.ResetExcept(suspender);

  __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());

  FillJumpBuffer(masm, active_stack, &suspend, scratch);

  __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
  __ StoreTaggedField(
  __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
  int32_t active_suspender_offset =
      MacroAssembler::RootRegisterOffsetForRootIndex(
          RootIndex::kActiveSuspender);

  __ LoadExternalPointerField(

  __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);

  SwitchStacks(masm, active_stack, false, {target_stack});

  regs.ResetExcept(target_stack);

  MemOperand GCScanSlotPlace =
      MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
  __ Str(xzr, GCScanSlotPlace);

  LoadJumpBuffer(masm, target_stack, false, scratch,

  __ LeaveFrame(StackFrame::STACK_SWITCH);

  __ CallRuntime(Runtime::kThrow);

  LoadJumpBuffer(masm, target_stack, true, scratch,

  __ LeaveFrame(StackFrame::STACK_SWITCH);

  __ DropArguments(2);

void Builtins::Generate_WasmResume(MacroAssembler* masm) {
  Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
}

void Builtins::Generate_WasmReject(MacroAssembler* masm) {
  Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
}

void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
void SwitchToAllocatedStack(MacroAssembler* masm, RegisterAllocator& regs,
                            Register wasm_instance, Register wrapper_buffer,
                            Register& original_fp, Register& new_wrapper_buffer,
                            Label* suspend) {
  ResetStackSwitchFrameStackSlots(masm);

  __ LoadRootRelative(parent_stack, IsolateData::active_stack_offset());

  FillJumpBuffer(masm, parent_stack, suspend, scratch);
  SwitchStacks(masm, parent_stack, false, {wasm_instance, wrapper_buffer});

  regs.Pinned(x9, &original_fp);
  __ Mov(original_fp, fp);

  __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
  LoadTargetJumpBuffer(masm, target_stack, scratch,

  __ EnterFrame(StackFrame::STACK_SWITCH);

      JSToWasmWrapperFrameConstants::kWrapperBufferSize,

  __ Sub(sp, sp, Immediate(stack_space));

  __ Mov(new_wrapper_buffer, sp);

  static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
                JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
             JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
             JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
             JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
             JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
void SwitchBackAndReturnPromise(MacroAssembler* masm, RegisterAllocator& regs,
                                wasm::Promise mode, Label* return_promise) {
  static const Builtin_FulfillPromise_InterfaceDescriptor desc;

  __ LoadRoot(promise, RootIndex::kActiveSuspender);
  __ LoadTaggedField(
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));

         MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));

  ReloadParentStack(masm, promise, return_value, kContextRegister, tmp, tmp2,
                    tmp3);
  RestoreParentSuspender(masm, tmp);

         MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));

  __ CallBuiltin(Builtin::kFulfillPromise);

  __ bind(return_promise);
}

void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
                                         RegisterAllocator& regs,
                                         Label* return_promise) {
  static const Builtin_RejectPromise_InterfaceDescriptor desc;

  thread_in_wasm_flag_addr = x2;
      thread_in_wasm_flag_addr,

  __ LoadRoot(promise, RootIndex::kActiveSuspender);
  __ LoadTaggedField(
      promise, FieldMemOperand(promise, WasmSuspenderObject::kPromiseOffset));

         MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));

  ReloadParentStack(masm, promise, reason, kContextRegister, tmp, tmp2, tmp3);
  RestoreParentSuspender(masm, tmp);

         MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));

  __ LoadRoot(debug_event, RootIndex::kTrueValue);
  __ CallBuiltin(Builtin::kRejectPromise);

  __ jmp(return_promise);

  masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
  auto regs = RegisterAllocator::WithAllocatableGeneralRegisters();

  __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);

  __ Sub(sp, sp,
         Immediate(StackSwitchFrameConstants::kNumSpillSlots *
                   kSystemPointerSize));

  __ Ldr(implicit_arg,
         MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));

  SwitchToAllocatedStack(masm, regs, implicit_arg, wrapper_buffer,
                         original_fp, new_wrapper_buffer, &suspend);

  new_wrapper_buffer = wrapper_buffer;

  regs.ResetExcept(original_fp, wrapper_buffer, implicit_arg,
                   new_wrapper_buffer);

  __ Str(new_wrapper_buffer,
         MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));

  __ Str(implicit_arg,
         MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));

             JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
         MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));

         MemOperand(wrapper_buffer,
                    JSToWasmWrapperFrameConstants::
                        kWrapperBufferStackReturnBufferSize));

  __ Add(result_size, result_size, 1);
  __ Bic(result_size, result_size, 1);

  __ Mov(scratch, sp);
             JSToWasmWrapperFrameConstants::
                 kWrapperBufferStackReturnBufferStart));

  int stack_params_offset =

  __ Ldr(params_start,
         MemOperand(wrapper_buffer,
                    JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));

             JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));

  __ Add(last_stack_param, params_start, Immediate(stack_params_offset));

  __ Sub(scratch, params_end, last_stack_param);
  __ TestAndBranchIfAllClear(scratch, 0x8, &loop_start);

  __ Push(xzr, scratch);

  __ bind(&loop_start);

  Label finish_stack_params;
  __ Cmp(last_stack_param, params_end);
  __ B(ge, &finish_stack_params);

  __ Ldp(scratch2, scratch1,
  __ Push(scratch1, scratch2);

  __ jmp(&loop_start);

  __ bind(&finish_stack_params);

  size_t next_offset = 0;

  DCHECK_EQ(next_offset, stack_params_offset);

  __ Ldr(thread_in_wasm_flag_addr,

  __ Str(scratch.W(), MemOperand(thread_in_wasm_flag_addr, 0));

         MemOperand(fp, StackSwitchFrameConstants::kGCScanSlotCountOffset));

  __ LoadWasmCodePointer(
      call_target,
      MemOperand(wrapper_buffer,
                 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));

  __ CallWasmCodePointerNoSignatureCheck(call_target);

  __ Ldr(thread_in_wasm_flag_addr,

  __ Ldr(wrapper_buffer,
         MemOperand(fp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));

             JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1));
             JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2));
             JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1));
             JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2));

  __ Ldr(x1, MemOperand(fp, StackSwitchFrameConstants::kResultArrayOffset));
  __ Ldr(x0, MemOperand(fp, StackSwitchFrameConstants::kImplicitArgOffset));

             fp, JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
         MemOperand(fp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));

  GetContextFromImplicitArg(masm, x0, scratch);
  __ CallBuiltin(Builtin::kJSToWasmHandleReturns);

  Label return_promise;

  SwitchBackAndReturnPromise(masm, regs, mode, &return_promise);

  __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
                             : StackFrame::JS_TO_WASM);

  constexpr int64_t stack_arguments_in = 2;
  __ DropArguments(stack_arguments_in);

  GenerateExceptionHandlingLandingPad(masm, regs, &return_promise);
void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
  JSToWasmWrapperHelper(masm, wasm::kNoPromise);
}

void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
  JSToWasmWrapperHelper(masm, wasm::kPromise);
}

void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
  JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
}
void SwitchSimulatorStackLimit(MacroAssembler* masm) {
  if (masm->options().enable_simulator_code) {
    UseScratchRegisterScope temps(masm);

static constexpr Register kOldSPRegister = x23;
static constexpr Register kSwitchFlagRegister = x24;

void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input,
                                     Register target_input,
                                     Register argv_input) {
  using ER = ExternalReference;

  __ Mov(kSwitchFlagRegister, 0);
  __ Mov(kOldSPRegister, sp);

  ER on_central_stack_flag_loc = ER::Create(
      IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
  const Register& on_central_stack_flag = x2;
  __ Mov(on_central_stack_flag, on_central_stack_flag_loc);
  __ Ldrb(on_central_stack_flag, MemOperand(on_central_stack_flag));

  Label do_not_need_to_switch;
  __ Cbnz(on_central_stack_flag, &do_not_need_to_switch);

  static constexpr Register central_stack_sp = x4;
  DCHECK(!AreAliased(central_stack_sp, argc_input, argv_input, target_input));

  __ Push(argc_input, target_input, argv_input, padreg);

  __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,

  __ Pop(padreg, argv_input, target_input, argc_input);

  UseScratchRegisterScope temps{masm};
  Register new_sp_after_call = temps.AcquireX();

  SwitchSimulatorStackLimit(masm);

  __ Sub(sp, central_stack_sp, kReturnAddressSlotOffset + kPadding);
  __ Mov(kSwitchFlagRegister, 1);

  __ bind(&do_not_need_to_switch);
}

void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) {
  using ER = ExternalReference;

  Label no_stack_change;
  __ Cbz(kSwitchFlagRegister, &no_stack_change);

  __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,

  SwitchSimulatorStackLimit(masm);

  __ Mov(sp, kOldSPRegister);

  __ bind(&no_stack_change);
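// C++ helpers called while wasm runs on a secondary (growable) stack must
// execute on the central stack. The two helpers above save sp in
// kOldSPRegister (x23), fetch the central stack pointer from the runtime,
// record the switch in kSwitchFlagRegister (x24), and undo it on the way
// back; both are no-ops when the flag says execution is already on the
// central stack.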
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               ArgvMode argv_mode, bool builtin_exit_frame,
                               bool switch_to_central_stack) {
  using ER = ExternalReference;

  static constexpr Register argc_input = x0;
  static constexpr Register target_input = x1;

  static constexpr Register argv_input = x11;

  __ SlotAddress(argv_input, argc_input);
  __ Sub(argv_input, argv_input, kReceiverOnStackSize);

  static constexpr Register argc = x22;

  const int kNoExtraSpace = 0;
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  __ Mov(argc, argc_input);

#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchToTheCentralStackIfNeeded(masm, argc_input, target_input, argv_input);
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  static_assert(target_input == x1);
  static_assert(argv_input == x11);
  __ Swap(target_input, argv_input);
  static constexpr Register target = x11;
  static constexpr Register argv = x1;
  static_assert(!AreAliased(argc_input, argc, target, argv));

  static_assert(argc_input == x0);
  static_assert(argv == x1);
  __ Mov(x2, ER::isolate_address());

  __ StoreReturnAddressAndCall(target);

  Label exception_returned;
  __ B(eq, &exception_returned);

#if V8_ENABLE_WEBASSEMBLY
  if (switch_to_central_stack) {
    SwitchFromTheCentralStackIfNeeded(masm);
  }
#endif  // V8_ENABLE_WEBASSEMBLY

  __ LeaveExitFrame(x10, x9);
  __ DropArguments(x11);

  __ LeaveExitFrame(x10, x9);

  __ AssertFPCRState();
  __ Bind(&exception_returned);

  __ Mov(x2, ER::isolate_address());
  __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3,

  __ Mov(cp, ER::Create(IsolateAddressId::kPendingHandlerContextAddress,
                        masm->isolate()));

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();
  __ Mov(scratch, ER::Create(IsolateAddressId::kPendingHandlerSPAddress,
                             masm->isolate()));

  __ Mov(sp, scratch);

  __ Mov(fp, ER::Create(IsolateAddressId::kPendingHandlerFPAddress,
                        masm->isolate()));

  __ Cbz(cp, &not_js_frame);

  __ Bind(&not_js_frame);

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.AcquireX();
         ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate()));

  UseScratchRegisterScope temps(masm);

  __ Mov(x17, ER::Create(IsolateAddressId::kPendingHandlerEntrypointAddress,
                         masm->isolate()));
#if V8_ENABLE_WEBASSEMBLY
void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
  using ER = ExternalReference;
  Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
  Register gap = WasmHandleStackOverflowDescriptor::GapRegister();  // reconstructed
  Label call_runtime;  // reconstructed

  // ... (argument spilling elided)
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallCFunction(ER::wasm_grow_stack(), 5);
  }
  // ... (zero-result check branching to call_runtime elided)

  // Success: rebase fp onto the new stack segment, preserving its distance
  // from the old frame base.
  {
    UseScratchRegisterScope temps(masm);
    Register new_fp = temps.AcquireX();
    // ... (new_fp initialization elided)
    __ Sub(new_fp, fp, new_fp);
    // ...
  }
  SwitchSimulatorStackLimit(masm);
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.AcquireX();
    // ... (stack-limit update elided)
  }

  __ bind(&call_runtime);
  // Growing failed: let the runtime throw or handle the interrupt instead
  // (destination registers reconstructed).
  __ Ldr(kWasmImplicitArgRegister,
         MemOperand(fp, WasmFrameConstants::kWasmInstanceDataOffset));
  __ LoadTaggedField(
      cp, FieldMemOperand(kWasmImplicitArgRegister,
                          WasmTrustedInstanceData::kNativeContextOffset));
  {
    __ EnterFrame(StackFrame::INTERNAL);
    __ PushArgument(gap);
    __ CallRuntime(Runtime::kWasmStackGuard);
    __ LeaveFrame(StackFrame::INTERNAL);
  }
}
#endif  // V8_ENABLE_WEBASSEMBLY
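// Protocol sketch for the builtin above: wasm_grow_stack() tries to allocate
// a larger stack segment and relocate the current frame onto it. On success
// the frame pointer is re-derived against the new segment and the stack
// limit is refreshed; on failure the builtin loads the instance's native
// context and defers to Runtime::kWasmStackGuard, passing the requested gap
// so the runtime can decide between growing and throwing a RangeError.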
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label done;
  Register result = x7;                                // reconstructed
  const int kArgumentOffset = 2 * kSystemPointerSize;  // reconstructed

  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
  UseScratchRegisterScope temps(masm);
  Register scratch1 = temps.AcquireX();
  Register scratch2 = temps.AcquireX();
  DoubleRegister double_scratch = temps.AcquireD();  // reconstructed

  __ Peek(double_scratch, kArgumentOffset);
  // Fast path: a single fixed-point convert, jumping to done when the value
  // converts exactly.
  __ TryConvertDoubleToInt64(result, double_scratch, &done);

  // Slow path: decode the IEEE-754 bits by hand (several steps elided).
  Register exponent = scratch1;
  Register mantissa = scratch2;
  // ... (guarding comparison elided)
  __ Check(ge, AbortReason::kUnexpectedValue);
  // Negate the mantissa when the sign bit was set.
  __ Cneg(mantissa, mantissa, ne);
  // Re-bias the exponent into a shift amount and place the mantissa bits.
  __ Sub(exponent, exponent,
         HeapNumber::kExponentBias + HeapNumber::kMantissaBits);  // reconstructed
  __ Lsl(result, mantissa, exponent);

  __ Bind(&done);
  // ... (write-back of the result and return elided)
}
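// Why the manual path is just a shift: a finite double encodes
// mantissa * 2^(exponent - kExponentBias - kMantissaBits) once the implicit
// leading 1 is OR-ed in, and ToInt32 semantics only need the low 32 bits of
// the integer value, so the (sign-adjusted) mantissa shifted left by the
// re-biased exponent reproduces those bits modulo 2^64. Illustrative
// arithmetic (values assumed): for 2^63 + 2^40 the stored exponent is
// kExponentBias + 63, the shift amount is 63 - 52 = 11, and the mantissa
// 2^52 + 2^29 shifted left by 11 gives back 2^63 + 2^40.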
void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
                                            CallApiCallbackMode mode) {
  // ... (mode dispatch and most register assignments elided)
  argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
  // ...
  api_function_address =
      CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
  // ...
  DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
                     func_templ, scratch));

  using FCA = FunctionCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiCallbackExitFrameConstants;

  static_assert(FCA::kArgsLength == 6);
  static_assert(FCA::kNewTargetIndex == 5);
  static_assert(FCA::kTargetIndex == 4);
  static_assert(FCA::kReturnValueIndex == 3);
  static_assert(FCA::kContextIndex == 2);
  static_assert(FCA::kIsolateIndex == 1);
  static_assert(FCA::kUnusedIndex == 0);

  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
                       topmost_script_having_context);
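  // The asserts above pin down the implicit-args block the callback sees,
  // indexed from the lowest stack address upwards:
  //
  //   [0] unused (padding)   [1] isolate        [2] context
  //   [3] return value       [4] target         [5] new.target
  //
  // The v8::FunctionCallbackInfo built further down points into this block,
  // with the JS arguments laid out right above it.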
  static constexpr int kStackSize = FCA::kArgsLength;
  static_assert(kStackSize % 2 == 0);

  // Populate the implicit args (most of the stores elided).
  __ Mov(scratch, ER::isolate_address());
  // ...
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...

  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(func_templ,  // operand head reconstructed
                      FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
      kFunctionTemplateInfoCallbackTag);  // tag reconstructed

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_CALLBACK_EXIT);
  // ...
  __ cmp(scratch, fp);
  // ...
  MemOperand argc_operand = MemOperand(fp, FC::kFCIArgcOffset);  // reconstructed
  __ Str(argc, argc_operand);

  // FunctionCallbackInfo::implicit_args_.
  __ Add(scratch, fp, Operand(FC::kImplicitArgsArrayOffset));
  __ Str(scratch, MemOperand(fp, FC::kFCIImplicitArgsOffset));

  // FunctionCallbackInfo::values_ (the first JS argument).
  __ Add(scratch, fp, Operand(FC::kFirstArgumentOffset));
  // ...

  __ RecordComment("v8::FunctionCallback's argument.");
  __ Add(function_callback_info_arg, fp,
         Operand(FC::kFunctionCallbackInfoOffset));

  ExternalReference thunk_ref = ER::invoke_function_callback(mode);
  Register no_thunk_arg = no_reg;  // reconstructed

  static constexpr int kSlotsToDropOnReturn =
      FC::kFunctionCallbackInfoOffset / kSystemPointerSize +
      kJSArgcReceiverSlots;  // initializer reconstructed
  const bool with_profiling =
      mode != CallApiCallbackMode::kOptimizedNoProfiling;  // reconstructed
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
                           &argc_operand, return_value_operand);
}
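// CallApiFunctionAndReturn performs the actual transition: when
// with_profiling is set it routes the call through the
// invoke_function_callback thunk so the CPU profiler can observe it,
// otherwise it calls api_function_address directly. On the way out it
// roughly drops kSlotsToDropOnReturn slots plus the argc read back from
// argc_operand, and reloads the return value slot written by the callback.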
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // ... (register assignments elided)
  Register api_function_address = x2;
  // ...

  using PCA = PropertyCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiAccessorExitFrameConstants;

  static_assert(PCA::kPropertyKeyIndex == 0);
  static_assert(PCA::kShouldThrowOnErrorIndex == 1);
  static_assert(PCA::kHolderIndex == 2);
  static_assert(PCA::kIsolateIndex == 3);
  static_assert(PCA::kHolderV2Index == 4);
  static_assert(PCA::kReturnValueIndex == 5);
  static_assert(PCA::kDataIndex == 6);
  static_assert(PCA::kThisIndex == 7);
  static_assert(PCA::kArgsLength == 8);
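  // Mirroring the asserts, the PropertyCallbackArguments array pushed below
  // is, from lowest stack address upwards:
  //
  //   [0] property key   [1] should_throw_on_error   [2] holder
  //   [3] isolate        [4] holder v2 (padding)     [5] return value
  //   [6] data           [7] this (the receiver)
  //
  // The v8::PropertyCallbackInfo handed to the getter wraps a pointer into
  // this array.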
  __ LoadTaggedField(scratch,
                     FieldMemOperand(callback, AccessorInfo::kDataOffset));  // operand reconstructed
  __ LoadRoot(undef, RootIndex::kUndefinedValue);
  __ Mov(scratch2, ER::isolate_address());
  // ... (holder load and the PCA pushes elided)
  __ LoadTaggedField(name_arg,
                     FieldMemOperand(callback, AccessorInfo::kNameOffset));  // operand reconstructed
  Register should_throw_on_error = xzr;  // should_throw_on_error -> kDontThrow
  __ Push(should_throw_on_error, name_arg);

  __ RecordComment("Load api_function_address");
  __ LoadExternalPointerField(
      api_function_address,
      FieldMemOperand(callback,  // operand and tag reconstructed
                      AccessorInfo::kMaybeRedirectedGetterOffset),
      kAccessorInfoGetterTag);

  __ EnterExitFrame(scratch, FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                    StackFrame::API_ACCESSOR_EXIT);

  __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
  // property_callback_info_arg = v8::PropertyCallbackInfo&
  __ Add(property_callback_info_arg, fp, Operand(FC::kArgsArrayOffset));

  DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
                     callback, scratch, scratch2));  // tail reconstructed

#ifdef V8_ENABLE_DIRECT_HANDLE
  // ... (direct-handle case elided: name_arg already holds the Name)
#else  // placement reconstructed
  // name_arg = Local<Name>(&name) = &args_array[kPropertyKeyIndex].
  static_assert(PCA::kPropertyKeyIndex == 0);
  __ mov(name_arg, property_callback_info_arg);
#endif

  ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
  Register thunk_arg = callback;  // reconstructed

  static constexpr int kSlotsToDropOnReturn =
      FC::kPropertyCallbackInfoArgsLength;
  MemOperand* const kUseStackSpaceConstant = nullptr;

  const bool with_profiling = true;
  CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
                           thunk_ref, thunk_arg, kSlotsToDropOnReturn,
                           kUseStackSpaceConstant, return_value_operand);
}
void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  __ Poke<MacroAssembler::kSignLR>(lr, 0);  // Store the return address.
  __ Blr(x10);                              // Call the C++ function (reconstructed).
  __ Peek<MacroAssembler::kAuthLR>(lr, 0);  // Return to calling code.
  __ AssertFPCRState();
  __ Ret();  // reconstructed
}
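// DirectCEntry exists so that movable callers can call C functions that may
// trigger a GC: the return address lives in a stack slot the GC can update,
// rather than sitting in lr across the call. The kSignLR/kAuthLR template
// arguments make the spill pointer-authentication aware when control-flow
// integrity is enabled: lr is PAC-signed before being stored and
// authenticated after being reloaded.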
template <typename RegisterT>
void CopyRegListToFrame(MacroAssembler* masm, const Register& dst,
                        int dst_offset, const CPURegList& reg_list,
                        const RegisterT& temp0, const RegisterT& temp1,
                        int src_offset = 0) {
  UseScratchRegisterScope temps(masm);
  CPURegList copy_to_input = reg_list;
  int reg_size = reg_list.RegisterSizeInBytes();
  DCHECK_EQ(temp0.SizeInBytes(), reg_size);
  DCHECK_EQ(temp1.SizeInBytes(), reg_size);

  // Compute temporary addresses to avoid offset-range issues in the loop.
  Register src = temps.AcquireX();  // reconstructed
  masm->Add(src, sp, src_offset);
  masm->Add(dst, dst, dst_offset);

  // Write reg_list into the frame pointed to by dst, two registers at a time.
  for (int i = 0; i < reg_list.Count(); i += 2) {
    masm->Ldp(temp0, temp1, MemOperand(src, i * reg_size));

    CPURegister reg0 = copy_to_input.PopLowestIndex();
    CPURegister reg1 = copy_to_input.PopLowestIndex();
    int offset0 = reg0.code() * reg_size;
    int offset1 = reg1.code() * reg_size;

    // Pair up adjacent destination slots into a single Stp when possible.
    if (offset1 == offset0 + reg_size) {
      masm->Stp(temp0, temp1, MemOperand(dst, offset0));
    } else {  // fallback reconstructed
      masm->Str(temp0, MemOperand(dst, offset0));
      masm->Str(temp1, MemOperand(dst, offset1));
    }
  }
  masm->Sub(dst, dst, dst_offset);  // Restore dst.
}
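// The offset check is the point of carrying two temporaries: whenever two
// consecutively-numbered registers map to adjacent frame slots, one Stp
// stores both in a single instruction, and only irregular lists fall back to
// two Str stores. RestoreRegList below plays the same trick with Ldp, except
// that it can load directly into the final registers and needs no temps.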
void RestoreRegList(MacroAssembler* masm, const CPURegList& reg_list,
                    const Register& src_base, int src_offset) {
  UseScratchRegisterScope temps(masm);
  CPURegList restore_list = reg_list;
  int reg_size = restore_list.RegisterSizeInBytes();

  // Compute a temporary address to avoid offset-range issues in the loop.
  Register src = temps.AcquireX();  // reconstructed
  masm->Add(src, src_base, src_offset);

  // No need to restore padreg.
  restore_list.Remove(padreg);

  // Restore every register in restore_list from the frame at src.
  while (!restore_list.IsEmpty()) {
    CPURegister reg0 = restore_list.PopLowestIndex();
    CPURegister reg1 = restore_list.PopLowestIndex();
    int offset0 = reg0.code() * reg_size;

    if (reg1 == NoCPUReg) {  // odd-length list: single-load tail reconstructed
      masm->Ldr(reg0, MemOperand(src, offset0));
      break;
    }

    int offset1 = reg1.code() * reg_size;

    // Pair up adjacent source slots into a single Ldp when possible.
    if (offset1 == offset0 + reg_size) {
      masm->Ldp(reg0, reg1, MemOperand(src, offset0));
    } else {  // fallback reconstructed
      masm->Ldr(reg0, MemOperand(src, offset0));
      masm->Ldr(reg1, MemOperand(src, offset1));
    }
  }
}
void Generate_DeoptimizationEntry(MacroAssembler* masm,
                                  DeoptimizeKind deopt_kind) {
  Isolate* isolate = masm->isolate();

  // Save all allocatable SIMD128 registers (list construction reconstructed).
  CPURegList saved_simd128_registers(
      kQRegSizeInBits,
      DoubleRegList::FromBits(
          RegisterConfiguration::Default()->allocatable_simd128_codes_mask()));
  DCHECK_EQ(saved_simd128_registers.Count() % 2, 0);
  __ PushCPURegList(saved_simd128_registers);

  // Save all allocatable general-purpose registers (list head reconstructed).
  CPURegList saved_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 28);
  saved_registers.Remove(ip0);
  saved_registers.Remove(ip1);
  saved_registers.Remove(x18);
  saved_registers.Combine(fp);
  saved_registers.Align();
  DCHECK_EQ(saved_registers.Count() % 2, 0);
  __ PushCPURegList(saved_registers);

  // Record fp as the c_entry_fp (head reconstructed).
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.AcquireX();
    __ Mov(scratch, Operand(ExternalReference::Create(
                        IsolateAddressId::kCEntryFPAddress, isolate)));
    __ Str(fp, MemOperand(scratch));
  }

  const int kSavedRegistersAreaSize =
      (saved_registers.Count() * kXRegSize) +  // line reconstructed
      (saved_simd128_registers.Count() * kQRegSize);

  // Floating point registers are saved on the stack above core registers.
  const int kSimd128RegistersOffset = saved_registers.Count() * kXRegSize;

  Register code_object = x2;  // reconstructed
  Register fp_to_sp = x3;     // reconstructed
  __ Mov(code_object, lr);
  // Compute the fp-to-sp delta.
  __ Add(fp_to_sp, sp, kSavedRegistersAreaSize);
  __ Sub(fp_to_sp, fp, fp_to_sp);

  // Allocate a new Deoptimizer object (argument setup mostly elided).
  __ Mov(x1, static_cast<int>(deopt_kind));
  // ...
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
  }
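  // new_deoptimizer_function corresponds to Deoptimizer::New on the C++
  // side; the five arguments describe the frame being torn down: roughly the
  // function, the deopt kind, the return address inside the optimized code
  // object (stashed from lr above) and the fp-to-sp delta, plus the isolate.
  // The returned Deoptimizer* carries the "input" FrameDescription that the
  // register state is copied into next.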
  // Copy the saved registers into the input FrameDescription (x1; the Ldr
  // and the call heads are reconstructed).
  __ Ldr(x1, MemOperand(x0, Deoptimizer::input_offset()));
  CopyRegListToFrame(masm, x1, FrameDescription::registers_offset(),
                     saved_registers, x2, x3);
  CopyRegListToFrame(masm, x1, FrameDescription::simd128_registers_offset(),
                     saved_simd128_registers, q2, q3, kSimd128RegistersOffset);

  // Mark the stack as not iterable while it is being taken apart.
  {
    UseScratchRegisterScope temps(masm);
    Register is_iterable = temps.AcquireX();
    __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
    // ... (store of 0 elided)
  }

  // Copy the remaining frame content into the input FrameDescription, then
  // unwind it (unwind_limit load elided).
  Register unwind_limit = x2;  // reconstructed
  __ SlotAddress(x1, 0);
  __ Mov(x5, unwind_limit);
  __ CopyDoubleWords(x3, x1, x5);
  // Drop an even number of slots so sp stays 16-byte aligned.
  __ Bic(unwind_limit, unwind_limit, 1);
  __ Drop(unwind_limit);

  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
  }
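  // compute_output_frames corresponds to Deoptimizer::ComputeOutputFrames:
  // given the single Deoptimizer* argument it translates the optimized
  // "input" frame into one or more unoptimized "output" FrameDescriptions
  // (one per inlined function), which the loop below writes back onto the
  // machine stack.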
  // Replace the current (input) frame with the output frames.
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.AcquireX();
    // ... (load of the caller frame top elided)
    __ Mov(sp, scratch);
  }

  // Loop over the output frames (bounds setup elided).
  Label outer_push_loop, outer_loop_header;
  __ B(&outer_loop_header);

  __ Bind(&outer_push_loop);
  // ... (current frame pointer and frame_size loads elided)
  __ SlotAddress(x6, 0);
  __ CopyDoubleWords(x6, x7, frame_size);

  __ Bind(&outer_loop_header);
  // ... (loop-bound compare elided)
  __ B(lt, &outer_push_loop);
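  // Each iteration copies one output FrameDescription's contents from the
  // heap onto the machine stack with CopyDoubleWords, effectively rebuilding
  // the unoptimized frames in place; iteration runs from the outermost to
  // the innermost frame so the innermost frame ends up at sp.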
  // Restore the registers from the last output frame (argument tails
  // reconstructed).
  RestoreRegList(masm, saved_simd128_registers, current_frame,
                 FrameDescription::simd128_registers_offset());

  {
    UseScratchRegisterScope temps(masm);
    Register is_iterable = temps.AcquireX();
    Register one = x4;  // reconstructed
    __ LoadIsolateField(is_iterable, IsolateFieldId::kStackIsIterable);
    __ Mov(one, Operand(1));
    __ Strb(one, MemOperand(is_iterable));  // reconstructed: iterable again
  }

  // lr is the only saved register not restored from the frame, so it can
  // serve as the base register for restoring the rest.
  DCHECK(!saved_registers.IncludesAliasOf(lr));
  Register last_output_frame = lr;  // reconstructed
  __ Mov(last_output_frame, current_frame);
  RestoreRegList(masm, saved_registers, last_output_frame,
                 FrameDescription::registers_offset());

  {
    UseScratchRegisterScope temps(masm);
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
    // ... (BTI-aware continuation jump elided)
#endif
    // ...
  }
}
// Bodies reconstructed: each entry simply dispatches on DeoptimizeKind.
void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}

void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}
void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
    MacroAssembler* masm) {
  // ... (closure/code_obj register setup and SFI load elided)
  ResetSharedFunctionInfoAge(masm, code_obj);
  __ LoadTrustedPointerField(
      code_obj,  // destination reconstructed
      FieldMemOperand(code_obj, SharedFunctionInfo::kTrustedFunctionDataOffset),
      kUnknownIndirectPointerTag);  // tag reconstructed

  // For OSR the baseline Code must already exist.
  if (v8_flags.debug_code) {  // guard reconstructed
    __ IsObjectType(code_obj, x3, x3, CODE_TYPE);
    __ Assert(eq, AbortReason::kExpectedBaselineData);
  }

  // Load the feedback cell and vector from the closure.
  __ LoadTaggedField(feedback_cell,
                     FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));  // reconstructed
  // ... (feedback vector load elided)

  Label install_baseline_code;
  // Check that the feedback vector is valid; if not, call into the runtime
  // to install the baseline code first.
  __ IsObjectType(feedback_vector, x3, x3, FEEDBACK_VECTOR_TYPE);
  __ B(ne, &install_baseline_code);

  // Swap the interpreter frame slots over to the baseline frame layout
  // (offsets reconstructed).
  __ Str(feedback_cell,
         MemOperand(fp, BaselineFrameConstants::kFeedbackCellFromFp));
  __ Str(feedback_vector,
         MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp));
  feedback_vector = no_reg;

  Register get_baseline_pc = x3;  // reconstructed
  __ Mov(get_baseline_pc,
         ExternalReference::baseline_pc_for_next_executed_bytecode());
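  // baseline_pc_for_next_executed_bytecode is a C++ helper that maps the
  // interpreter's current bytecode offset to the matching program counter
  // inside the baseline Code object; per the 3-argument CallCFunction below
  // it receives (code object, bytecode offset, bytecode array), and its
  // result becomes the OSR entry point handed to Generate_OSREntry.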
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // ... (caller-saved state spilling elided)
    __ CallCFunction(get_baseline_pc, 3, 0);
  }
  // ... (entry pc computation from the returned offset elided)
  Generate_OSREntry(masm, code_obj);
  __ Trap();  // reconstructed: not reached

  __ bind(&install_baseline_code);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // ...
    __ PushArgument(closure);
    __ CallRuntime(Runtime::kInstallBaselineCode, 1);
    // ... (retry path elided)
  }
}
void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
  // Restart the current frame: tear down the interpreted frame and re-invoke
  // the function (loads of the function and argc elided).
  // ...
  __ LeaveFrame(StackFrame::INTERPRETED);
  // ... (tail call into the function elided)
}

#ifdef V8_ENABLE_LEAPTIERING