v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
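A minimal embedding sketch, for orientation only (an illustration modelled on V8's public embedder API as in samples/hello-world.cc, not part of this file; header names and shutdown calls such as DisposePlatform vary between V8 versions):

#include <cstdio>
#include <memory>

#include "libplatform/libplatform.h"
#include "v8.h"

int main() {
  // Process-wide setup of the platform and the engine.
  std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
  v8::V8::InitializePlatform(platform.get());
  v8::V8::Initialize();

  // An isolate is an independent engine instance with its own heap.
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Local<v8::Context> context = v8::Context::New(isolate);
    v8::Context::Scope context_scope(context);

    // Compile and run a small script, then print its result.
    v8::Local<v8::String> source =
        v8::String::NewFromUtf8Literal(isolate, "'Hello' + ', World!'");
    v8::Local<v8::Script> script =
        v8::Script::Compile(context, source).ToLocalChecked();
    v8::Local<v8::Value> result = script->Run(context).ToLocalChecked();
    v8::String::Utf8Value utf8(isolate, result);
    printf("%s\n", *utf8);
  }
  isolate->Dispose();
  v8::V8::Dispose();
  v8::V8::DisposePlatform();
  delete create_params.array_buffer_allocator;
  return 0;
}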
builtins-x64.cc
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_X64
6
9#include "src/base/iterator.h"
14// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
18#include "src/common/globals.h"
22#include "src/heap/heap-inl.h"
24#include "src/objects/cell.h"
25#include "src/objects/code.h"
27#include "src/objects/foreign.h"
31#include "src/objects/smi.h"
32
33#if V8_ENABLE_WEBASSEMBLY
36#include "src/wasm/stacks.h"
40#endif // V8_ENABLE_WEBASSEMBLY
41
42namespace v8 {
43namespace internal {
44
45#define __ ACCESS_MASM(masm)
46
47void Builtins::Generate_Adaptor(MacroAssembler* masm,
48 int formal_parameter_count, Address address) {
49 __ CodeEntry();
50
53 __ TailCallBuiltin(
54 Builtins::AdaptorWithBuiltinExitFrame(formal_parameter_count));
55}
56
57namespace {
58
59constexpr int kReceiverOnStackSize = kSystemPointerSize;
60
61enum class ArgumentsElementType {
62 kRaw, // Push arguments as they are.
63 kHandle // Dereference arguments before pushing.
64};
65
66void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc,
67 Register scratch,
68 ArgumentsElementType element_type) {
69 DCHECK(!AreAliased(array, argc, scratch, kScratchRegister));
70 Register counter = scratch;
71 Label loop, entry;
72 __ leaq(counter, Operand(argc, -kJSArgcReceiverSlots));
73 __ jmp(&entry);
74 __ bind(&loop);
75 Operand value(array, counter, times_system_pointer_size, 0);
76 if (element_type == ArgumentsElementType::kHandle) {
77 __ movq(kScratchRegister, value);
78 value = Operand(kScratchRegister, 0);
79 }
80 __ Push(value);
81 __ bind(&entry);
82 __ decq(counter);
83 __ j(greater_equal, &loop, Label::kNear);
84}
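// A rough C++ analogue of the loop above (a readability sketch only; it
// ignores the machine stack direction and the real registers):
//
//   for (intptr_t i = argc - kJSArgcReceiverSlots - 1; i >= 0; --i) {
//     Address value = array[i];
//     if (element_type == ArgumentsElementType::kHandle) {
//       value = *reinterpret_cast<Address*>(value);  // dereference the handle
//     }
//     Push(value);
//   }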
85
86void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
87 // ----------- S t a t e -------------
88 // -- rax: number of arguments
89 // -- rdi: constructor function
90 // -- rdx: new target
91 // -- rsi: context
92 // -----------------------------------
93
94 Label stack_overflow;
95 __ StackOverflowCheck(rax, &stack_overflow, Label::kFar);
96
97 // Enter a construct frame.
98 {
99 FrameScope scope(masm, StackFrame::CONSTRUCT);
100
101 // Preserve the incoming parameters on the stack.
102 __ Push(rsi);
103 __ Push(rax);
104
105 // TODO(victorgomes): When the arguments adaptor is completely removed, we
106 // should get the formal parameter count and copy the arguments in its
107 // correct position (including any undefined), instead of delaying this to
108 // InvokeFunction.
109
110 // Set up pointer to first argument (skip receiver).
113 // Copy arguments to the expression stack.
114 // rbx: Pointer to start of arguments.
115 // rax: Number of arguments.
116 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kRaw);
117 // The receiver for the builtin/api call.
118 __ PushRoot(RootIndex::kTheHoleValue);
119
120 // Call the function.
121 // rax: number of arguments (untagged)
122 // rdi: constructor function
123 // rdx: new target
124 __ InvokeFunction(rdi, rdx, rax, InvokeType::kCall);
125
126 // Restore arguments count from the frame.
127 __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
128
129 // Leave construct frame.
130 }
131
132 // Remove caller arguments from the stack and return.
133 __ DropArguments(rbx, rcx);
134
135 __ ret(0);
136
137 __ bind(&stack_overflow);
138 {
139 FrameScope scope(masm, StackFrame::INTERNAL);
140 __ CallRuntime(Runtime::kThrowStackOverflow);
141 __ int3(); // This should be unreachable.
142 }
143}
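// Note on the frame contents above (a sketch): inside the construct frame the
// code pushes the context, the argument count, the copied arguments, and
// finally the-hole as the receiver for the builtin/API call; the count is
// reloaded from the frame after the call so the caller's arguments can be
// dropped.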
144
145} // namespace
146
147// This code needs to be present in all continuations pushed onto the
148// stack during the deoptimization process. It is part of a scheme to ensure
149// that the return address immediately after the call to
150// Builtin::kAdaptShadowStackForDeopt is present on the hardware shadow stack.
151// Below, you'll see that this call is unconditionally jumped over. However,
152// during deoptimization, the address of the call is jumped to directly
153// and executed. The end result is that, later, returning to that address
154// after the call succeeds because the user stack and the shadow stack
155// are found to match.
156void Generate_CallToAdaptShadowStackForDeopt(MacroAssembler* masm,
157 bool add_jump) {
158#ifdef V8_ENABLE_CET_SHADOW_STACK
159 ASM_CODE_COMMENT(masm);
160 Label post_adapt_shadow_stack;
161 if (add_jump) __ jmp(&post_adapt_shadow_stack, Label::kNear);
162 const auto saved_pc_offset = masm->pc_offset();
164 Builtin::kAdaptShadowStackForDeopt)));
166 masm->pc_offset() - saved_pc_offset);
167 if (add_jump) __ bind(&post_adapt_shadow_stack);
168#endif // V8_ENABLE_CET_SHADOW_STACK
169}
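// In effect, when add_jump is true the sequence emitted above is roughly
//
//   jmp  post_adapt_shadow_stack    ; normal execution skips the call
//   call AdaptShadowStackForDeopt   ; the deoptimizer jumps straight here
//  post_adapt_shadow_stack:
//
// (a sketch of the emitted control flow only), so the call and its
// shadow-stack return address only ever materialize on the deoptimization
// path.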
170
171// The construct stub for ES5 constructor functions and ES6 class constructors.
172void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
173 // ----------- S t a t e -------------
174 // -- rax: number of arguments (untagged)
175 // -- rdi: constructor function
176 // -- rdx: new target
177 // -- rsi: context
178 // -- sp[...]: constructor arguments
179 // -----------------------------------
180
181 FrameScope scope(masm, StackFrame::MANUAL);
182 // Enter a construct frame.
183 __ EnterFrame(StackFrame::CONSTRUCT);
184 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
185
186 // Preserve the incoming parameters on the stack.
187 __ Push(rsi);
188 __ Push(rax);
189 __ Push(rdi);
190 __ PushRoot(RootIndex::kTheHoleValue);
191 __ Push(rdx);
192
193 // ----------- S t a t e -------------
194 // -- sp[0*kSystemPointerSize]: new target
195 // -- sp[1*kSystemPointerSize]: padding
196 // -- rdi and sp[2*kSystemPointerSize]: constructor function
197 // -- sp[3*kSystemPointerSize]: argument count
198 // -- sp[4*kSystemPointerSize]: context
199 // -----------------------------------
200
201 const TaggedRegister shared_function_info(rbx);
202 __ LoadTaggedField(shared_function_info,
203 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
204 __ movl(rbx,
205 FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset));
206 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(rbx);
207 __ JumpIfIsInRange(
208 rbx, static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
209 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
210 &not_create_implicit_receiver, Label::kNear);
211
212 // If not derived class constructor: Allocate the new receiver object.
213 __ CallBuiltin(Builtin::kFastNewObject);
214 __ jmp(&post_instantiation_deopt_entry, Label::kNear);
215
216 // Else: use TheHoleValue as receiver for constructor call
217 __ bind(&not_create_implicit_receiver);
218 __ LoadRoot(rax, RootIndex::kTheHoleValue);
219
220 // ----------- S t a t e -------------
221 // -- rax implicit receiver
222 // -- Slot 4 / sp[0*kSystemPointerSize] new target
223 // -- Slot 3 / sp[1*kSystemPointerSize] padding
224 // -- Slot 2 / sp[2*kSystemPointerSize] constructor function
225 // -- Slot 1 / sp[3*kSystemPointerSize] number of arguments
226 // -- Slot 0 / sp[4*kSystemPointerSize] context
227 // -----------------------------------
228 __ bind(&post_instantiation_deopt_entry);
229
230 // Restore new target.
231 __ Pop(rdx);
232
233 // Push the allocated receiver to the stack.
234 __ Push(rax);
235
236 // We need two copies because we may have to return the original one,
237 // and the calling convention dictates that the called function pops the
238 // receiver. The second copy is pushed after the arguments; we save it in r8
239 // since rax needs to hold the number of arguments before invoking the
240 // function.
241 __ movq(r8, rax);
242
243 // Set up pointer to first argument (skip receiver).
246
247 // Restore constructor function and argument count.
248 __ movq(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
249 __ movq(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));
250
251 // Check if we have enough stack space to push all arguments.
252 // Argument count in rax.
253 Label stack_overflow;
254 __ StackOverflowCheck(rax, &stack_overflow);
255
256 // TODO(victorgomes): When the arguments adaptor is completely removed, we
257 // should get the formal parameter count and copy the arguments in its
258 // correct position (including any undefined), instead of delaying this to
259 // InvokeFunction.
260
261 // Copy arguments to the expression stack.
262 // rbx: Pointer to start of arguments.
263 // rax: Number of arguments.
264 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kRaw);
265
266 // Push implicit receiver.
267 __ Push(r8);
268
269 // Call the function.
270 __ InvokeFunction(rdi, rdx, rax, InvokeType::kCall);
271
272 // If the result is an object (in the ECMA sense), we should get rid
273 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
274 // on page 74.
275 Label use_receiver, do_throw, leave_and_return, check_result;
276
277 // If the result is undefined, we'll use the implicit receiver. Otherwise we
278 // do a smi check and fall through to check if the return value is a valid
279 // receiver.
280 __ JumpIfNotRoot(rax, RootIndex::kUndefinedValue, &check_result,
282
283 // Throw away the result of the constructor invocation and use the
284 // on-stack receiver as the result.
285 __ bind(&use_receiver);
286 __ movq(rax, Operand(rsp, 0 * kSystemPointerSize));
287 __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
288
289 __ bind(&leave_and_return);
290 // Restore the arguments count.
291 __ movq(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
292 __ LeaveFrame(StackFrame::CONSTRUCT);
293 // Remove caller arguments from the stack and return.
294 __ DropArguments(rbx, rcx);
295 __ ret(0);
296
297 // If the result is a smi, it is *not* an object in the ECMA sense.
298 __ bind(&check_result);
299 __ JumpIfSmi(rax, &use_receiver, Label::kNear);
300
301 // Check if the type of the result is not an object in the ECMA sense.
302 __ JumpIfJSAnyIsNotPrimitive(rax, rcx, &leave_and_return, Label::kNear);
303 __ jmp(&use_receiver);
304
305 __ bind(&do_throw);
306 // Restore context from the frame.
307 __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
308 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
309 // We don't return here.
310 __ int3();
311
312 __ bind(&stack_overflow);
313 // Restore the context from the frame.
314 __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
315 __ CallRuntime(Runtime::kThrowStackOverflow);
316 // This should be unreachable.
317 __ int3();
318
319 // Since the address below is returned into instead of being called directly,
320 // special code is necessary to get that address onto the shadow stack and
321 // avoid a security exception.
322 Generate_CallToAdaptShadowStackForDeopt(masm, false);
323 // Deoptimizer enters here.
324 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
325 masm->pc_offset());
326 __ jmp(&post_instantiation_deopt_entry, Label::kNear);
327}
328
329void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
330 Generate_JSBuiltinsConstructStubHelper(masm);
331}
332
333void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
334 FrameScope scope(masm, StackFrame::INTERNAL);
335 __ Push(rdi);
336 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
337}
338
339namespace {
340
341// Called with the native C calling convention. The corresponding function
342// signature is either:
343// using JSEntryFunction = GeneratedCode<Address(
344// Address root_register_value, Address new_target, Address target,
345// Address receiver, intptr_t argc, Address** argv)>;
346// or
347// using JSEntryFunction = GeneratedCode<Address(
348// Address root_register_value, MicrotaskQueue* microtask_queue)>;
349void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
350 Builtin entry_trampoline) {
351 Label invoke, handler_entry, exit;
352 Label not_outermost_js, not_outermost_js_2;
353
354 {
355 NoRootArrayScope uninitialized_root_register(masm);
356
357 // Set up the frame.
358 //
359 // Note: at this point we are entering V8-generated code from C++ and thus
360 // rbp can be an arbitrary value (-fomit-frame-pointer). Since V8 still
361 // needs to know where the next interesting frame is for the purpose of
362 // stack walks, we instead push the stored EXIT frame fp
363 // (IsolateAddressId::kCEntryFPAddress) below to a dedicated slot.
364 __ pushq(rbp);
365 __ movq(rbp, rsp);
366
367 // Push the stack frame type.
368 __ Push(Immediate(StackFrame::TypeToMarker(type)));
369 // Reserve a slot for the context. It is filled after the root register has
370 // been set up.
371 __ AllocateStackSpace(kSystemPointerSize);
372 // Save callee-saved registers (X64/X32/Win64 calling conventions).
373 __ pushq(r12);
374 __ pushq(r13);
375 __ pushq(r14);
376 __ pushq(r15);
377#ifdef V8_TARGET_OS_WIN
378 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
379 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
380#endif
381 __ pushq(rbx);
382
383#ifdef V8_TARGET_OS_WIN
384 // On Win64 XMM6-XMM15 are callee-save.
385 __ AllocateStackSpace(EntryFrameConstants::kXMMRegistersBlockSize);
386 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
387 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
388 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
389 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
390 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
391 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
392 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
393 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
394 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
395 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
396 static_assert(EntryFrameConstants::kCalleeSaveXMMRegisters == 10);
397 static_assert(EntryFrameConstants::kXMMRegistersBlockSize ==
399 EntryFrameConstants::kCalleeSaveXMMRegisters);
400#endif
401
402 // Initialize the root register.
403 // C calling convention. The first argument is passed in kCArgRegs[0].
404 __ movq(kRootRegister, kCArgRegs[0]);
405
406#ifdef V8_COMPRESS_POINTERS
407 // Initialize the pointer cage base register.
408 __ LoadRootRelative(kPtrComprCageBaseRegister,
409 IsolateData::cage_base_offset());
410#endif
411 }
412
413 // Save copies of the top frame descriptor on the stack.
414 ExternalReference c_entry_fp = ExternalReference::Create(
415 IsolateAddressId::kCEntryFPAddress, masm->isolate());
416
417 {
418 // Keep this static_assert to preserve a link between the offset constant
419 // and the code location it refers to.
420#ifdef V8_TARGET_OS_WIN
423 EntryFrameConstants::kXMMRegistersBlockSize);
424#else
427#endif // V8_TARGET_OS_WIN
428 Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
429 __ Push(c_entry_fp_operand);
430
431 // Clear c_entry_fp now that we've pushed its previous value to the stack.
432 // If the c_entry_fp is not already zero and we don't clear it, the
433 // StackFrameIteratorForProfiler will assume we are executing C++ and miss
434 // the JS frames on top.
435 // Do the same for the fast C call fp and pc.
436 __ Move(c_entry_fp_operand, 0);
437
438 Operand fast_c_call_fp_operand =
439 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP);
440 Operand fast_c_call_pc_operand =
441 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC);
442 __ Push(fast_c_call_fp_operand);
443 __ Move(fast_c_call_fp_operand, 0);
444
445 __ Push(fast_c_call_pc_operand);
446 __ Move(fast_c_call_pc_operand, 0);
447 }
448
449 // Store the context address in the previously-reserved slot.
450 ExternalReference context_address = ExternalReference::Create(
451 IsolateAddressId::kContextAddress, masm->isolate());
452 __ Load(kScratchRegister, context_address);
453 static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
454 __ movq(Operand(rbp, kOffsetToContextSlot), kScratchRegister);
455
456 // If this is the outermost JS call, set js_entry_sp value.
457 ExternalReference js_entry_sp = ExternalReference::Create(
458 IsolateAddressId::kJSEntrySPAddress, masm->isolate());
459 __ Load(rax, js_entry_sp);
460 __ testq(rax, rax);
461 __ j(not_zero, &not_outermost_js);
463 __ movq(rax, rbp);
464 __ Store(js_entry_sp, rax);
465 Label cont;
466 __ jmp(&cont);
467 __ bind(&not_outermost_js);
468 __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME));
469 __ bind(&cont);
470
471 // Jump to a faked try block that does the invoke, with a faked catch
472 // block that sets the exception.
473 __ jmp(&invoke);
474 __ BindExceptionHandler(&handler_entry);
475
476 // Store the current pc as the handler offset. It's used later to create the
477 // handler table.
478 masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());
479
480 // Caught exception: Store result (exception) in the exception
481 // field in the JSEnv and return a failure sentinel.
482 ExternalReference exception = ExternalReference::Create(
483 IsolateAddressId::kExceptionAddress, masm->isolate());
484 __ Store(exception, rax);
485 __ LoadRoot(rax, RootIndex::kException);
486 __ jmp(&exit);
487
488 // Invoke: Link this frame into the handler chain.
489 __ bind(&invoke);
490 __ PushStackHandler();
491
492 // Invoke the function by calling through the JS entry trampoline builtin and
493 // pop the faked function when we return.
494 __ CallBuiltin(entry_trampoline);
495
496 // Unlink this frame from the handler chain.
497 __ PopStackHandler();
498
499 __ bind(&exit);
500 // Check if the current stack frame is marked as the outermost JS frame.
501 __ Pop(rbx);
502 __ cmpq(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
503 __ j(not_equal, &not_outermost_js_2);
504 __ Move(kScratchRegister, js_entry_sp);
505 __ movq(Operand(kScratchRegister, 0), Immediate(0));
506 __ bind(&not_outermost_js_2);
507
508 // Restore the top frame descriptor from the stack.
509 {
510 Operand fast_c_call_pc_operand =
511 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC);
512 __ Pop(fast_c_call_pc_operand);
513
514 Operand fast_c_call_fp_operand =
515 masm->ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP);
516 __ Pop(fast_c_call_fp_operand);
517
518 Operand c_entry_fp_operand = masm->ExternalReferenceAsOperand(c_entry_fp);
519 __ Pop(c_entry_fp_operand);
520 }
521
522 // Restore callee-saved registers (X64 conventions).
523#ifdef V8_TARGET_OS_WIN
524 // On Win64 XMM6-XMM15 are callee-save
525 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
526 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
527 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
528 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
529 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
530 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
531 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
532 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
533 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
534 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
535 __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
536#endif
537
538 __ popq(rbx);
539#ifdef V8_TARGET_OS_WIN
540 // Callee-save in Win64 ABI, arguments/volatile in AMD64 ABI.
541 __ popq(rsi);
542 __ popq(rdi);
543#endif
544 __ popq(r15);
545 __ popq(r14);
546 __ popq(r13);
547 __ popq(r12);
548 __ addq(rsp, Immediate(2 * kSystemPointerSize)); // remove markers
549
550 // Restore frame pointer and return.
551 __ popq(rbp);
552 __ ret(0);
553}
554
555} // namespace
556
557void Builtins::Generate_JSEntry(MacroAssembler* masm) {
558 Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
559}
560
561void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
562 Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
563 Builtin::kJSConstructEntryTrampoline);
564}
565
566void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
567 Generate_JSEntryVariant(masm, StackFrame::ENTRY,
568 Builtin::kRunMicrotasksTrampoline);
569}
570
571static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
572 bool is_construct) {
573 // Expects six C++ function parameters.
574 // - Address root_register_value
575 // - Address new_target (tagged Object pointer)
576 // - Address function (tagged JSFunction pointer)
577 // - Address receiver (tagged Object pointer)
578 // - intptr_t argc
579 // - Address** argv (pointer to array of tagged Object pointers)
580 // (see Handle::Invoke in execution.cc).
581
582 // Open a C++ scope for the FrameScope.
583 {
584 // Platform specific argument handling. After this, the stack contains
585 // an internal frame and the pushed function and receiver, and
586 // registers rax and rbx hold the argument count and argument array,
587 // while rdi holds the function pointer, rsi the context, and rdx the
588 // new.target.
589
590 // MSVC parameters in:
591 // rcx : root_register_value
592 // rdx : new_target
593 // r8 : function
594 // r9 : receiver
595 // [rsp+0x20] : argc
596 // [rsp+0x28] : argv
597 //
598 // GCC parameters in:
599 // rdi : root_register_value
600 // rsi : new_target
601 // rdx : function
602 // rcx : receiver
603 // r8 : argc
604 // r9 : argv
605
606 __ movq(rdi, kCArgRegs[2]);
607 __ Move(rdx, kCArgRegs[1]);
608 // rdi : function
609 // rdx : new_target
610
611 // Clear the context before we push it when entering the internal frame.
612 __ Move(rsi, 0);
613
614 // Enter an internal frame.
615 FrameScope scope(masm, StackFrame::INTERNAL);
616
617 // Set up the context (we need to use the caller context from the isolate).
618 ExternalReference context_address = ExternalReference::Create(
619 IsolateAddressId::kContextAddress, masm->isolate());
620 __ movq(rsi, masm->ExternalReferenceAsOperand(context_address));
621
622 // Push the function onto the stack.
623 __ Push(rdi);
624
625#ifdef V8_TARGET_OS_WIN
626 // Load the previous frame pointer to access the C arguments on the stack.
627 __ movq(kScratchRegister, Operand(rbp, 0));
628 // Load the number of arguments and set up the pointer to the arguments.
631#else // V8_TARGET_OS_WIN
632 // Load the number of arguments and set up the pointer to the arguments.
633 __ movq(rax, r8);
634 __ movq(rbx, r9);
635 __ movq(r9, kCArgRegs[3]); // Temporarily saving the receiver.
636#endif // V8_TARGET_OS_WIN
637
638 // Current stack contents:
639 // [rsp + kSystemPointerSize] : Internal frame
640 // [rsp] : function
641 // Current register contents:
642 // rax : argc
643 // rbx : argv
644 // rsi : context
645 // rdi : function
646 // rdx : new.target
647 // r9 : receiver
648
649 // Check if we have enough stack space to push all arguments.
650 // Argument count in rax.
651 Label enough_stack_space, stack_overflow;
652 __ StackOverflowCheck(rax, &stack_overflow, Label::kNear);
653 __ jmp(&enough_stack_space, Label::kNear);
654
655 __ bind(&stack_overflow);
656 __ CallRuntime(Runtime::kThrowStackOverflow);
657 // This should be unreachable.
658 __ int3();
659
660 __ bind(&enough_stack_space);
661
662 // Copy arguments to the stack.
663 // Register rbx points to array of pointers to handle locations.
664 // Push the values of these handles.
665 // rbx: Pointer to start of arguments.
666 // rax: Number of arguments.
667 Generate_PushArguments(masm, rbx, rax, rcx, ArgumentsElementType::kHandle);
668
669 // Push the receiver.
670 __ Push(r9);
671
672 // Invoke the builtin code.
673 Builtin builtin = is_construct ? Builtin::kConstruct : Builtins::Call();
674 __ CallBuiltin(builtin);
675
676 // Exit the internal frame. Notice that this also removes the empty
677 // context and the function left on the stack by the code
678 // invocation.
679 }
680
681 __ ret(0);
682}
683
684void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
686}
687
688void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
690}
691
692void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
693 // kCArgRegs[1]: microtask_queue
695 __ TailCallBuiltin(Builtin::kRunMicrotasks);
696}
697
698static void AssertCodeIsBaselineAllowClobber(MacroAssembler* masm,
699 Register code, Register scratch) {
700 // Verify that the Code object's kind is CodeKind::BASELINE.
701 __ movl(scratch, FieldOperand(code, Code::kFlagsOffset));
702 __ DecodeField<Code::KindField>(scratch);
703 __ cmpl(scratch, Immediate(static_cast<int>(CodeKind::BASELINE)));
704 __ Assert(equal, AbortReason::kExpectedBaselineData);
705}
706
707static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
708 Register scratch) {
709 DCHECK(!AreAliased(code, scratch));
710 return AssertCodeIsBaselineAllowClobber(masm, code, scratch);
711}
712
713static void CheckSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
714 Register data,
715 Register scratch,
716 Label* is_baseline,
717 Label* is_bytecode) {
718#if V8_STATIC_ROOTS_BOOL
719 __ IsObjectTypeFast(data, CODE_TYPE, scratch);
720#else
721 __ CmpObjectType(data, CODE_TYPE, scratch);
722#endif // V8_STATIC_ROOTS_BOOL
723 if (v8_flags.debug_code) {
724 Label not_baseline;
725 __ j(not_equal, &not_baseline);
726 AssertCodeIsBaseline(masm, data, scratch);
727 __ j(equal, is_baseline);
728 __ bind(&not_baseline);
729 } else {
730 __ j(equal, is_baseline);
731 }
732
733#if V8_STATIC_ROOTS_BOOL
734 // Scratch1 already contains the compressed map.
735 __ CompareInstanceTypeWithUniqueCompressedMap(scratch, INTERPRETER_DATA_TYPE);
736#else
737 // Scratch1 already contains the instance type.
738 __ CmpInstanceType(scratch, INTERPRETER_DATA_TYPE);
739#endif // V8_STATIC_ROOTS_BOOL
740 __ j(not_equal, is_bytecode, Label::kNear);
741}
742
744 MacroAssembler* masm, Register sfi, Register bytecode, Register scratch1,
745 Label* is_baseline, Label* is_unavailable) {
746 ASM_CODE_COMMENT(masm);
747 Label done;
748
749 Register data = bytecode;
750 __ LoadTrustedPointerField(
751 data, FieldOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset),
753
754 if (V8_JITLESS_BOOL) {
755 __ IsObjectType(data, INTERPRETER_DATA_TYPE, scratch1);
756 __ j(not_equal, &done, Label::kNear);
757 } else {
758 CheckSharedFunctionInfoBytecodeOrBaseline(masm, data, scratch1, is_baseline,
759 &done);
760 }
761
762 __ LoadProtectedPointerField(
763 bytecode, FieldOperand(data, InterpreterData::kBytecodeArrayOffset));
764
765 __ bind(&done);
766 __ IsObjectType(bytecode, BYTECODE_ARRAY_TYPE, scratch1);
767 __ j(not_equal, is_unavailable);
768}
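// Summary (a sketch): after the |done| label, |bytecode| is expected to hold a
// BytecodeArray, either found directly on the SharedFunctionInfo or unwrapped
// from an InterpreterData; if it does not, control goes to |is_unavailable|.
// |is_baseline| is taken earlier when the trusted function data is baseline
// Code.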
769
770// static
771void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
772 // ----------- S t a t e -------------
773 // -- rax : the value to pass to the generator
774 // -- rdx : the JSGeneratorObject to resume
775 // -- rsp[0] : return address
776 // -----------------------------------
777
778 // Store input value into generator object.
779 __ StoreTaggedField(
780 FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
782 __ Move(object, rdx);
783 __ RecordWriteField(object, JSGeneratorObject::kInputOrDebugPosOffset, rax,
786 // Check that rdx is still valid, RecordWrite might have clobbered it.
787 __ AssertGeneratorObject(rdx);
788
789 // Load suspended function and context.
790 __ LoadTaggedField(rdi,
791 FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
792 __ LoadTaggedField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
793
794 // Flood function if we are stepping.
795 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
796 Label stepping_prepared;
797 ExternalReference debug_hook =
798 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
799 Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
800 __ cmpb(debug_hook_operand, Immediate(0));
801 __ j(not_equal, &prepare_step_in_if_stepping);
802
803 // Flood function if we need to continue stepping in the suspended generator.
804 ExternalReference debug_suspended_generator =
805 ExternalReference::debug_suspended_generator_address(masm->isolate());
806 Operand debug_suspended_generator_operand =
807 masm->ExternalReferenceAsOperand(debug_suspended_generator);
808 __ cmpq(rdx, debug_suspended_generator_operand);
809 __ j(equal, &prepare_step_in_suspended_generator);
810 __ bind(&stepping_prepared);
811
812 // Check the stack for overflow. We are not trying to catch interruptions
813 // (i.e. debug break and preemption) here, so check the "real stack limit".
814 Label stack_overflow;
815 __ cmpq(rsp, __ StackLimitAsOperand(StackLimitKind::kRealStackLimit));
816 __ j(below, &stack_overflow);
817
818 // ----------- S t a t e -------------
819 // -- rdx : the JSGeneratorObject to resume
820 // -- rdi : generator function
821 // -- rsi : generator context
822 // -----------------------------------
823
824 Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r8 : no_reg;
826 Register index = r9;
827 Register return_address = r11;
828 Register params_array = rbx;
829
830 __ PopReturnAddressTo(return_address);
831
832 // Compute actual arguments count value as a formal parameter count without
833 // receiver, loaded from the dispatch table entry or shared function info.
834#if V8_ENABLE_LEAPTIERING
835 static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
836 static_assert(kJavaScriptCallDispatchHandleRegister == r15, "ABI mismatch");
837 __ movl(r15, FieldOperand(rdi, JSFunction::kDispatchHandleOffset));
838 __ LoadEntrypointAndParameterCountFromJSDispatchTable(rcx, argc, r15);
839#else
840 __ LoadTaggedField(argc,
841 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
842 __ movzxwq(argc, FieldOperand(
843 argc, SharedFunctionInfo::kFormalParameterCountOffset));
844#endif // V8_ENABLE_LEAPTIERING
845
846 // Copy the function arguments from the generator object's register file.
847 {
848 Label push_arguments, done_loop, loop;
849
850#if V8_ENABLE_LEAPTIERING
851 // In case the formal parameter count is kDontAdaptArgumentsSentinel the
852 // actual arguments count should be set accordingly.
854 __ cmpl(argc, Immediate(JSParameterCount(0)));
855 __ j(kGreaterThan, &push_arguments, Label::kNear);
856 __ movl(argc, Immediate(JSParameterCount(0)));
857 __ jmp(&done_loop, Label::kNear);
858#else
859 // Generator functions are always created from user code and thus the
860 // formal parameter count is never equal to kDontAdaptArgumentsSentinel,
861 // which is used only for certain non-generator builtin functions.
862#endif // V8_ENABLE_LEAPTIERING
863
864 __ bind(&push_arguments);
865 __ LoadTaggedField(
866 params_array,
867 FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
868
869 // Exclude receiver.
870 __ leal(index, Operand(argc, -1));
871
872 __ bind(&loop);
873 __ decl(index);
874 __ j(kLessThan, &done_loop, Label::kNear);
875 __ PushTaggedField(FieldOperand(params_array, index, times_tagged_size,
876 OFFSET_OF_DATA_START(FixedArray)),
877 decompr_scratch1);
878 __ jmp(&loop);
879 __ bind(&done_loop);
880
881 // Push the receiver.
882 __ PushTaggedField(FieldOperand(rdx, JSGeneratorObject::kReceiverOffset),
883 decompr_scratch1);
884 }
885
886 // Underlying function needs to have bytecode available.
887 if (v8_flags.debug_code) {
888 Label is_baseline, is_unavailable, ok;
889 Register scratch = ReassignRegister(params_array);
890 __ LoadTaggedField(
891 scratch, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
892 GetSharedFunctionInfoBytecodeOrBaseline(masm, scratch, scratch,
893 kScratchRegister, &is_baseline,
894 &is_unavailable);
895 __ jmp(&ok);
896
897 __ bind(&is_unavailable);
898 __ Abort(AbortReason::kMissingBytecodeArray);
899
900 __ bind(&is_baseline);
901 __ IsObjectType(scratch, CODE_TYPE, scratch);
902 __ Assert(equal, AbortReason::kMissingBytecodeArray);
903
904 __ bind(&ok);
905 }
906
907 // Resume (Ignition/TurboFan) generator object.
908 {
909 __ PushReturnAddressFrom(return_address);
910 // We abuse new.target both to indicate that this is a resume call and to
911 // pass in the generator object. In ordinary calls, new.target is always
912 // undefined because generator functions are non-constructable.
913 static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
914#if V8_ENABLE_LEAPTIERING
915 // Actual arguments count and code start are already initialized above.
916 __ jmp(rcx);
917#else
918 // Actual arguments count is already initialized above.
919 __ JumpJSFunction(rdi);
920#endif // V8_ENABLE_LEAPTIERING
921 }
922
923 __ bind(&prepare_step_in_if_stepping);
924 {
925 FrameScope scope(masm, StackFrame::INTERNAL);
926 __ Push(rdx);
927 __ Push(rdi);
928 // Push hole as receiver since we do not use it for stepping.
929 __ PushRoot(RootIndex::kTheHoleValue);
930 __ CallRuntime(Runtime::kDebugOnFunctionCall);
931 __ Pop(rdx);
932 __ LoadTaggedField(rdi,
933 FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
934 }
935 __ jmp(&stepping_prepared);
936
937 __ bind(&prepare_step_in_suspended_generator);
938 {
939 FrameScope scope(masm, StackFrame::INTERNAL);
940 __ Push(rdx);
941 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
942 __ Pop(rdx);
943 __ LoadTaggedField(rdi,
944 FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
945 }
946 __ jmp(&stepping_prepared);
947
948 __ bind(&stack_overflow);
949 {
950 FrameScope scope(masm, StackFrame::INTERNAL);
951 __ CallRuntime(Runtime::kThrowStackOverflow);
952 __ int3(); // This should be unreachable.
953 }
954}
955
956static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
957 Register scratch2) {
958 ASM_CODE_COMMENT(masm);
959 Register params_size = scratch1;
960 // Get the size of the formal parameters (in bytes).
961 __ movq(params_size,
963 __ movzxwl(params_size,
964 FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));
965
966 Register actual_params_size = scratch2;
967 // Compute the size of the actual parameters (in bytes).
968 __ movq(actual_params_size,
970
971 // If the actual size is bigger than the formal size, use it so that all
972 // stack arguments are dropped.
973 __ cmpq(params_size, actual_params_size);
974 __ cmovq(kLessThan, params_size, actual_params_size);
975
976 // Leave the frame (also dropping the register file).
977 __ leave();
978
979 // Drop receiver + arguments.
980 __ DropArguments(params_size, scratch2);
981}
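// In short (a sketch of the effect): the interpreter frame is torn down and
// max(formal parameter count, actual argument count) worth of argument slots
// is dropped, so calling a function with more arguments than it declares
// still pops everything that was pushed.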
982
983// Tail-call |function_id| if |actual_state| == |expected_state|
984// Advance the current bytecode offset. This simulates what all bytecode
985// handlers do upon completion of the underlying operation. Will bail out to a
986// label if the bytecode (without prefix) is a return bytecode. Will not advance
987// the bytecode offset if the current bytecode is a JumpLoop, instead just
988// re-executing the JumpLoop to jump to the correct bytecode.
989static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
990 Register bytecode_array,
991 Register bytecode_offset,
992 Register bytecode, Register scratch1,
993 Register scratch2, Label* if_return) {
994 ASM_CODE_COMMENT(masm);
995 Register bytecode_size_table = scratch1;
996
997 // The bytecode offset value will be increased by one in wide and extra wide
998 // cases. In the case of a wide or extra wide JumpLoop bytecode, we will
999 // restore the original bytecode offset. To simplify the code, we keep a
1000 // backup of it in |original_bytecode_offset|.
1001 Register original_bytecode_offset = scratch2;
1002 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode,
1003 bytecode_size_table, original_bytecode_offset));
1004
1005 __ movq(original_bytecode_offset, bytecode_offset);
1006
1007 __ Move(bytecode_size_table,
1008 ExternalReference::bytecode_size_table_address());
1009
1010 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
1011 Label process_bytecode, extra_wide;
1012 static_assert(0 == static_cast<int>(interpreter::Bytecode::kWide));
1013 static_assert(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
1014 static_assert(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
1015 static_assert(3 ==
1016 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
1017 __ cmpb(bytecode, Immediate(0x3));
1018 __ j(above, &process_bytecode, Label::kNear);
1019 // The code to load the next bytecode is common to both wide and extra wide.
1020 // We can hoist them up here. incl has to happen before testb since it
1021 // modifies the ZF flag.
1022 __ incl(bytecode_offset);
1023 __ testb(bytecode, Immediate(0x1));
1024 __ movzxbq(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
1025 __ j(not_equal, &extra_wide, Label::kNear);
1026
1027 // Update table to the wide scaled table.
1028 __ addq(bytecode_size_table,
1030 __ jmp(&process_bytecode, Label::kNear);
1031
1032 __ bind(&extra_wide);
1033 // Update table to the extra wide scaled table.
1034 __ addq(bytecode_size_table,
1036
1037 __ bind(&process_bytecode);
1038
1039// Bailout to the return label if this is a return bytecode.
1040#define JUMP_IF_EQUAL(NAME) \
1041 __ cmpb(bytecode, \
1042 Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
1043 __ j(equal, if_return, Label::kFar);
1045#undef JUMP_IF_EQUAL
1046
1047 // If this is a JumpLoop, re-execute it to perform the jump to the beginning
1048 // of the loop.
1049 Label end, not_jump_loop;
1050 __ cmpb(bytecode,
1051 Immediate(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
1052 __ j(not_equal, &not_jump_loop, Label::kNear);
1053 // We need to restore the original bytecode_offset since we might have
1054 // increased it to skip the wide / extra-wide prefix bytecode.
1055 __ movq(bytecode_offset, original_bytecode_offset);
1056 __ jmp(&end, Label::kNear);
1057
1058 __ bind(&not_jump_loop);
1059 // Otherwise, load the size of the current bytecode and advance the offset.
1060 __ movzxbl(kScratchRegister,
1061 Operand(bytecode_size_table, bytecode, times_1, 0));
1062 __ addl(bytecode_offset, kScratchRegister);
1063
1064 __ bind(&end);
1065}
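// Example (a sketch): for a Wide-prefixed bytecode the code above skips the
// one-byte prefix, reloads the real bytecode, and takes its size from the
// wide-scaled size table; only kJumpLoop keeps the original (pre-prefix)
// offset so the loop header is re-executed instead of being skipped.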
1066
1067namespace {
1068
1069void ResetSharedFunctionInfoAge(MacroAssembler* masm, Register sfi) {
1070 __ movw(FieldOperand(sfi, SharedFunctionInfo::kAgeOffset), Immediate(0));
1071}
1072
1073void ResetJSFunctionAge(MacroAssembler* masm, Register js_function) {
1074 const Register shared_function_info(kScratchRegister);
1075 __ LoadTaggedField(
1076 shared_function_info,
1077 FieldOperand(js_function, JSFunction::kSharedFunctionInfoOffset));
1078 ResetSharedFunctionInfoAge(masm, shared_function_info);
1079}
1080
1081void ResetFeedbackVectorOsrUrgency(MacroAssembler* masm,
1082 Register feedback_vector, Register scratch) {
1083 __ movb(scratch,
1084 FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset));
1085 __ andb(scratch, Immediate(~FeedbackVector::OsrUrgencyBits::kMask));
1086 __ movb(FieldOperand(feedback_vector, FeedbackVector::kOsrStateOffset),
1087 scratch);
1088}
1089
1090} // namespace
1091
1092// Generate code for entering a JS function with the interpreter.
1093// On entry to the function the receiver and arguments have been pushed on the
1094// stack left to right.
1095//
1096// The live registers are:
1097// o rax: actual argument count
1098// o rdi: the JS function object being called
1099// o rdx: the incoming new target or generator object
1100// o rsi: our context
1101// o rbp: the caller's frame pointer
1102// o rsp: stack pointer (pointing to return address)
1103//
1104// The function builds an interpreter frame. See InterpreterFrameConstants in
1105// frame-constants.h for its layout.
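// A sketch of the frame built below, from higher to lower addresses (see
// InterpreterFrameConstants for the authoritative layout):
//
//   caller rbp
//   context
//   JS function (closure)
//   actual argument count
//   BytecodeArray
//   bytecode offset (as a Smi)
//   feedback vector
//   <register file, initialized to undefined>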
1107 MacroAssembler* masm, InterpreterEntryTrampolineMode mode) {
1108 Register closure = rdi;
1109
1110 // Get the bytecode array from the function object and load it into
1111 // kInterpreterBytecodeArrayRegister.
1112 const Register shared_function_info(r11);
1113 __ LoadTaggedField(
1114 shared_function_info,
1115 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1116 ResetSharedFunctionInfoAge(masm, shared_function_info);
1117
1118 // The bytecode array could have been flushed from the shared function info;
1119 // if so, call into CompileLazy.
1120 Label is_baseline, compile_lazy;
1122 masm, shared_function_info, kInterpreterBytecodeArrayRegister,
1123 kScratchRegister, &is_baseline, &compile_lazy);
1124
1125#ifdef V8_ENABLE_SANDBOX
1126 // Validate the parameter count. This protects against an attacker swapping
1127 // the bytecode (or the dispatch handle) such that the parameter count of the
1128 // dispatch entry doesn't match the one of the BytecodeArray.
1129 // TODO(saelo): instead of this validation step, it would probably be nicer
1130 // if we could store the BytecodeArray directly in the dispatch entry and
1131 // load it from there. Then we can easily guarantee that the parameter count
1132 // of the entry matches the parameter count of the bytecode.
1135 __ LoadParameterCountFromJSDispatchTable(r8, dispatch_handle);
1137 BytecodeArray::kParameterSizeOffset));
1138 __ SbxCheck(equal, AbortReason::kJSSignatureMismatch);
1139#endif // V8_ENABLE_SANDBOX
1140
1141 Label push_stack_frame;
1142 Register feedback_vector = rbx;
1143 __ LoadFeedbackVector(feedback_vector, closure, &push_stack_frame,
1144 Label::kNear);
1145
1146#ifndef V8_JITLESS
1147#ifndef V8_ENABLE_LEAPTIERING
1148 // If feedback vector is valid, check for optimized code and update invocation
1149 // count.
1150 Label flags_need_processing;
1151 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1152 feedback_vector, CodeKind::INTERPRETED_FUNCTION, &flags_need_processing);
1153#endif // !V8_ENABLE_LEAPTIERING
1154
1155 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, kScratchRegister);
1156
1157 // Increment invocation count for the function.
1158 __ incl(
1159 FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1160
1161 // Open a frame scope to indicate that there is a frame on the stack. The
1162 // MANUAL indicates that the scope shouldn't actually generate code to set up
1163 // the frame (that is done below).
1164#else
1165 // Note: By omitting the above code in jitless mode we also disable:
1166 // - kFlagsLogNextExecution: only used for logging/profiling; and
1167 // - kInvocationCountOffset: only used for tiering heuristics and code
1168 // coverage.
1169#endif // !V8_JITLESS
1170
1171 __ bind(&push_stack_frame);
1172 FrameScope frame_scope(masm, StackFrame::MANUAL);
1173 __ pushq(rbp); // Caller's frame pointer.
1174 __ movq(rbp, rsp);
1175 __ Push(kContextRegister); // Callee's context.
1176 __ Push(kJavaScriptCallTargetRegister); // Callee's JS function.
1177 __ Push(kJavaScriptCallArgCountRegister); // Actual argument count.
1178
1179 // Load initial bytecode offset.
1182
1183 // Push bytecode array and Smi tagged bytecode offset.
1186 __ Push(rcx);
1187
1188 // Push feedback vector.
1189 __ Push(feedback_vector);
1190
1191 // Allocate the local and temporary register file on the stack.
1192 Label stack_overflow;
1193 {
1194 // Load frame size from the BytecodeArray object.
1196 BytecodeArray::kFrameSizeOffset));
1197
1198 // Do a stack check to ensure we don't go over the limit.
1199 __ movq(rax, rsp);
1200 __ subq(rax, rcx);
1201 __ cmpq(rax, __ StackLimitAsOperand(StackLimitKind::kRealStackLimit));
1202 __ j(below, &stack_overflow);
1203
1204 // If ok, push undefined as the initial value for all register file entries.
1205 Label loop_header;
1206 Label loop_check;
1207 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1208 __ jmp(&loop_check, Label::kNear);
1209 __ bind(&loop_header);
1210 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1212 // Continue loop if not done.
1213 __ bind(&loop_check);
1214 __ subq(rcx, Immediate(kSystemPointerSize));
1215 __ j(greater_equal, &loop_header, Label::kNear);
1216 }
1217
1218 // If the bytecode array has a valid incoming new target or generator object
1219 // register, initialize it with the incoming value that was passed in rdx.
1220 Label no_incoming_new_target_or_generator_register;
1221 __ movsxlq(
1222 rcx,
1224 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1225 __ testl(rcx, rcx);
1226 __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
1227 __ movq(Operand(rbp, rcx, times_system_pointer_size, 0), rdx);
1228 __ bind(&no_incoming_new_target_or_generator_register);
1229
1230 // Perform interrupt stack check.
1231 // TODO(solanes): Merge with the real stack limit check above.
1232 Label stack_check_interrupt, after_stack_check_interrupt;
1233 __ cmpq(rsp, __ StackLimitAsOperand(StackLimitKind::kInterruptStackLimit));
1234 __ j(below, &stack_check_interrupt);
1235 __ bind(&after_stack_check_interrupt);
1236
1237 // The accumulator is already loaded with undefined.
1238
1239 // Load the dispatch table into a register and dispatch to the bytecode
1240 // handler at the current bytecode offset.
1241 Label do_dispatch;
1242 __ bind(&do_dispatch);
1243 __ Move(
1245 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1246 __ movzxbq(kScratchRegister,
1252
1253 // X64 has this location as the interpreter_entry_return_offset for CET
1254 // shadow stack rather than after `call`. InterpreterEnterBytecode will
1255 // jump to this location and call kJavaScriptCallCodeStartRegister, which
1256 // will form the valid shadow stack.
1257 __ RecordComment("--- InterpreterEntryPC point ---");
1259 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(
1260 masm->pc_offset());
1261 } else {
1263 // Both versions must be the same up to this point, otherwise the builtins
1264 // will not be interchangeable.
1265 CHECK_EQ(
1266 masm->isolate()->heap()->interpreter_entry_return_pc_offset().value(),
1267 masm->pc_offset());
1268 }
1270
1271 // Any returns to the entry trampoline are either due to the return bytecode
1272 // or the interpreter tail calling a builtin and then a dispatch.
1273
1274 // Get bytecode array and bytecode offset from the stack frame.
1277 __ SmiUntagUnsigned(
1280
1281 // Either return, or advance to the next bytecode and dispatch.
1282 Label do_return;
1283 __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
1287 r8, &do_return);
1288 __ jmp(&do_dispatch);
1289
1290 __ bind(&do_return);
1291 // The return value is in rax.
1292 LeaveInterpreterFrame(masm, rbx, rcx);
1293 __ ret(0);
1294
1295 __ bind(&stack_check_interrupt);
1296 // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
1297 // for the call to the StackGuard.
1301 __ CallRuntime(Runtime::kStackGuard);
1302
1303 // After the call, restore the bytecode array, bytecode offset and accumulator
1304 // registers again. Also, restore the bytecode offset in the stack to its
1305 // previous value.
1310 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1311
1314
1315 __ jmp(&after_stack_check_interrupt);
1316
1317 __ bind(&compile_lazy);
1318 __ GenerateTailCallToReturnedCode(Runtime::kCompileLazy);
1319 __ int3(); // Should not return.
1320
1321#ifndef V8_JITLESS
1322#ifndef V8_ENABLE_LEAPTIERING
1323 __ bind(&flags_need_processing);
1324 __ OptimizeCodeOrTailCallOptimizedCodeSlot(feedback_vector, closure,
1326#endif // !V8_ENABLE_LEAPTIERING
1327
1328 __ bind(&is_baseline);
1329 {
1330#ifndef V8_ENABLE_LEAPTIERING
1331 // Load the feedback vector from the closure.
1332 TaggedRegister feedback_cell(feedback_vector);
1333 __ LoadTaggedField(feedback_cell,
1334 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1335 __ LoadTaggedField(feedback_vector,
1336 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1337
1338 Label install_baseline_code;
1339 // Check if feedback vector is valid. If not, call prepare for baseline to
1340 // allocate it.
1341 __ IsObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, rcx);
1342 __ j(not_equal, &install_baseline_code);
1343
1344 // Check the tiering state.
1345 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1346 feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1347
1348 // TODO(olivf, 42204201): This fastcase is difficult to support with the
1349 // sandbox as it requires getting write access to the dispatch table. See
1350 // `JSFunction::UpdateCode`. We might want to remove it for all
1351 // configurations as it does not seem to be performance sensitive.
1352
1353 // Load the baseline code into the closure.
1355 static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
1356 __ ReplaceClosureCodeWithOptimizedCode(
1359 __ JumpCodeObject(rcx, kJSEntrypointTag);
1360
1361 __ bind(&install_baseline_code);
1362#endif // !V8_ENABLE_LEAPTIERING
1363
1364 __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode);
1365 }
1366#endif // !V8_JITLESS
1367
1368 __ bind(&stack_overflow);
1369 __ CallRuntime(Runtime::kThrowStackOverflow);
1370 __ int3(); // Should not return.
1371}
1372
1373static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
1374 Register start_address,
1375 Register scratch) {
1376 ASM_CODE_COMMENT(masm);
1377 // Find the argument with lowest address.
1378 __ movq(scratch, num_args);
1379 __ negq(scratch);
1380 __ leaq(start_address,
1381 Operand(start_address, scratch, times_system_pointer_size,
1383 // Push the arguments.
1384 __ PushArray(start_address, num_args, scratch,
1386}
1387
1388// static
1390 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1393 // ----------- S t a t e -------------
1394 // -- rax : the number of arguments
1395 // -- rbx : the address of the first argument to be pushed. Subsequent
1396 // arguments should be consecutive above this, in the same order as
1397 // they are to be pushed onto the stack.
1398 // -- rdi : the target to call (can be any Object).
1399 // -----------------------------------
1400 Label stack_overflow;
1401
1403 // The spread argument should not be pushed.
1404 __ decl(rax);
1405 }
1406
1407 __ movl(rcx, rax);
1408 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1409 __ decl(rcx); // Exclude receiver.
1410 }
1411
1412 // Add a stack check before pushing arguments.
1413 __ StackOverflowCheck(rcx, &stack_overflow);
1414
1415 // Pop return address to allow tail-call after pushing arguments.
1416 __ PopReturnAddressTo(kScratchRegister);
1417
1418 // rbx and rdx will be modified.
1419 GenerateInterpreterPushArgs(masm, rcx, rbx, rdx);
1420
1421 // Push "undefined" as the receiver arg if we need to.
1422 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1423 __ PushRoot(RootIndex::kUndefinedValue);
1424 }
1425
1427 // Pass the spread in the register rbx.
1428 // rbx already points to the penultimate argument, the spread
1429 // is below that.
1430 __ movq(rbx, Operand(rbx, -kSystemPointerSize));
1431 }
1432
1433 // Call the target.
1434 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
1435
1437 __ TailCallBuiltin(Builtin::kCallWithSpread);
1438 } else {
1439 __ TailCallBuiltin(Builtins::Call(receiver_mode));
1440 }
1441
1442 // Throw stack overflow exception.
1443 __ bind(&stack_overflow);
1444 {
1445 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1446 // This should be unreachable.
1447 __ int3();
1448 }
1449}
1450
1451// static
1453 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1454 // ----------- S t a t e -------------
1455 // -- rax : the number of arguments
1456 // -- rdx : the new target (either the same as the constructor or
1457 // the JSFunction on which new was invoked initially)
1458 // -- rdi : the constructor to call (can be any Object)
1459 // -- rbx : the allocation site feedback if available, undefined otherwise
1460 // -- rcx : the address of the first argument to be pushed. Subsequent
1461 // arguments should be consecutive above this, in the same order as
1462 // they are to be pushed onto the stack.
1463 // -----------------------------------
1464 Label stack_overflow;
1465
1466 // Add a stack check before pushing arguments.
1467 __ StackOverflowCheck(rax, &stack_overflow);
1468
1469 // Pop return address to allow tail-call after pushing arguments.
1470 __ PopReturnAddressTo(kScratchRegister);
1471
1473 // The spread argument should not be pushed.
1474 __ decl(rax);
1475 }
1476
1477 // rcx and r8 will be modified.
1478 Register argc_without_receiver = r11;
1479 __ leaq(argc_without_receiver, Operand(rax, -kJSArgcReceiverSlots));
1480 GenerateInterpreterPushArgs(masm, argc_without_receiver, rcx, r8);
1481
1482 // Push slot for the receiver to be constructed.
1483 __ Push(Immediate(0));
1484
1486 // Pass the spread in the register rbx.
1487 __ movq(rbx, Operand(rcx, -kSystemPointerSize));
1488 // Push return address in preparation for the tail-call.
1489 __ PushReturnAddressFrom(kScratchRegister);
1490 } else {
1491 __ PushReturnAddressFrom(kScratchRegister);
1492 __ AssertUndefinedOrAllocationSite(rbx);
1493 }
1494
1496 // Tail call to the array construct stub (still in the caller
1497 // context at this point).
1498 __ AssertFunction(rdi);
1499 // Jump to the constructor function (rax, rbx, rdx passed on).
1500 __ TailCallBuiltin(Builtin::kArrayConstructorImpl);
1501 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1502 // Call the constructor (rax, rdx, rdi passed on).
1503 __ TailCallBuiltin(Builtin::kConstructWithSpread);
1504 } else {
1506 // Call the constructor (rax, rdx, rdi passed on).
1507 __ TailCallBuiltin(Builtin::kConstruct);
1508 }
1509
1510 // Throw stack overflow exception.
1511 __ bind(&stack_overflow);
1512 {
1513 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1514 // This should be unreachable.
1515 __ int3();
1516 }
1517}
1518
1519// static
1521 MacroAssembler* masm, ForwardWhichFrame which_frame) {
1522 // ----------- S t a t e -------------
1523 // -- rdx : the new target (either the same as the constructor or
1524 // the JSFunction on which new was invoked initially)
1525 // -- rdi : the constructor to call (can be any Object)
1526 // -----------------------------------
1527 Label stack_overflow;
1528
1529 // Load the frame pointer into rcx.
1530 switch (which_frame) {
1532 __ movq(rcx, rbp);
1533 break;
1535 __ movq(rcx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1536 break;
1537 }
1538
1539 // Load the argument count into rax.
1540 __ movq(rax, Operand(rcx, StandardFrameConstants::kArgCOffset));
1541
1542 // Add a stack check before copying arguments.
1543 __ StackOverflowCheck(rax, &stack_overflow);
1544
1545 // Pop return address to allow tail-call after forwarding arguments.
1546 __ PopReturnAddressTo(kScratchRegister);
1547
1548 // Point rcx to the base of the argument list to forward, excluding the
1549 // receiver.
1550 __ addq(rcx, Immediate((StandardFrameConstants::kFixedSlotCountAboveFp + 1) *
1552
1553 // Copy the arguments on the stack. r8 is a scratch register.
1554 Register argc_without_receiver = r11;
1555 __ leaq(argc_without_receiver, Operand(rax, -kJSArgcReceiverSlots));
1556 __ PushArray(rcx, argc_without_receiver, r8);
1557
1558 // Push slot for the receiver to be constructed.
1559 __ Push(Immediate(0));
1560
1561 __ PushReturnAddressFrom(kScratchRegister);
1562
1563 // Call the constructor (rax, rdx, rdi passed on).
1564 __ TailCallBuiltin(Builtin::kConstruct);
1565
1566 // Throw stack overflow exception.
1567 __ bind(&stack_overflow);
1568 {
1569 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1570 // This should be unreachable.
1571 __ int3();
1572 }
1573}
1574
1575namespace {
1576
1577void NewImplicitReceiver(MacroAssembler* masm) {
1578 // ----------- S t a t e -------------
1579 // -- rax : the number of arguments
1580 // -- rdx : the new target
1581 // -- rdi : the constructor to call (checked to be a JSFunction)
1582 //
1583 // Stack:
1584 // -- Implicit Receiver
1585 // -- [arguments without receiver]
1586 // -- Implicit Receiver
1587 // -- Context
1588 // -- FastConstructMarker
1589 // -- FramePointer
1590 // -----------------------------------
1591 Register implicit_receiver = rcx;
1592
1593 // Save live registers.
1594 __ SmiTag(rax);
1595 __ Push(rax); // Number of arguments
1596 __ Push(rdx); // NewTarget
1597 __ Push(rdi); // Target
1598 __ CallBuiltin(Builtin::kFastNewObject);
1599 // Save result.
1600 __ movq(implicit_receiver, rax);
1601 // Restore live registers.
1602 __ Pop(rdi);
1603 __ Pop(rdx);
1604 __ Pop(rax);
1605 __ SmiUntagUnsigned(rax);
1606
1607 // Patch implicit receiver (in arguments)
1608 __ movq(Operand(rsp, 0 /* first argument */), implicit_receiver);
1609 // Patch second implicit (in construct frame)
1610 __ movq(Operand(rbp, FastConstructFrameConstants::kImplicitReceiverOffset),
1611 implicit_receiver);
1612
1613 // Restore context.
1614 __ movq(rsi, Operand(rbp, FastConstructFrameConstants::kContextOffset));
1615}
1616
1617} // namespace
1618
1619// static
1620void Builtins::Generate_InterpreterPushArgsThenFastConstructFunction(
1621 MacroAssembler* masm) {
1622 // ----------- S t a t e -------------
1623 // -- rax : the number of arguments
1624 // -- rdx : the new target
1625 // -- rdi : the constructor to call (checked to be a JSFunction)
1626 // -- rcx : the address of the first argument to be pushed. Subsequent
1627 // arguments should be consecutive above this, in the same order as
1628 // they are to be pushed onto the stack.
1629 // -----------------------------------
1630 __ AssertFunction(rdi);
1631
1632 // Check if target has a [[Construct]] internal method.
1633 Label non_constructor;
1634 __ LoadMap(kScratchRegister, rdi);
1635 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
1636 Immediate(Map::Bits1::IsConstructorBit::kMask));
1637 __ j(zero, &non_constructor);
1638
1639 // Add a stack check before pushing arguments.
1640 Label stack_overflow;
1641 __ StackOverflowCheck(rax, &stack_overflow);
1642
1643 // Enter a construct frame.
1644 FrameScope scope(masm, StackFrame::MANUAL);
1645 __ EnterFrame(StackFrame::FAST_CONSTRUCT);
1646 __ Push(rsi);
1647 // Implicit receiver stored in the construct frame.
1648 __ PushRoot(RootIndex::kTheHoleValue);
1649
1650 // Push arguments + implicit receiver.
1651 Register argc_without_receiver = r11;
1652 __ leaq(argc_without_receiver, Operand(rax, -kJSArgcReceiverSlots));
1653 GenerateInterpreterPushArgs(masm, argc_without_receiver, rcx, r12);
1654 // Implicit receiver as part of the arguments (patched later if needed).
1655 __ PushRoot(RootIndex::kTheHoleValue);
1656
1657 // Check if it is a builtin call.
1658 Label builtin_call;
1659 const TaggedRegister shared_function_info(kScratchRegister);
1660 __ LoadTaggedField(shared_function_info,
1661 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1662 __ testl(FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset),
1663 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1664 __ j(not_zero, &builtin_call);
1665
1666 // Check if we need to create an implicit receiver.
1667 Label not_create_implicit_receiver;
1668 __ movl(kScratchRegister,
1669 FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset));
1670 __ DecodeField<SharedFunctionInfo::FunctionKindBits>(kScratchRegister);
1671 __ JumpIfIsInRange(
1672 kScratchRegister,
1673 static_cast<uint32_t>(FunctionKind::kDefaultDerivedConstructor),
1674 static_cast<uint32_t>(FunctionKind::kDerivedConstructor),
1675 &not_create_implicit_receiver, Label::kNear);
1676 NewImplicitReceiver(masm);
1677 __ bind(&not_create_implicit_receiver);
1678
1679 // Call the function.
1680 __ InvokeFunction(rdi, rdx, rax, InvokeType::kCall);
1681
1682 // ----------- S t a t e -------------
1683 // -- rax constructor result
1684 //
1685 // Stack:
1686 // -- Implicit Receiver
1687 // -- Context
1688 // -- FastConstructMarker
1689 // -- FramePointer
1690 // -----------------------------------
1691
1692 Label deopt_entry;
1693 __ bind(&deopt_entry);
1694
1695 // If the result is an object (in the ECMA sense), we should get rid
1696 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
1697 // on page 74.
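 // For example, for `function C() { return {a: 1}; }` the expression `new C()`
 // evaluates to the returned object, whereas for `function D() { return 42; }`
 // it evaluates to the implicit receiver, because primitive return values are
 // ignored by [[Construct]].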
1698 Label use_receiver, do_throw, leave_and_return, check_result;
1699
1700 // If the result is undefined, we'll use the implicit receiver. Otherwise we
1701 // do a smi check and fall through to check if the return value is a valid
1702 // receiver.
1703 __ JumpIfNotRoot(rax, RootIndex::kUndefinedValue, &check_result,
1704 Label::kNear);
1705
1706 // Throw away the result of the constructor invocation and use the
1707 // on-stack receiver as the result.
1708 __ bind(&use_receiver);
1709 __ movq(rax,
1710 Operand(rbp, FastConstructFrameConstants::kImplicitReceiverOffset));
1711 __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
1712
1713 __ bind(&leave_and_return);
1714 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1715 __ ret(0);
1716
1717 // If the result is a smi, it is *not* an object in the ECMA sense.
1718 __ bind(&check_result);
1719 __ JumpIfSmi(rax, &use_receiver, Label::kNear);
1720
1721 // Check if the type of the result is not an object in the ECMA sense.
1722 __ JumpIfJSAnyIsNotPrimitive(rax, rcx, &leave_and_return, Label::kNear);
1723 __ jmp(&use_receiver);
1724
1725 __ bind(&do_throw);
1726 __ movq(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
1727 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
1728 // We don't return here.
1729 __ int3();
1730
1731 __ bind(&builtin_call);
1732 // TODO(victorgomes): Check the possibility to turn this into a tailcall.
1733 __ InvokeFunction(rdi, rdx, rax, InvokeType::kCall);
1734 __ LeaveFrame(StackFrame::FAST_CONSTRUCT);
1735 __ ret(0);
1736
1737 // Called Construct on an Object that doesn't have a [[Construct]] internal
1738 // method.
1739 __ bind(&non_constructor);
1740 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
1741
1742 // Throw stack overflow exception.
1743 __ bind(&stack_overflow);
1744 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1745 // This should be unreachable.
1746 __ int3();
1747
1748 Generate_CallToAdaptShadowStackForDeopt(masm, false);
1749 // Store offset of return address for deoptimizer.
1750 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
1751 masm->pc_offset());
1752 __ jmp(&deopt_entry);
1753}
1754
1755static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1756 // Set the return address to the correct point in the interpreter entry
1757 // trampoline.
1758 Label builtin_trampoline, trampoline_loaded;
1759 Tagged<Smi> interpreter_entry_return_pc_offset(
1760 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1761 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1762
1763 // If the SFI function_data is an InterpreterData, the function will have a
1764 // custom copy of the interpreter entry trampoline for profiling. If so,
1765 // get the custom trampoline, otherwise grab the entry address of the global
1766 // trampoline.
1767 __ movq(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
1768 const Register shared_function_info(rbx);
1769 __ LoadTaggedField(shared_function_info,
1770 FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
1771
1772 __ LoadTrustedPointerField(
1773 rbx,
1774 FieldOperand(shared_function_info,
1775 SharedFunctionInfo::kTrustedFunctionDataOffset),
1777 __ IsObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
1778 __ j(not_equal, &builtin_trampoline, Label::kNear);
1779 __ LoadProtectedPointerField(
1780 rbx, FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
1781 __ LoadCodeInstructionStart(rbx, rbx, kJSEntrypointTag);
1782 __ jmp(&trampoline_loaded, Label::kNear);
1783
1784 __ bind(&builtin_trampoline);
1785 // TODO(jgruber): Replace this by a lookup in the builtin entry table.
1786 __ movq(rbx,
1787 __ ExternalReferenceAsOperand(
1788 ExternalReference::
1789 address_of_interpreter_entry_trampoline_instruction_start(
1790 masm->isolate()),
1791 kScratchRegister));
1792
1793 __ bind(&trampoline_loaded);
1794 __ addq(rbx, Immediate(interpreter_entry_return_pc_offset.value()));
1795 __ movq(kScratchRegister, rbx);
1796
1797 // Initialize dispatch table register.
1798 __ Move(
1799 kInterpreterDispatchTableRegister,
1800 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1801
1802 // Get the bytecode array pointer from the frame.
1803 __ movq(kInterpreterBytecodeArrayRegister,
1804 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1805
1806 if (v8_flags.debug_code) {
1807 // Check function data field is actually a BytecodeArray object.
1809 __ IsObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1811 __ Assert(
1812 equal,
1813 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1814 }
1815
1816 // Get the target bytecode offset from the frame.
1817 __ SmiUntagUnsigned(
1818 kInterpreterBytecodeOffsetRegister,
1819 Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1820
1821 if (v8_flags.debug_code) {
1822 Label okay;
1823 __ cmpq(kInterpreterBytecodeOffsetRegister,
1824 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
1825 __ j(greater_equal, &okay, Label::kNear);
1826 __ int3();
1827 __ bind(&okay);
1828 }
1829
1830 // Dispatch to the target bytecode.
1831 __ movzxbq(kScratchRegister,
1837
1838 // Jump to the interpreter entry, and call kJavaScriptCallCodeStartRegister.
1839 __ jmp(rbx, /*notrack=*/true);
1840}
1841
1842void Builtins::Generate_InterpreterEnterAtNextBytecode(MacroAssembler* masm) {
1843 Generate_CallToAdaptShadowStackForDeopt(masm, true);
1844 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
1845 masm->pc_offset());
1846
1847 // Get bytecode array and bytecode offset from the stack frame.
1848 __ movq(kInterpreterBytecodeArrayRegister,
1849 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1850 __ SmiUntagUnsigned(
1851 kInterpreterBytecodeOffsetRegister,
1852 Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1853
1854 Label enter_bytecode, function_entry_bytecode;
1855 __ cmpq(kInterpreterBytecodeOffsetRegister,
1856 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
1857 kFunctionEntryBytecodeOffset));
1858 __ j(equal, &function_entry_bytecode);
1859
1860 // Load the current bytecode.
1861 __ movzxbq(rbx, Operand(kInterpreterBytecodeArrayRegister,
1862 kInterpreterBytecodeOffsetRegister, times_1, 0));
1863
1864 // Advance to the next bytecode.
1865 Label if_return;
1868 r8, &if_return);
1869
1870 __ bind(&enter_bytecode);
1871 // Convert new bytecode offset to a Smi and save in the stackframe.
1872 __ SmiTag(kInterpreterBytecodeOffsetRegister);
1873 __ movq(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
1874 kInterpreterBytecodeOffsetRegister);
1875
1876 Generate_InterpreterEnterBytecode(masm);
1877
1878 __ bind(&function_entry_bytecode);
1879 // If the code deoptimizes during the implicit function entry stack interrupt
1880 // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
1881 // not a valid bytecode offset. Detect this case and advance to the first
1882 // actual bytecode.
1883 __ Move(kInterpreterBytecodeOffsetRegister,
1884 BytecodeArray::kHeaderSize - kHeapObjectTag);
1885 __ jmp(&enter_bytecode);
1886
1887 // We should never take the if_return path.
1888 __ bind(&if_return);
1889 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1890}
1891
1892void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
1893 Generate_CallToAdaptShadowStackForDeopt(masm, true);
1894 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
1895 masm->pc_offset());
1896
1897 Generate_InterpreterEnterBytecode(masm);
1898}
1899
1900// static
1901void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
1902 Register feedback_cell = r8;
1903 Register feedback_vector = r9;
1904 Register return_address = r11;
1905
1906#ifdef DEBUG
1907 for (Register reg : BaselineOutOfLinePrologueDescriptor::registers()) {
1908 DCHECK(!AreAliased(feedback_vector, return_address, reg));
1909 }
1910#endif
1911
1912 auto descriptor =
1913 Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
1914 Register closure = descriptor.GetRegisterParameter(
1915 BaselineOutOfLinePrologueDescriptor::kClosure);
1916 // Load the feedback cell and vector from the closure.
1917 __ LoadTaggedField(feedback_cell,
1918 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
1919 __ LoadTaggedField(feedback_vector,
1920 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
1921 __ AssertFeedbackVector(feedback_vector, kScratchRegister);
1922
1923#ifndef V8_ENABLE_LEAPTIERING
1924 // Check the tiering state.
1925 Label flags_need_processing;
1926 __ CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
1927 feedback_vector, CodeKind::BASELINE, &flags_need_processing);
1928#endif // !V8_ENABLE_LEAPTIERING
1929
1930 ResetFeedbackVectorOsrUrgency(masm, feedback_vector, kScratchRegister);
1931
1932 // Increment invocation count for the function.
1933 __ incl(
1934 FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
1935
1936 // Save the return address, so that we can push it to the end of the newly
1937 // set-up frame once we're done setting it up.
1938 __ PopReturnAddressTo(return_address);
1939 FrameScope frame_scope(masm, StackFrame::MANUAL);
1940 {
1941 ASM_CODE_COMMENT_STRING(masm, "Frame Setup");
1942 __ EnterFrame(StackFrame::BASELINE);
1943
1944 __ Push(descriptor.GetRegisterParameter(
1945 BaselineOutOfLinePrologueDescriptor::kCalleeContext)); // Callee's
1946 // context.
1947 Register callee_js_function = descriptor.GetRegisterParameter(
1948 BaselineOutOfLinePrologueDescriptor::kClosure);
1949 DCHECK_EQ(callee_js_function, kJavaScriptCallTargetRegister);
1950 DCHECK_EQ(callee_js_function, kJSFunctionRegister);
1951 ResetJSFunctionAge(masm, callee_js_function);
1952 __ Push(callee_js_function); // Callee's JS function.
1953 __ Push(descriptor.GetRegisterParameter(
1954 BaselineOutOfLinePrologueDescriptor::
1955 kJavaScriptCallArgCount)); // Actual argument count.
1956
1957 // We'll use the bytecode for both code age/OSR resetting, and pushing
1958 // onto the frame, so load it into a register.
1959 Register bytecode_array = descriptor.GetRegisterParameter(
1960 BaselineOutOfLinePrologueDescriptor::kInterpreterBytecodeArray);
1961 __ Push(bytecode_array);
1962 __ Push(feedback_cell);
1963 __ Push(feedback_vector);
1964 }
1965
1966 Register new_target = descriptor.GetRegisterParameter(
1967 BaselineOutOfLinePrologueDescriptor::kJavaScriptCallNewTarget);
1968
1969 Label call_stack_guard;
1970 Register frame_size = descriptor.GetRegisterParameter(
1971 BaselineOutOfLinePrologueDescriptor::kStackFrameSize);
1972 {
1973 ASM_CODE_COMMENT_STRING(masm, " Stack/interrupt check");
1974 // Stack check. This folds the checks for both the interrupt stack limit
1975 // check and the real stack limit into one by just checking for the
1976 // interrupt limit. The interrupt limit is either equal to the real stack
1977 // limit or tighter. By ensuring we have space until that limit after
1978 // building the frame we can quickly precheck both at once.
1979 //
1980 // TODO(v8:11429): Backport this folded check to the
1981 // InterpreterEntryTrampoline.
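 // Note: since the interrupt limit is at an address no lower than the real
 // stack limit, `rsp - frame_size >= interrupt_limit` also implies
 // `rsp - frame_size >= real_limit`, so the single comparison below covers
 // both checks.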
1982 __ Move(kScratchRegister, rsp);
1983 DCHECK_NE(frame_size, new_target);
1984 __ subq(kScratchRegister, frame_size);
1985 __ cmpq(kScratchRegister,
1986 __ StackLimitAsOperand(StackLimitKind::kInterruptStackLimit));
1987 __ j(below, &call_stack_guard);
1988 }
1989
1990 // Push the return address back onto the stack for return.
1991 __ PushReturnAddressFrom(return_address);
1992 // Return to the caller-pushed pc, without any frame teardown.
1993 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1994 __ Ret();
1995
1996#ifndef V8_ENABLE_LEAPTIERING
1997 __ bind(&flags_need_processing);
1998 {
1999 ASM_CODE_COMMENT_STRING(masm, "Optimized marker check");
2000 // Drop the return address, rebalancing the return stack buffer by using
2001 // JumpMode::kPushAndReturn. We can't leave the slot and overwrite it on
2002 // return since we may do a runtime call along the way that requires the
2003 // stack to only contain valid frames.
2004 __ Drop(1);
2005 __ OptimizeCodeOrTailCallOptimizedCodeSlot(feedback_vector, closure,
2006 JumpMode::kPushAndReturn);
2007 __ Trap();
2008 }
2009#endif
2010
2011 __ bind(&call_stack_guard);
2012 {
2013 ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call");
2014 {
2015 // Push the baseline code return address now, as if it had been pushed by
2016 // the call to this builtin.
2017 __ PushReturnAddressFrom(return_address);
2018 FrameScope inner_frame_scope(masm, StackFrame::INTERNAL);
2019 // Save incoming new target or generator
2020 __ Push(new_target);
2021#ifdef V8_ENABLE_LEAPTIERING
2022 // No need to SmiTag as dispatch handles always look like Smis.
2023 static_assert(kJSDispatchHandleShift > 0);
2025#endif
2026 __ SmiTag(frame_size);
2027 __ Push(frame_size);
2028 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
2029#ifdef V8_ENABLE_LEAPTIERING
2031#endif
2032 __ Pop(new_target);
2033 }
2034
2035 // Return to the caller-pushed pc, without any frame teardown.
2036 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
2037 __ Ret();
2038 }
2039}
2040
2041// static
2042void Builtins::Generate_BaselineOutOfLinePrologueDeopt(MacroAssembler* masm) {
2043 // We're here because we got deopted during BaselineOutOfLinePrologue's stack
2044 // check. Undo all its frame creation and call into the interpreter instead.
2045
2046 // Drop feedback vector.
2047 __ Pop(kScratchRegister);
2048 // Drop bytecode offset (was the feedback vector but got replaced during
2049 // deopt).
2050 __ Pop(kScratchRegister);
2051 // Drop bytecode array
2052 __ Pop(kScratchRegister);
2053
2054 // argc.
2055 __ Pop(kJavaScriptCallArgCountRegister);
2056 // Closure.
2057 __ Pop(kJavaScriptCallTargetRegister);
2058 // Context.
2059 __ Pop(kContextRegister);
2060
2061 // Drop frame pointer
2062 __ LeaveFrame(StackFrame::BASELINE);
2063
2064 // Enter the interpreter.
2065 __ TailCallBuiltin(Builtin::kInterpreterEntryTrampoline);
2066}
2067
2068namespace {
2069void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
2070 bool javascript_builtin,
2071 bool with_result) {
2072 Generate_CallToAdaptShadowStackForDeopt(masm, true);
2073 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
2074 masm->pc_offset());
2075
2076 ASM_CODE_COMMENT(masm);
2077 const RegisterConfiguration* config(RegisterConfiguration::Default());
2078 int allocatable_register_count = config->num_allocatable_general_registers();
2079 if (with_result) {
2080 if (javascript_builtin) {
2082 // kScratchRegister is not included in the allocatable registers.
2082 __ movq(kScratchRegister, rax);
2083 } else {
2084 // Overwrite the hole inserted by the deoptimizer with the return value
2085 // from the LAZY deopt point.
2086 __ movq(
2087 Operand(rsp, config->num_allocatable_general_registers() *
2090 rax);
2091 }
2092 }
2093 for (int i = allocatable_register_count - 1; i >= 0; --i) {
2094 int code = config->GetAllocatableGeneralCode(i);
2095 __ popq(Register::from_code(code));
2096 if (javascript_builtin && code == kJavaScriptCallArgCountRegister.code()) {
2097 __ SmiUntagUnsigned(Register::from_code(code));
2098 }
2099 }
2100 if (with_result && javascript_builtin) {
2101 // Overwrite the hole inserted by the deoptimizer with the return value from
2102 // the LAZY deopt point. rax contains the arguments count, the return value
2103 // from LAZY is always the last argument.
2104 __ movq(Operand(rsp, rax, times_system_pointer_size,
2108 }
2109 __ movq(
2110 rbp,
2112 const int offsetToPC =
2115 __ popq(Operand(rsp, offsetToPC));
2116 __ Drop(offsetToPC / kSystemPointerSize);
2117
2118 // Replace the builtin index Smi on the stack with the instruction start
2119 // address of the builtin from the builtins table, and then jump to this
2120 // address
2121 __ popq(kScratchRegister);
2122 __ movq(kScratchRegister,
2123 __ EntryFromBuiltinIndexAsOperand(kScratchRegister));
2124 __ jmp(kScratchRegister);
2125}
2126} // namespace
2127
2128void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
2129 Generate_ContinueToBuiltinHelper(masm, false, false);
2130}
2131
2132void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
2133 MacroAssembler* masm) {
2134 Generate_ContinueToBuiltinHelper(masm, false, true);
2135}
2136
2137void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
2138 Generate_ContinueToBuiltinHelper(masm, true, false);
2139}
2140
2141void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
2142 MacroAssembler* masm) {
2143 Generate_ContinueToBuiltinHelper(masm, true, true);
2144}
2145
2146void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
2147 // Enter an internal frame.
2148 {
2149 FrameScope scope(masm, StackFrame::INTERNAL);
2150 __ CallRuntime(Runtime::kNotifyDeoptimized);
2151 // Tear down internal frame.
2152 }
2153
2155 __ movq(rax, Operand(rsp, kPCOnStackSize));
2156 __ ret(1 * kSystemPointerSize); // Remove rax.
2157}
2158
2159// static
2160void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
2161 // ----------- S t a t e -------------
2162 // -- rax : argc
2163 // -- rsp[0] : return address
2164 // -- rsp[1] : receiver
2165 // -- rsp[2] : thisArg
2166 // -- rsp[3] : argArray
2167 // -----------------------------------
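 // At the JS level this implements e.g. `f.apply(thisArg, argArray)`: the
 // callable `f` arrives in the receiver slot, with thisArg and argArray as the
 // (optional) arguments handled below.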
2168
2169 // 1. Load receiver into rdi, argArray into rbx (if present), remove all
2170 // arguments from the stack (including the receiver), and push thisArg (if
2171 // present) instead.
2172 {
2173 Label no_arg_array, no_this_arg;
2174 StackArgumentsAccessor args(rax);
2175 __ LoadRoot(rdx, RootIndex::kUndefinedValue);
2176 __ movq(rbx, rdx);
2177 __ movq(rdi, args[0]);
2178 __ cmpq(rax, Immediate(JSParameterCount(0)));
2179 __ j(equal, &no_this_arg, Label::kNear);
2180 {
2181 __ movq(rdx, args[1]);
2182 __ cmpq(rax, Immediate(JSParameterCount(1)));
2183 __ j(equal, &no_arg_array, Label::kNear);
2184 __ movq(rbx, args[2]);
2185 __ bind(&no_arg_array);
2186 }
2187 __ bind(&no_this_arg);
2188 __ DropArgumentsAndPushNewReceiver(rax, rdx, rcx);
2189 }
2190
2191 // ----------- S t a t e -------------
2192 // -- rbx : argArray
2193 // -- rdi : receiver
2194 // -- rsp[0] : return address
2195 // -- rsp[8] : thisArg
2196 // -----------------------------------
2197
2198 // 2. We don't need to check explicitly for callable receiver here,
2199 // since that's the first thing the Call/CallWithArrayLike builtins
2200 // will do.
2201
2202 // 3. Tail call with no arguments if argArray is null or undefined.
2203 Label no_arguments;
2204 __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
2205 __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
2206
2207 // 4a. Apply the receiver to the given argArray.
2208 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2209
2210 // 4b. The argArray is either null or undefined, so we tail call without any
2211 // arguments to the receiver. Since we did not create a frame for
2212 // Function.prototype.apply() yet, we use a normal Call builtin here.
2213 __ bind(&no_arguments);
2214 {
2215 __ Move(rax, JSParameterCount(0));
2216 __ TailCallBuiltin(Builtins::Call());
2217 }
2218}
2219
2220// static
2221void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2222 // Stack Layout:
2223 // rsp[0] : Return address
2224 // rsp[8] : Argument 0 (receiver: callable to call)
2225 // rsp[16] : Argument 1
2226 // ...
2227 // rsp[8 * n] : Argument n-1
2228 // rsp[8 * (n + 1)] : Argument n
2229 // rax contains the number of arguments, n.
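 // At the JS level this implements e.g. `f.call(thisArg, arg1, arg2)`: the
 // callable `f` arrives as the receiver; below it is removed from the stack and
 // thisArg (the original first argument) becomes the new receiver.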
2230
2231 // 1. Get the callable to call (passed as receiver) from the stack.
2232 {
2233 StackArgumentsAccessor args(rax);
2234 __ movq(rdi, args.GetReceiverOperand());
2235 }
2236
2237 // 2. Save the return address and drop the callable.
2238 __ PopReturnAddressTo(rbx);
2239 __ Pop(kScratchRegister);
2240
2241 // 3. Make sure we have at least one argument.
2242 {
2243 Label done;
2244 __ cmpq(rax, Immediate(JSParameterCount(0)));
2245 __ j(greater, &done, Label::kNear);
2246 __ PushRoot(RootIndex::kUndefinedValue);
2247 __ incq(rax);
2248 __ bind(&done);
2249 }
2250
2251 // 4. Push the return address back one slot lower on the stack (overwriting
2252 // the original callable), making the original first argument the new receiver.
2253 __ PushReturnAddressFrom(rbx);
2254 __ decq(rax); // One fewer argument (first argument is new receiver).
2255
2256 // 5. Call the callable.
2257 // Since we did not create a frame for Function.prototype.call() yet,
2258 // we use a normal Call builtin here.
2259 __ TailCallBuiltin(Builtins::Call());
2260}
2261
2262void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2263 // ----------- S t a t e -------------
2264 // -- rax : argc
2265 // -- rsp[0] : return address
2266 // -- rsp[8] : receiver
2267 // -- rsp[16] : target (if argc >= 1)
2268 // -- rsp[24] : thisArgument (if argc >= 2)
2269 // -- rsp[32] : argumentsList (if argc == 3)
2270 // -----------------------------------
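 // At the JS level this implements `Reflect.apply(target, thisArgument,
 // argumentsList)`; arguments that are not passed default to undefined.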
2271
2272 // 1. Load target into rdi (if present), argumentsList into rbx (if present),
2273 // remove all arguments from the stack (including the receiver), and push
2274 // thisArgument (if present) instead.
2275 {
2276 Label done;
2277 StackArgumentsAccessor args(rax);
2278 __ LoadRoot(rdi, RootIndex::kUndefinedValue);
2279 __ movq(rdx, rdi);
2280 __ movq(rbx, rdi);
2281 __ cmpq(rax, Immediate(JSParameterCount(1)));
2282 __ j(below, &done, Label::kNear);
2283 __ movq(rdi, args[1]); // target
2284 __ j(equal, &done, Label::kNear);
2285 __ movq(rdx, args[2]); // thisArgument
2286 __ cmpq(rax, Immediate(JSParameterCount(3)));
2287 __ j(below, &done, Label::kNear);
2288 __ movq(rbx, args[3]); // argumentsList
2289 __ bind(&done);
2290 __ DropArgumentsAndPushNewReceiver(rax, rdx, rcx);
2291 }
2292
2293 // ----------- S t a t e -------------
2294 // -- rbx : argumentsList
2295 // -- rdi : target
2296 // -- rsp[0] : return address
2297 // -- rsp[8] : thisArgument
2298 // -----------------------------------
2299
2300 // 2. We don't need to check explicitly for callable target here,
2301 // since that's the first thing the Call/CallWithArrayLike builtins
2302 // will do.
2303
2304 // 3. Apply the target to the given argumentsList.
2305 __ TailCallBuiltin(Builtin::kCallWithArrayLike);
2306}
2307
2308void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2309 // ----------- S t a t e -------------
2310 // -- rax : argc
2311 // -- rsp[0] : return address
2312 // -- rsp[8] : receiver
2313 // -- rsp[16] : target
2314 // -- rsp[24] : argumentsList
2315 // -- rsp[32] : new.target (optional)
2316 // -----------------------------------
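 // At the JS level this implements `Reflect.construct(target, argumentsList,
 // newTarget)`; when newTarget is omitted it defaults to target, which is why
 // rdx is preloaded with the target below.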
2317
2318 // 1. Load target into rdi (if present), argumentsList into rbx (if present),
2319 // new.target into rdx (if present, otherwise use target), remove all
2320 // arguments from the stack (including the receiver), and push thisArgument
2321 // (if present) instead.
2322 {
2323 Label done;
2324 StackArgumentsAccessor args(rax);
2325 __ LoadRoot(rdi, RootIndex::kUndefinedValue);
2326 __ movq(rdx, rdi);
2327 __ movq(rbx, rdi);
2328 __ cmpq(rax, Immediate(JSParameterCount(1)));
2329 __ j(below, &done, Label::kNear);
2330 __ movq(rdi, args[1]); // target
2331 __ movq(rdx, rdi); // new.target defaults to target
2332 __ j(equal, &done, Label::kNear);
2333 __ movq(rbx, args[2]); // argumentsList
2334 __ cmpq(rax, Immediate(JSParameterCount(3)));
2335 __ j(below, &done, Label::kNear);
2336 __ movq(rdx, args[3]); // new.target
2337 __ bind(&done);
2338 __ DropArgumentsAndPushNewReceiver(
2339 rax, masm->RootAsOperand(RootIndex::kUndefinedValue), rcx);
2340 }
2341
2342 // ----------- S t a t e -------------
2343 // -- rbx : argumentsList
2344 // -- rdx : new.target
2345 // -- rdi : target
2346 // -- rsp[0] : return address
2347 // -- rsp[8] : receiver (undefined)
2348 // -----------------------------------
2349
2350 // 2. We don't need to check explicitly for constructor target here,
2351 // since that's the first thing the Construct/ConstructWithArrayLike
2352 // builtins will do.
2353
2354 // 3. We don't need to check explicitly for constructor new.target here,
2355 // since that's the second thing the Construct/ConstructWithArrayLike
2356 // builtins will do.
2357
2358 // 4. Construct the target with the given new.target and argumentsList.
2359 __ TailCallBuiltin(Builtin::kConstructWithArrayLike);
2360}
2361
2362namespace {
2363
2364// Allocate new stack space for |count| arguments and shift all existing
2365// arguments already on the stack. |pointer_to_new_space_out| points to the
2366// first free slot on the stack to copy additional arguments to and
2367// |argc_in_out| is updated to include |count|.
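 // Worked example: with |count| == 2 and |argc_in_out| == 3 (receiver plus two
 // arguments), the stack grows by two slots, the existing four slots (the three
 // arguments plus the return address) are copied down into the new space, and
 // |pointer_to_new_space_out| ends up pointing at the lower of the two freed
 // slots just above the copied block.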
2368void Generate_AllocateSpaceAndShiftExistingArguments(
2369 MacroAssembler* masm, Register count, Register argc_in_out,
2370 Register pointer_to_new_space_out, Register scratch1, Register scratch2) {
2371 DCHECK(!AreAliased(count, argc_in_out, pointer_to_new_space_out, scratch1,
2372 scratch2, kScratchRegister));
2373 // Use pointer_to_new_space_out as scratch until we set it to the correct
2374 // value at the end.
2375 Register old_rsp = pointer_to_new_space_out;
2376 Register new_space = kScratchRegister;
2377 __ movq(old_rsp, rsp);
2378
2379 __ leaq(new_space, Operand(count, times_system_pointer_size, 0));
2380 __ AllocateStackSpace(new_space);
2381
2382 Register copy_count = argc_in_out;
2383 Register current = scratch2;
2384 Register value = kScratchRegister;
2385
2386 Label loop, entry;
2387 __ Move(current, 0);
2388 __ jmp(&entry);
2389 __ bind(&loop);
2390 __ movq(value, Operand(old_rsp, current, times_system_pointer_size, 0));
2391 __ movq(Operand(rsp, current, times_system_pointer_size, 0), value);
2392 __ incq(current);
2393 __ bind(&entry);
2394 __ cmpq(current, copy_count);
2395 __ j(less_equal, &loop, Label::kNear);
2396
2397 // Point to the next free slot above the shifted arguments (copy_count + 1
2398 // slot for the return address).
2399 __ leaq(
2400 pointer_to_new_space_out,
2401 Operand(rsp, copy_count, times_system_pointer_size, kSystemPointerSize));
2402 // We use addl instead of addq here because we can omit REX.W, saving 1 byte.
2403 // We are especially constrained here because we are close to reaching the
2404 // limit for a near jump to the stackoverflow label, so every byte counts.
2405 __ addl(argc_in_out, count); // Update total number of arguments.
2406}
2407
2408} // namespace
2409
2410// static
2411// TODO(v8:11615): Observe Code::kMaxArguments in CallOrConstructVarargs
2412void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
2413 Builtin target_builtin) {
2414 // ----------- S t a t e -------------
2415 // -- rdi : target
2416 // -- rax : number of parameters on the stack
2417 // -- rbx : arguments list (a FixedArray)
2418 // -- rcx : len (number of elements to push from args)
2419 // -- rdx : new.target (for [[Construct]])
2420 // -- rsp[0] : return address
2421 // -----------------------------------
2422
2423 if (v8_flags.debug_code) {
2424 // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
2425 Label ok, fail;
2426 __ AssertNotSmi(rbx);
2427 Register map = r9;
2428 __ LoadMap(map, rbx);
2429 __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
2430 __ j(equal, &ok);
2431 __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
2432 __ j(not_equal, &fail);
2433 __ Cmp(rcx, 0);
2434 __ j(equal, &ok);
2435 // Fall through.
2436 __ bind(&fail);
2437 __ Abort(AbortReason::kOperandIsNotAFixedArray);
2438
2439 __ bind(&ok);
2440 }
2441
2442 Label stack_overflow;
2443 __ StackOverflowCheck(rcx, &stack_overflow,
2444 Label::kNear);
2445
2446 // Push additional arguments onto the stack.
2447 // Move the arguments already in the stack,
2448 // including the receiver and the return address.
2449 // rcx: Number of arguments to make room for.
2450 // rax: Number of arguments already on the stack.
2451 // r8: Points to first free slot on the stack after arguments were shifted.
2452 Generate_AllocateSpaceAndShiftExistingArguments(masm, rcx, rax, r8, r9, r12);
2453 // Copy the additional arguments onto the stack.
2454 {
2455 Register value = r12;
2456 Register src = rbx, dest = r8, num = rcx, current = r9;
2457 __ Move(current, 0);
2458 Label done, push, loop;
2459 __ bind(&loop);
2460 __ cmpl(current, num);
2461 __ j(equal, &done, Label::kNear);
2462 // Turn the hole into undefined as we go.
2463 __ LoadTaggedField(value, FieldOperand(src, current, times_tagged_size,
2464 OFFSET_OF_DATA_START(FixedArray)));
2465 __ CompareRoot(value, RootIndex::kTheHoleValue);
2466 __ j(not_equal, &push, Label::kNear);
2467 __ LoadRoot(value, RootIndex::kUndefinedValue);
2468 __ bind(&push);
2469 __ movq(Operand(dest, current, times_system_pointer_size, 0), value);
2470 __ incl(current);
2471 __ jmp(&loop);
2472 __ bind(&done);
2473 }
2474
2475 // Tail-call to the actual Call or Construct builtin.
2476 __ TailCallBuiltin(target_builtin);
2477
2478 __ bind(&stack_overflow);
2479 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2480}
2481
2482// static
2483void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2484 CallOrConstructMode mode,
2485 Builtin target_builtin) {
2486 // ----------- S t a t e -------------
2487 // -- rax : the number of arguments
2488 // -- rdx : the new target (for [[Construct]] calls)
2489 // -- rdi : the target to call (can be any Object)
2490 // -- rcx : start index (to support rest parameters)
2491 // -----------------------------------
2492
2493 // Check if new.target has a [[Construct]] internal method.
2494 if (mode == CallOrConstructMode::kConstruct) {
2495 Label new_target_constructor, new_target_not_constructor;
2496 __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
2497 __ LoadMap(rbx, rdx);
2498 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2499 Immediate(Map::Bits1::IsConstructorBit::kMask));
2500 __ j(not_zero, &new_target_constructor, Label::kNear);
2501 __ bind(&new_target_not_constructor);
2502 {
2503 FrameScope scope(masm, StackFrame::MANUAL);
2504 __ EnterFrame(StackFrame::INTERNAL);
2505 __ Push(rdx);
2506 __ CallRuntime(Runtime::kThrowNotConstructor);
2507 }
2508 __ bind(&new_target_constructor);
2509 }
2510
2511 Label stack_done, stack_overflow;
2512 __ movq(r8, Operand(rbp, StandardFrameConstants::kArgCOffset));
2513 __ decq(r8); // Exclude receiver.
2514 __ subl(r8, rcx);
2515 __ j(less_equal, &stack_done);
2516 {
2517 // ----------- S t a t e -------------
2518 // -- rax : the number of arguments already in the stack
2519 // -- rbp : point to the caller stack frame
2520 // -- rcx : start index (to support rest parameters)
2521 // -- rdx : the new target (for [[Construct]] calls)
2522 // -- rdi : the target to call (can be any Object)
2523 // -- r8 : number of arguments to copy, i.e. arguments count - start index
2524 // -----------------------------------
2525
2526 // Check for stack overflow.
2527 __ StackOverflowCheck(r8, &stack_overflow, Label::kNear);
2528
2529 // Forward the arguments from the caller frame.
2530 // Move the arguments already in the stack,
2531 // including the receiver and the return address.
2532 // r8: Number of arguments to make room for.
2533 // rax: Number of arguments already on the stack.
2534 // r9: Points to first free slot on the stack after arguments were shifted.
2535 Generate_AllocateSpaceAndShiftExistingArguments(masm, r8, rax, r9, r12,
2536 r15);
2537
2538 // Point to the first argument to copy (skipping receiver).
2539 __ leaq(rcx, Operand(rcx, times_system_pointer_size,
2542 __ addq(rcx, rbp);
2543
2544 // Copy the additional caller arguments onto the stack.
2545 // TODO(victorgomes): Consider using forward order as potentially more cache
2546 // friendly.
2547 {
2548 Register src = rcx, dest = r9, num = r8;
2549 Label loop;
2550 __ bind(&loop);
2551 __ decq(num);
2552 __ movq(kScratchRegister,
2553 Operand(src, num, times_system_pointer_size, 0));
2554 __ movq(Operand(dest, num, times_system_pointer_size, 0),
2555 kScratchRegister);
2556 __ j(not_zero, &loop);
2557 }
2558 }
2559 __ bind(&stack_done);
2560 // Tail-call to the actual Call or Construct builtin.
2561 __ TailCallBuiltin(target_builtin);
2562
2563 __ bind(&stack_overflow);
2564 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2565}
2566
2567// static
2568void Builtins::Generate_CallFunction(MacroAssembler* masm,
2569 ConvertReceiverMode mode) {
2570 // ----------- S t a t e -------------
2571 // -- rax : the number of arguments
2572 // -- rdi : the function to call (checked to be a JSFunction)
2573 // -----------------------------------
2574
2575 StackArgumentsAccessor args(rax);
2576 __ AssertCallableFunction(rdi);
2577
2578 __ LoadTaggedField(rdx,
2579 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2580 // ----------- S t a t e -------------
2581 // -- rax : the number of arguments
2582 // -- rdx : the shared function info.
2583 // -- rdi : the function to call (checked to be a JSFunction)
2584 // -----------------------------------
2585
2586 // Enter the context of the function; ToObject has to run in the function
2587 // context, and we also need to take the global proxy from the function
2588 // context in case of conversion.
2589 __ LoadTaggedField(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2590 // We need to convert the receiver for non-native sloppy mode functions.
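 // For example, in sloppy mode `f.call(null)` and `f.call(undefined)` receive
 // the global proxy as the receiver, and `f.call(42)` receives a wrapper object
 // produced by ToObject; strict-mode and native functions skip this conversion
 // and see the receiver as-is.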
2591 Label done_convert;
2592 __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
2593 Immediate(SharedFunctionInfo::IsNativeBit::kMask |
2594 SharedFunctionInfo::IsStrictBit::kMask));
2595 __ j(not_zero, &done_convert);
2596 {
2597 // ----------- S t a t e -------------
2598 // -- rax : the number of arguments
2599 // -- rdx : the shared function info.
2600 // -- rdi : the function to call (checked to be a JSFunction)
2601 // -- rsi : the function context.
2602 // -----------------------------------
2603
2604 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2605 // Patch receiver to global proxy.
2606 __ LoadGlobalProxy(rcx);
2607 } else {
2608 Label convert_to_object, convert_receiver;
2609 __ movq(rcx, args.GetReceiverOperand());
2610 __ JumpIfSmi(rcx, &convert_to_object,
2611 Label::kNear);
2612 __ JumpIfJSAnyIsNotPrimitive(rcx, rbx, &done_convert,
2613 Label::kNear);
2614 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2615 Label convert_global_proxy;
2616 __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
2617 Label::kNear);
2618 __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
2619 Label::kNear);
2620 __ bind(&convert_global_proxy);
2621 {
2622 // Patch receiver to global proxy.
2623 __ LoadGlobalProxy(rcx);
2624 }
2625 __ jmp(&convert_receiver);
2626 }
2627 __ bind(&convert_to_object);
2628 {
2629 // Convert receiver using ToObject.
2630 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2631 // in the fast case? (fall back to AllocateInNewSpace?)
2632 FrameScope scope(masm, StackFrame::INTERNAL);
2633 __ SmiTag(rax);
2634 __ Push(rax);
2635 __ Push(rdi);
2636 __ movq(rax, rcx);
2637 __ Push(rsi);
2638 __ CallBuiltin(Builtin::kToObject);
2639 __ Pop(rsi);
2640 __ movq(rcx, rax);
2641 __ Pop(rdi);
2642 __ Pop(rax);
2643 __ SmiUntagUnsigned(rax);
2644 }
2645 __ LoadTaggedField(
2646 rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2647 __ bind(&convert_receiver);
2648 }
2649 __ movq(args.GetReceiverOperand(), rcx);
2650 }
2651 __ bind(&done_convert);
2652
2653 // ----------- S t a t e -------------
2654 // -- rax : the number of arguments
2655 // -- rdx : the shared function info.
2656 // -- rdi : the function to call (checked to be a JSFunction)
2657 // -- rsi : the function context.
2658 // -----------------------------------
2659
2660#ifdef V8_ENABLE_LEAPTIERING
2661 __ InvokeFunctionCode(rdi, no_reg, rax, InvokeType::kJump);
2662#else
2663 __ movzxwq(
2664 rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2665 __ InvokeFunctionCode(rdi, no_reg, rbx, rax, InvokeType::kJump);
2666#endif // V8_ENABLE_LEAPTIERING
2667}
2668
2669namespace {
2670
2671void Generate_PushBoundArguments(MacroAssembler* masm) {
2672 // ----------- S t a t e -------------
2673 // -- rax : the number of arguments
2674 // -- rdx : new.target (only in case of [[Construct]])
2675 // -- rdi : target (checked to be a JSBoundFunction)
2676 // -----------------------------------
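 // For example, for `const b = f.bind(thisArg, 1, 2)`, a call `b(3)` reaches
 // this point with the single argument 3; the two bound arguments are pushed
 // below so that the target `f` finally sees the argument list (1, 2, 3).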
2677
2678 // Load [[BoundArguments]] into rcx and length of that into rbx.
2679 Label no_bound_arguments;
2680 __ LoadTaggedField(rcx,
2681 FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2682 __ SmiUntagFieldUnsigned(rbx,
2683 FieldOperand(rcx, offsetof(FixedArray, length_)));
2684 __ testl(rbx, rbx);
2685 __ j(zero, &no_bound_arguments);
2686 {
2687 // ----------- S t a t e -------------
2688 // -- rax : the number of arguments
2689 // -- rdx : new.target (only in case of [[Construct]])
2690 // -- rdi : target (checked to be a JSBoundFunction)
2691 // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2692 // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2693 // -----------------------------------
2694
2695 // TODO(victor): Use Generate_StackOverflowCheck here.
2696 // Check the stack for overflow.
2697 {
2698 Label done;
2699 __ shlq(rbx, Immediate(kSystemPointerSizeLog2));
2700 __ movq(kScratchRegister, rsp);
2701 __ subq(kScratchRegister, rbx);
2702
2703 // We are not trying to catch interruptions (i.e. debug break and
2704 // preemption) here, so check the "real stack limit".
2705 __ cmpq(kScratchRegister,
2706 __ StackLimitAsOperand(StackLimitKind::kRealStackLimit));
2707 __ j(above_equal, &done, Label::kNear);
2708 {
2709 FrameScope scope(masm, StackFrame::MANUAL);
2710 __ EnterFrame(StackFrame::INTERNAL);
2711 __ CallRuntime(Runtime::kThrowStackOverflow);
2712 }
2713 __ bind(&done);
2714 }
2715
2716 // Save Return Address and Receiver into registers.
2717 __ Pop(r8);
2718 __ Pop(r10);
2719
2720 // Push [[BoundArguments]] to the stack.
2721 {
2722 Label loop;
2723 __ LoadTaggedField(
2724 rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2725 __ SmiUntagFieldUnsigned(
2726 rbx, FieldOperand(rcx, offsetof(FixedArray, length_)));
2727 __ addq(rax, rbx); // Adjust effective number of arguments.
2728 __ bind(&loop);
2729 // Instead of doing decl(rbx) here subtract kTaggedSize from the header
2730 // offset in order to be able to move decl(rbx) right before the loop
2731 // condition. This is necessary in order to avoid flags corruption by
2732 // pointer decompression code.
2733 __ LoadTaggedField(
2734 r12, FieldOperand(rcx, rbx, times_tagged_size,
2735 OFFSET_OF_DATA_START(FixedArray) - kTaggedSize));
2736 __ Push(r12);
2737 __ decl(rbx);
2738 __ j(greater, &loop);
2739 }
2740
2741 // Recover Receiver and Return Address.
2742 __ Push(r10);
2743 __ Push(r8);
2744 }
2745 __ bind(&no_bound_arguments);
2746}
2747
2748} // namespace
2749
2750// static
2751void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2752 // ----------- S t a t e -------------
2753 // -- rax : the number of arguments
2754 // -- rdi : the function to call (checked to be a JSBoundFunction)
2755 // -----------------------------------
2756 __ AssertBoundFunction(rdi);
2757
2758 // Patch the receiver to [[BoundThis]].
2759 StackArgumentsAccessor args(rax);
2760 __ LoadTaggedField(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2761 __ movq(args.GetReceiverOperand(), rbx);
2762
2763 // Push the [[BoundArguments]] onto the stack.
2764 Generate_PushBoundArguments(masm);
2765
2766 // Call the [[BoundTargetFunction]] via the Call builtin.
2767 __ LoadTaggedField(
2768 rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2769 __ TailCallBuiltin(Builtins::Call());
2770}
2771
2772// static
2773void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2774 // ----------- S t a t e -------------
2775 // -- rax : the number of arguments
2776 // -- rdi : the target to call (can be any Object)
2777 // -----------------------------------
2778 Register argc = rax;
2779 Register target = rdi;
2780 Register map = rcx;
2781 Register instance_type = rdx;
2782 DCHECK(!AreAliased(argc, target, map, instance_type));
2783
2784 StackArgumentsAccessor args(argc);
2785
2786 Label non_callable, class_constructor;
2787 __ JumpIfSmi(target, &non_callable);
2788 __ LoadMap(map, target);
2789 __ CmpInstanceTypeRange(map, instance_type, FIRST_CALLABLE_JS_FUNCTION_TYPE,
2790 LAST_CALLABLE_JS_FUNCTION_TYPE);
2791 __ TailCallBuiltin(Builtins::CallFunction(mode), below_equal);
2792
2793 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2794 __ TailCallBuiltin(Builtin::kCallBoundFunction, equal);
2795
2796 // Check if target has a [[Call]] internal method.
2797 __ testb(FieldOperand(map, Map::kBitFieldOffset),
2798 Immediate(Map::Bits1::IsCallableBit::kMask));
2799 __ j(zero, &non_callable, Label::kNear);
2800
2801 // Check if target is a proxy and call CallProxy external builtin
2802 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2803 __ TailCallBuiltin(Builtin::kCallProxy, equal);
2804
2805 // Check if target is a wrapped function and call CallWrappedFunction external
2806 // builtin
2807 __ cmpw(instance_type, Immediate(JS_WRAPPED_FUNCTION_TYPE));
2808 __ TailCallBuiltin(Builtin::kCallWrappedFunction, equal);
2809
2810 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2811 // Check that the function is not a "classConstructor".
2812 __ cmpw(instance_type, Immediate(JS_CLASS_CONSTRUCTOR_TYPE));
2813 __ j(equal, &class_constructor);
2814
2815 // 2. Call to something else, which might have a [[Call]] internal method (if
2816 // not we raise an exception).
2817
2818 // Overwrite the original receiver with the (original) target.
2819 __ movq(args.GetReceiverOperand(), target);
2820 // Let the "call_as_function_delegate" take care of the rest.
2821 __ LoadNativeContextSlot(target, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
2822 __ TailCallBuiltin(
2823 Builtins::CallFunction(ConvertReceiverMode::kNotNullOrUndefined));
2824
2825 // 3. Call to something that is not callable.
2826 __ bind(&non_callable);
2827 {
2828 FrameScope scope(masm, StackFrame::INTERNAL);
2829 __ Push(target);
2830 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2831 __ Trap(); // Unreachable.
2832 }
2833
2834 // 4. The function is a "classConstructor", need to raise an exception.
2835 __ bind(&class_constructor);
2836 {
2837 FrameScope frame(masm, StackFrame::INTERNAL);
2838 __ Push(target);
2839 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2840 __ Trap(); // Unreachable.
2841 }
2842}
2843
2844// static
2845void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2846 // ----------- S t a t e -------------
2847 // -- rax : the number of arguments
2848 // -- rdx : the new target (checked to be a constructor)
2849 // -- rdi : the constructor to call (checked to be a JSFunction)
2850 // -----------------------------------
2851 __ AssertConstructor(rdi);
2852 __ AssertFunction(rdi);
2853
2854 // Calling convention for function specific ConstructStubs require
2855 // rbx to contain either an AllocationSite or undefined.
2856 __ LoadRoot(rbx, RootIndex::kUndefinedValue);
2857
2858 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2859 const TaggedRegister shared_function_info(rcx);
2860 __ LoadTaggedField(shared_function_info,
2861 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2862 __ testl(FieldOperand(shared_function_info, SharedFunctionInfo::kFlagsOffset),
2863 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2864 __ TailCallBuiltin(Builtin::kJSBuiltinsConstructStub, not_zero);
2865
2866 __ TailCallBuiltin(Builtin::kJSConstructStubGeneric);
2867}
2868
2869// static
2870void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2871 // ----------- S t a t e -------------
2872 // -- rax : the number of arguments
2873 // -- rdx : the new target (checked to be a constructor)
2874 // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2875 // -----------------------------------
2876 __ AssertConstructor(rdi);
2877 __ AssertBoundFunction(rdi);
2878
2879 // Push the [[BoundArguments]] onto the stack.
2880 Generate_PushBoundArguments(masm);
2881
2882 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2883 {
2884 Label done;
2885 __ cmpq(rdi, rdx);
2886 __ j(not_equal, &done, Label::kNear);
2887 __ LoadTaggedField(
2888 rdx, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2889 __ bind(&done);
2890 }
2891
2892 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2893 __ LoadTaggedField(
2894 rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2895 __ TailCallBuiltin(Builtin::kConstruct);
2896}
2897
2898// static
2899void Builtins::Generate_Construct(MacroAssembler* masm) {
2900 // ----------- S t a t e -------------
2901 // -- rax : the number of arguments
2902 // -- rdx : the new target (either the same as the constructor or
2903 // the JSFunction on which new was invoked initially)
2904 // -- rdi : the constructor to call (can be any Object)
2905 // -----------------------------------
2906 Register argc = rax;
2907 Register target = rdi;
2908 Register map = rcx;
2909 Register instance_type = r8;
2910 DCHECK(!AreAliased(argc, target, map, instance_type));
2911
2912 StackArgumentsAccessor args(argc);
2913
2914 // Check if target is a Smi.
2915 Label non_constructor;
2916 __ JumpIfSmi(target, &non_constructor);
2917
2918 // Check if target has a [[Construct]] internal method.
2919 __ LoadMap(map, target);
2920 __ testb(FieldOperand(map, Map::kBitFieldOffset),
2921 Immediate(Map::Bits1::IsConstructorBit::kMask));
2922 __ j(zero, &non_constructor);
2923
2924 // Dispatch based on instance type.
2925 __ CmpInstanceTypeRange(map, instance_type, FIRST_JS_FUNCTION_TYPE,
2926 LAST_JS_FUNCTION_TYPE);
2927 __ TailCallBuiltin(Builtin::kConstructFunction, below_equal);
2928
2929 // Only dispatch to bound functions after checking whether they are
2930 // constructors.
2931 __ cmpw(instance_type, Immediate(JS_BOUND_FUNCTION_TYPE));
2932 __ TailCallBuiltin(Builtin::kConstructBoundFunction, equal);
2933
2934 // Only dispatch to proxies after checking whether they are constructors.
2935 __ cmpw(instance_type, Immediate(JS_PROXY_TYPE));
2936 __ TailCallBuiltin(Builtin::kConstructProxy, equal);
2937
2938 // Called Construct on an exotic Object with a [[Construct]] internal method.
2939 {
2940 // Overwrite the original receiver with the (original) target.
2941 __ movq(args.GetReceiverOperand(), target);
2942 // Let the "call_as_constructor_delegate" take care of the rest.
2943 __ LoadNativeContextSlot(target,
2944 Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
2945 __ TailCallBuiltin(Builtins::CallFunction());
2946 }
2947
2948 // Called Construct on an Object that doesn't have a [[Construct]] internal
2949 // method.
2950 __ bind(&non_constructor);
2951 __ TailCallBuiltin(Builtin::kConstructedNonConstructable);
2952}
2953
2954namespace {
2955
2956void Generate_OSREntry(MacroAssembler* masm, Register entry_address) {
2957 // Drop the return address on the stack and jump to the OSR entry
2958 // point of the function.
2959 __ Drop(1);
2960 // TODO(sroettger): Use the notrack prefix since not all OSR entries emit an
2961 // endbr instruction yet.
2962 __ jmp(entry_address, /*notrack=*/true);
2963}
2964
2965enum class OsrSourceTier {
2966 kInterpreter,
2967 kBaseline,
2968 kMaglev,
2969};
2970
2971void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source,
2972 Register maybe_target_code,
2973 Register expected_param_count) {
2974 Label jump_to_optimized_code;
2975 {
2976 // If maybe_target_code is not null, no need to call into runtime. A
2977 // precondition here is: if maybe_target_code is an InstructionStream
2978 // object, it must NOT be marked_for_deoptimization (callers must ensure
2979 // this).
2980 __ testq(maybe_target_code, maybe_target_code);
2981 __ j(not_equal, &jump_to_optimized_code, Label::kNear);
2982 }
2983
2984 {
2985 FrameScope scope(masm, StackFrame::INTERNAL);
2986 // Preserve arguments
2987 __ Push(expected_param_count);
2988 __ CallRuntime(Runtime::kCompileOptimizedOSR);
2989 DCHECK_EQ(maybe_target_code, rax);
2990 __ Pop(expected_param_count);
2991 }
2992
2993 // If the code object is null, just return to the caller.
2994 __ testq(rax, rax);
2995 __ j(not_equal, &jump_to_optimized_code, Label::kNear);
2996 __ ret(0);
2997
2998 __ bind(&jump_to_optimized_code);
2999
3000 if (source == OsrSourceTier::kMaglev) {
3001 // Maglev doesn't enter OSR'd code itself, since OSR depends on the
3002 // unoptimized (~= Ignition) stack frame layout. Instead, return to Maglev
3003 // code and let it deoptimize.
3004 __ ret(0);
3005 return;
3006 }
3007
3008 const Register scratch(rcx);
3009 CHECK(!AreAliased(maybe_target_code, expected_param_count, scratch));
3010
3011 // OSR entry tracing.
3012 {
3013 Label next;
3014 __ cmpb(
3015 __ ExternalReferenceAsOperand(
3016 ExternalReference::address_of_log_or_trace_osr(), kScratchRegister),
3017 Immediate(0));
3018 __ j(equal, &next, Label::kNear);
3019
3020 {
3021 FrameScope scope(masm, StackFrame::INTERNAL);
3022 // Preserve arguments
3023 __ Push(maybe_target_code);
3024 __ Push(expected_param_count);
3025 __ CallRuntime(Runtime::kLogOrTraceOptimizedOSREntry, 0);
3026 __ Pop(expected_param_count);
3027 __ Pop(maybe_target_code);
3028 }
3029
3030 __ bind(&next);
3031 }
3032
3033 if (source == OsrSourceTier::kInterpreter) {
3034 // Drop the handler frame that is sitting on top of the actual
3035 // JavaScript frame.
3036 __ leave();
3037 }
3038
3039 // Check that we are actually jumping to an OSR code object. Among other
3040 // things, this ensures that the object contains the deoptimization data used below.
3041 __ movl(scratch, FieldOperand(maybe_target_code, Code::kOsrOffsetOffset));
3042 __ cmpl(scratch, Immediate(BytecodeOffset::None().ToInt()));
3043 __ SbxCheck(Condition::not_equal, AbortReason::kExpectedOsrCode);
3044
3045 // Check the target has a matching parameter count. This ensures that the OSR
3046 // code will correctly tear down our frame when leaving.
3047 __ movzxwq(scratch,
3048 FieldOperand(maybe_target_code, Code::kParameterCountOffset));
3049 __ SmiUntag(expected_param_count);
3050 __ cmpq(scratch, expected_param_count);
3051 __ SbxCheck(Condition::equal, AbortReason::kOsrUnexpectedStackSize);
3052
3053 __ LoadProtectedPointerField(
3054 scratch, FieldOperand(maybe_target_code,
3055 Code::kDeoptimizationDataOrInterpreterDataOffset));
3056
3057 // Load the OSR entrypoint offset from the deoptimization data.
3058 __ SmiUntagField(
3059 scratch,
3062
3063 __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code,
3064 kJSEntrypointTag);
3065
3066 // Compute the target address = code_entry + osr_offset
3067 __ addq(maybe_target_code, scratch);
3068
3069 Generate_OSREntry(masm, maybe_target_code);
3070}
3071
3072} // namespace
3073
3074void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
3075 using D = OnStackReplacementDescriptor;
3076 static_assert(D::kParameterCount == 2);
3077 OnStackReplacement(masm, OsrSourceTier::kInterpreter,
3078 D::MaybeTargetCodeRegister(),
3079 D::ExpectedParameterCountRegister());
3080}
3081
3082void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) {
3083 using D = OnStackReplacementDescriptor;
3084 static_assert(D::kParameterCount == 2);
3085 __ movq(kContextRegister,
3086 Operand(rbp, BaselineFrameConstants::kContextOffset));
3087 OnStackReplacement(masm, OsrSourceTier::kBaseline,
3088 D::MaybeTargetCodeRegister(),
3089 D::ExpectedParameterCountRegister());
3090}
3091
3092#ifdef V8_ENABLE_MAGLEV
3093
3094// static
3095void Builtins::Generate_MaglevFunctionEntryStackCheck(MacroAssembler* masm,
3096 bool save_new_target) {
3097 // Input (rax): Stack size (Smi).
3098 // This builtin can be invoked just after Maglev's prologue.
3099 // All registers are available, except (possibly) new.target.
3100 ASM_CODE_COMMENT(masm);
3101 {
3102 FrameScope scope(masm, StackFrame::INTERNAL);
3103 __ AssertSmi(rax);
3104 if (save_new_target) {
3105 if (PointerCompressionIsEnabled()) {
3106 __ AssertSmiOrHeapObjectInMainCompressionCage(
3107 kJavaScriptCallNewTargetRegister);
3108 }
3109 __ Push(kJavaScriptCallNewTargetRegister);
3110 }
3111 __ Push(rax);
3112 __ CallRuntime(Runtime::kStackGuardWithGap, 1);
3113 if (save_new_target) {
3114 __ Pop(kJavaScriptCallNewTargetRegister);
3115 }
3116 }
3117 __ Ret();
3118}
3119
3120#endif // V8_ENABLE_MAGLEV
3121
3122namespace {
3123
3124void Generate_RestoreFrameDescriptionRegisters(MacroAssembler* masm,
3125 Register frame_description) {
3126 // Set the xmm (simd / double) registers.
3127 const RegisterConfiguration* config = RegisterConfiguration::Default();
3128 int simd128_regs_offset = FrameDescription::simd128_registers_offset();
3129 for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
3130 int code = config->GetAllocatableSimd128Code(i);
3131 XMMRegister xmm_reg = XMMRegister::from_code(code);
3132 int src_offset = code * kSimd128Size + simd128_regs_offset;
3133 __ movdqu(xmm_reg, Operand(frame_description, src_offset));
3134 }
3135
3136 // Restore the non-xmm registers from the stack.
3137 for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
3138 Register r = Register::from_code(i);
3139 // Do not restore rsp and kScratchRegister.
3140 if (r == rsp || r == kScratchRegister) continue;
3141 __ popq(r);
3142 }
3143}
3144
3145} // namespace
3146
3147#ifdef V8_ENABLE_CET_SHADOW_STACK
3148// AdaptShadowStackForDeopt assists the deoptimizer in getting continuation
3149// addresses placed on the shadow stack. This can only be done with a call
3150// instruction. Earlier in the deoptimization process, the user stack was
3151// seeded with return addresses into the continuations. At this stage, we
3152// make calls into the continuations such that the shadow stack contains
3153// precisely those necessary return addresses back into those continuations,
3154// and in the appropriate order that the shadow stack and the user stack
3155// perfectly match up at the points where return instructions are executed.
3156//
3157// The stack layout on entry to AdaptShadowStackForDeopt is as follows:
3158//
3159// ReturnAddress_1
3160// ReturnAddress_2
3161// ...
3162// ReturnAddresss_N
3163// LastFrameDescription (for restoring registers)
3164// savedRegister_1
3165// savedRegister_2
3166// ...
3167//
3168// kAdaptShadowStackCountRegister, on entry, has the value N, matching the
3169// number of identifiers to pop from the stack above. It is decremented each
3170// time AdaptShadowStackForDeopt pops a return address from the stack. This
3171// happens once per invocation of AdaptShadowStackForDeopt. When the value
3172// is 0, the function jumps to the last return address and will not be called
3173// again for this deoptimization process.
3174//
3175// The other cpu registers have already been populated with the required values
3176// to kick off execution running the builtin continuation associated with
3177// ReturnAddress_N on the stack above. AdaptShadowStackForDeopt uses
3178 // kScratchRegister and kAdaptShadowStackCountRegister for its own work, and
3179 // that is why those registers are additionally saved on the stack, to be
3180 // restored at the end of the process.
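//
// Roughly, each invocation behaves like the following sketch (pseudo-code,
// not the emitted assembly):
//
//   count--;                                    // one continuation consumed
//   target = pop();                             // next continuation address
//   target = EnsureValidReturnAddress(target);  // crashes if not allowed
//   if (count != 0) jmp target;                 // target calls back in here
//   // otherwise: restore registers from LastFrameDescription and jump to
//   // the final continuation.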
3181
3182// kAdaptShadowStackDispatchFirstEntryOffset marks the "kick-off" location in
3183// AdaptShadowStackForDeopt for the process.
3184constexpr int kAdaptShadowStackDispatchFirstEntryOffset = 1;
3185
3186// kAdaptShadowStackCountRegister contains the number of identifiers on
3187// the stack to be consumed via repeated calls into AdaptShadowStackForDeopt.
3188constexpr Register kAdaptShadowStackCountRegister = r11;
3189
3190void Builtins::Generate_AdaptShadowStackForDeopt(MacroAssembler* masm) {
3191 Register count_reg = kAdaptShadowStackCountRegister;
3192 Register addr = rax;
3193
3194 // Pop unnecessary return address on stack.
3195 __ popq(addr);
3196
3197 // DeoptimizationEntry enters here.
3198 CHECK_EQ(masm->pc_offset(), kAdaptShadowStackDispatchFirstEntryOffset);
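// The check above relies on the single `popq` before this point encoding to
// exactly one byte, so that kAdaptShadowStackDispatchFirstEntryOffset (== 1)
// is the pc offset at which the deoptimizer enters this builtin directly.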
3199
3200 __ decl(count_reg);
3201 __ popq(addr); // Pop the next target address.
3202
3203 __ pushq(count_reg);
3204 __ Move(kCArgRegs[0], ExternalReference::isolate_address());
3205 __ movq(kCArgRegs[1], addr);
3206 __ PrepareCallCFunction(2);
3207 {
3208 AllowExternalCallThatCantCauseGC scope(masm);
3209 // Block jumps to arbitrary locations for security reasons: this function
3210 // crashes if the address is not in the allow list, and returns the given
3211 // address if it is valid.
3212 __ CallCFunction(ExternalReference::ensure_valid_return_address(), 2);
3213 }
3214 __ popq(count_reg);
3215 // Now `kReturnRegister0` is the address we want to jump to.
3216
3217 __ cmpl(count_reg, Immediate(0));
3218 Label finished;
3219 __ j(equal, &finished, Label::kNear);
3220 // This jumps to CallToAdaptShadowStackForDeopt, which calls back into this
3221 // function and continues adapting the shadow stack.
3222 __ jmp(kReturnRegister0);
3223
3224 __ bind(&finished);
3225 __ movb(__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
3226 Immediate(1));
3227 __ movq(kScratchRegister, kReturnRegister0); // The final continuation.
3228
3229 __ popq(rbx); // Restore the last FrameDescription.
3230 Generate_RestoreFrameDescriptionRegisters(masm, rbx);
3231 __ jmp(kScratchRegister);
3232}
3233#endif // V8_ENABLE_CET_SHADOW_STACK
3234
3235#if V8_ENABLE_WEBASSEMBLY
3236
3237// Returns the offset beyond the last saved FP register.
3238int SaveWasmParams(MacroAssembler* masm) {
3239 // Save all parameter registers (see wasm-linkage.h). They might be
3240 // overwritten in the subsequent runtime call. We don't have any callee-saved
3241 // registers in wasm, so no need to store anything else.
3242 static_assert(WasmLiftoffSetupFrameConstants::kNumberOfSavedGpParamRegs ==
3243 arraysize(wasm::kGpParamRegisters),
3244 "frame size mismatch");
3245 for (Register reg : wasm::kGpParamRegisters) {
3246 __ Push(reg);
3247 }
3250 "frame size mismatch");
3251 __ AllocateStackSpace(kSimd128Size * arraysize(wasm::kFpParamRegisters));
3252 int offset = 0;
3253 for (DoubleRegister reg : wasm::kFpParamRegisters) {
3254 __ movdqu(Operand(rsp, offset), reg);
3255 offset += kSimd128Size;
3256 }
3257 return offset;
3258}
3259
3260// Consumes the offset beyond the last saved FP register (as returned by
3261// {SaveWasmParams}).
3262 void RestoreWasmParams(MacroAssembler* masm, int offset) {
3263 for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
3264 offset -= kSimd128Size;
3265 __ movdqu(reg, Operand(rsp, offset));
3266 }
3267 DCHECK_EQ(0, offset);
3268 __ addq(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
3269 for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
3270 __ Pop(reg);
3271 }
3272}
3273
3274// When this builtin is called, the topmost stack entry is the calling pc.
3275// This is replaced with the following:
3276//
3277// [ calling pc ] <-- rsp; popped by {ret}.
3278// [ feedback vector ]
3279// [ Wasm instance data ]
3280// [ WASM frame marker ]
3281// [ saved rbp ] <-- rbp; this is where "calling pc" used to be.
3282void Builtins::Generate_WasmLiftoffFrameSetup(MacroAssembler* masm) {
3283 Register func_index = wasm::kLiftoffFrameSetupFunctionReg;
3284 Register vector = r15;
3285 Register calling_pc = rdi;
3286
3287 __ Pop(calling_pc);
3288 __ Push(rbp);
3289 __ Move(rbp, rsp);
3290 __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::WASM)));
3291 __ LoadTaggedField(
3292 vector, FieldOperand(kWasmImplicitArgRegister,
3293 WasmTrustedInstanceData::kFeedbackVectorsOffset));
3294 __ LoadTaggedField(vector, FieldOperand(vector, func_index, times_tagged_size,
3295 OFFSET_OF_DATA_START(FixedArray)));
3296 Label allocate_vector, done;
3297 __ JumpIfSmi(vector, &allocate_vector);
3298 __ bind(&done);
3299 __ Push(kWasmImplicitArgRegister); // Wasm instance data.
3300 __ Push(vector);
3301 __ Push(calling_pc);
3302 __ ret(0);
3303
3304 __ bind(&allocate_vector);
3305 // Feedback vector doesn't exist yet. Call the runtime to allocate it.
3306 // We temporarily change the frame type for this, because we need special
3307 // handling by the stack walker in case of GC.
3308 // For the runtime call, we create the following stack layout:
3309 //
3310 // [ reserved slot for NativeModule ] <-- arg[2]
3311 // [ ("declared") function index ] <-- arg[1] for runtime func.
3312 // [ Wasm instance data ] <-- arg[0]
3313 // [ ...spilled Wasm parameters... ]
3314 // [ calling pc ]
3315 // [ WASM_LIFTOFF_SETUP marker ]
3316 // [ saved rbp ]
3317 __ movq(Operand(rbp, TypedFrameConstants::kFrameTypeOffset),
3318 Immediate(StackFrame::TypeToMarker(StackFrame::WASM_LIFTOFF_SETUP)));
3319 __ set_has_frame(true);
3320 __ Push(calling_pc);
3321 int offset = SaveWasmParams(masm);
3322
3323 // Arguments to the runtime function: instance data, func_index.
3324 __ Push(kWasmImplicitArgRegister);
3325 __ SmiTag(func_index);
3326 __ Push(func_index);
3327 // Allocate a stack slot where the runtime function can spill a pointer
3328 // to the NativeModule.
3329 __ Push(rsp);
3330 __ Move(kContextRegister, Smi::zero());
3331 __ CallRuntime(Runtime::kWasmAllocateFeedbackVector, 3);
3332 __ movq(vector, kReturnRegister0);
3333
3334 RestoreWasmParams(masm, offset);
3335 __ Pop(calling_pc);
3336 // Restore correct frame type.
3337 __ movq(Operand(rbp, TypedFrameConstants::kFrameTypeOffset),
3338 Immediate(StackFrame::TypeToMarker(StackFrame::WASM)));
3339 __ jmp(&done);
3340}
3341
3342void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
3343 // The function index was pushed to the stack by the caller as int32.
3344 __ Pop(r15);
3345 // Convert to Smi for the runtime call.
3346 __ SmiTag(r15);
3347
3348 {
3349 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3350 FrameScope scope(masm, StackFrame::INTERNAL);
3351
3352 int offset = SaveWasmParams(masm);
3353
3354 // Push arguments for the runtime function.
3355 __ Push(kWasmImplicitArgRegister);
3356 __ Push(r15);
3357 // Initialize the JavaScript context with 0. CEntry will use it to
3358 // set the current context on the isolate.
3359 __ Move(kContextRegister, Smi::zero());
3360 __ CallRuntime(Runtime::kWasmCompileLazy, 2);
3361 // The runtime function returns the jump table slot offset as a Smi. Use
3362 // that to compute the jump target in r15.
3363 __ SmiUntagUnsigned(kReturnRegister0);
3364 __ movq(r15, kReturnRegister0);
3365
3366 RestoreWasmParams(masm, offset);
3367 // After the instance data register has been restored, we can add the jump
3368 // table start to the jump table offset already stored in r15.
3369 __ addq(r15,
3370 MemOperand(kWasmImplicitArgRegister,
3371 wasm::ObjectAccess::ToTagged(
3372 WasmTrustedInstanceData::kJumpTableStartOffset)));
3373 }
3374
3375 // Finally, jump to the jump table slot for the function.
3376 __ jmp(r15);
3377}
3378
3379void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
3380 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
3381 {
3382 FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);
3383
3384 // Save all parameter registers. They might hold live values, we restore
3385 // them after the runtime call.
3386 for (Register reg :
3387 base::Reversed(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
3388 __ Push(reg);
3389 }
3390
3391 constexpr int kFpStackSize =
3392 kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
3393 __ AllocateStackSpace(kFpStackSize);
3394 int offset = kFpStackSize;
3395 for (DoubleRegister reg :
3396 base::Reversed(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
3397 offset -= kSimd128Size;
3398 __ movdqu(Operand(rsp, offset), reg);
3399 }
3400
3401 // Initialize the JavaScript context with 0. CEntry will use it to
3402 // set the current context on the isolate.
3403 __ Move(kContextRegister, Smi::zero());
3404 __ CallRuntime(Runtime::kWasmDebugBreak, 0);
3405
3406 // Restore registers.
3407 for (DoubleRegister reg : WasmDebugBreakFrameConstants::kPushedFpRegs) {
3408 __ movdqu(reg, Operand(rsp, offset));
3409 offset += kSimd128Size;
3410 }
3411 __ addq(rsp, Immediate(kFpStackSize));
3412 for (Register reg : WasmDebugBreakFrameConstants::kPushedGpRegs) {
3413 __ Pop(reg);
3414 }
3415 }
3416
3417 __ ret(0);
3418}
3419
3420namespace {
3421// Check that the stack was in the old state (if generated code assertions are
3422// enabled), and switch to the new state.
3423 void SwitchStackState(MacroAssembler* masm, Register stack,
3424 wasm::JumpBuffer::StackState old_state,
3425 wasm::JumpBuffer::StackState new_state) {
3426#if V8_ENABLE_SANDBOX
3427 __ cmpl(MemOperand(stack, wasm::kStackStateOffset), Immediate(old_state));
3428 Label ok;
3429 __ j(equal, &ok, Label::kNear);
3430 __ Trap();
3431 __ bind(&ok);
3432#endif
3433 __ movl(MemOperand(stack, wasm::kStackStateOffset), Immediate(new_state));
3434}
3435
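// Captures the current sp and fp, the real stack limit, and the address of
// {pc} in the jump buffer of {stack}, so that execution can later be resumed
// there via LoadJumpBuffer.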
3436void FillJumpBuffer(MacroAssembler* masm, Register stack, Label* pc) {
3437 __ movq(MemOperand(stack, wasm::kStackSpOffset), rsp);
3438 __ movq(MemOperand(stack, wasm::kStackFpOffset), rbp);
3439 __ movq(kScratchRegister,
3440 __ StackLimitAsOperand(StackLimitKind::kRealStackLimit));
3441 __ movq(MemOperand(stack, wasm::kStackLimitOffset), kScratchRegister);
3442 __ leaq(kScratchRegister, MemOperand(pc, 0));
3443 __ movq(MemOperand(stack, wasm::kStackPcOffset), kScratchRegister);
3444 }
3445
3446void LoadJumpBuffer(MacroAssembler* masm, Register stack, bool load_pc,
3447 wasm::JumpBuffer::StackState expected_state) {
3448 __ movq(rsp, MemOperand(stack, wasm::kStackSpOffset));
3449 __ movq(rbp, MemOperand(stack, wasm::kStackFpOffset));
3450 SwitchStackState(masm, stack, expected_state, wasm::JumpBuffer::Active);
3451 if (load_pc) {
3452 __ jmp(MemOperand(stack, wasm::kStackPcOffset));
3453 }
3454 // The stack limit is set separately under the ExecutionAccess lock.
3455}
3456
3457void LoadTargetJumpBuffer(MacroAssembler* masm, Register target_stack,
3458 wasm::JumpBuffer::StackState expected_state) {
3459 MemOperand GCScanSlotPlace =
3460 MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3461 __ Move(GCScanSlotPlace, 0);
3462 // Switch stack!
3463 LoadJumpBuffer(masm, target_stack, false, expected_state);
3464}
3465
3466// Updates the stack limit and central stack info, and validates the switch.
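// Registers listed in {keep} are live across the C call and are saved and
// restored around it.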
3467void SwitchStacks(MacroAssembler* masm, Register old_stack, bool return_switch,
3468 const std::initializer_list<Register> keep) {
3469 using ER = ExternalReference;
3470 for (auto reg : keep) {
3471 __ Push(reg);
3472 }
3473 {
3474 FrameScope scope(masm, StackFrame::MANUAL);
3475 // Move {old_stack} first in case it aliases kCArgRegs[0].
3476 __ Move(kCArgRegs[1], old_stack);
3477 __ Move(kCArgRegs[0], ER::isolate_address());
3478 __ PrepareCallCFunction(2);
3479 __ CallCFunction(
3480 return_switch ? ER::wasm_return_switch() : ER::wasm_switch_stacks(), 2);
3481 }
3482 for (auto it = std::rbegin(keep); it != std::rend(keep); ++it) {
3483 __ Pop(*it);
3484 }
3485}
3486
3487void ReloadParentStack(MacroAssembler* masm, Register promise,
3488 Register return_value, Register context, Register tmp1,
3489 Register tmp2) {
3490 Register active_stack = tmp1;
3491 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
3492
3493 // We don't need to save the full register state since we are switching out of
3494 // this stack for the last time. Mark the stack as retired.
3495 SwitchStackState(masm, active_stack, wasm::JumpBuffer::Active,
3496 wasm::JumpBuffer::Retired);
3497 Register parent = tmp2;
3498 __ Move(parent, MemOperand(active_stack, wasm::kStackParentOffset));
3499 __ StoreRootRelative(IsolateData::active_stack_offset(), parent);
3500 // Switch stack!
3501 SwitchStacks(masm, active_stack, true,
3502 {promise, return_value, context, parent});
3503 LoadJumpBuffer(masm, parent, false, wasm::JumpBuffer::Inactive);
3504}
3505
3506// Loads the context field of the WasmTrustedInstanceData or WasmImportData
3507// depending on the data's type, and places the result in the input register.
3508void GetContextFromImplicitArg(MacroAssembler* masm, Register data) {
3509 __ LoadTaggedField(kScratchRegister,
3510 FieldOperand(data, HeapObject::kMapOffset));
3511 __ CmpInstanceType(kScratchRegister, WASM_TRUSTED_INSTANCE_DATA_TYPE);
3512 Label instance;
3513 Label end;
3514 __ j(equal, &instance);
3515 __ LoadTaggedField(data,
3516 FieldOperand(data, WasmImportData::kNativeContextOffset));
3517 __ jmp(&end);
3518 __ bind(&instance);
3519 __ LoadTaggedField(
3520 data, FieldOperand(data, WasmTrustedInstanceData::kNativeContextOffset));
3521 __ bind(&end);
3522}
3523
3524void RestoreParentSuspender(MacroAssembler* masm, Register tmp1) {
3525 Register suspender = tmp1;
3526 __ LoadRoot(suspender, RootIndex::kActiveSuspender);
3527 __ LoadTaggedField(
3528 suspender, FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3529 __ CompareRoot(suspender, RootIndex::kUndefinedValue);
3530 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
3531}
3532
3533void ResetStackSwitchFrameStackSlots(MacroAssembler* masm) {
3534 __ Move(kScratchRegister, Smi::zero());
3535 __ movq(MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset),
3536 kScratchRegister);
3537 __ movq(MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset),
3538 kScratchRegister);
3539 }
3540
3541void SwitchToAllocatedStack(MacroAssembler* masm, Register wasm_instance,
3542 Register wrapper_buffer, Register original_fp,
3543 Register new_wrapper_buffer, Register scratch,
3544 Label* suspend) {
3545 ResetStackSwitchFrameStackSlots(masm);
3546 Register parent_stack = new_wrapper_buffer;
3547 __ LoadRootRelative(parent_stack, IsolateData::active_stack_offset());
3548 __ Move(parent_stack, MemOperand(parent_stack, wasm::kStackParentOffset));
3549 FillJumpBuffer(masm, parent_stack, suspend);
3550 SwitchStacks(masm, parent_stack, false,
3551 {kWasmImplicitArgRegister, wrapper_buffer});
3552 parent_stack = no_reg;
3553 Register target_stack = scratch;
3554 __ LoadRootRelative(target_stack, IsolateData::active_stack_offset());
3555 // Save the old stack's rbp in r9, and use it to access the parameters in
3556 // the parent frame.
3557 __ movq(original_fp, rbp);
3558 LoadTargetJumpBuffer(masm, target_stack, wasm::JumpBuffer::Suspended);
3559 // Return address slot. The builtin itself returns by switching to the parent
3560 // jump buffer and does not actually use this slot, but it is read by the
3561 // profiler.
3562 __ Push(Immediate(kNullAddress));
3563 // Push the loaded rbp. We know it is null, because there is no frame yet,
3564 // so we could also push 0 directly. In any case we need to push it, because
3565 // this marks the base of the stack segment for the stack frame iterator.
3566 __ EnterFrame(StackFrame::STACK_SWITCH);
3567 int stack_space =
3568 StackSwitchFrameConstants::kNumSpillSlots * kSystemPointerSize +
3569 JSToWasmWrapperFrameConstants::kWrapperBufferSize;
3570 __ AllocateStackSpace(stack_space);
3571 __ movq(new_wrapper_buffer, rsp);
3572 // Copy data needed for return handling from old wrapper buffer to new one.
3573 // kWrapperBufferRefReturnCount will be copied too, because 8 bytes are copied
3574 // at the same time.
3575 static_assert(JSToWasmWrapperFrameConstants::kWrapperBufferRefReturnCount ==
3576 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount + 4);
3577 __ movq(kScratchRegister,
3578 MemOperand(wrapper_buffer,
3579 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount));
3580 __ movq(MemOperand(new_wrapper_buffer,
3581 JSToWasmWrapperFrameConstants::kWrapperBufferReturnCount),
3582 kScratchRegister);
3583 __ movq(
3584 kScratchRegister,
3585 MemOperand(
3586 wrapper_buffer,
3587 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray));
3588 __ movq(
3589 MemOperand(
3590 new_wrapper_buffer,
3591 JSToWasmWrapperFrameConstants::kWrapperBufferSigRepresentationArray),
3592 kScratchRegister);
3593 }
3594
3595void SwitchBackAndReturnPromise(MacroAssembler* masm, Register tmp1,
3596 Register tmp2, wasm::Promise mode,
3597 Label* return_promise) {
3598 // The return value of the wasm function becomes the parameter of the
3599 // FulfillPromise builtin, and the promise is the return value of this
3600 // wrapper.
3601 static const Builtin_FulfillPromise_InterfaceDescriptor desc;
3602 static_assert(kReturnRegister0 == desc.GetRegisterParameter(0));
3603 Register promise = desc.GetRegisterParameter(0);
3604 Register return_value = desc.GetRegisterParameter(1);
3605 if (mode == wasm::kPromise) {
3606 __ movq(return_value, kReturnRegister0);
3607 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3608 __ LoadTaggedField(
3609 promise, FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3610 }
3611
3612 __ movq(kContextRegister,
3613 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3614 GetContextFromImplicitArg(masm, kContextRegister);
3615 ReloadParentStack(masm, promise, return_value, kContextRegister, tmp1, tmp2);
3616 RestoreParentSuspender(masm, tmp1);
3617
3618 if (mode == wasm::kPromise) {
3619 __ Move(MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3620 1);
3621 __ Push(promise);
3622 __ CallBuiltin(Builtin::kFulfillPromise);
3623 __ Pop(promise);
3624 }
3625
3626 __ bind(return_promise);
3627}
3628
3629void GenerateExceptionHandlingLandingPad(MacroAssembler* masm,
3630 Label* return_promise) {
3631 int catch_handler = __ pc_offset();
3632
3633 __ endbr64();
3634
3635 // Restore rsp to free the reserved stack slots for the sections.
3636 __ leaq(rsp, MemOperand(rbp, StackSwitchFrameConstants::kLastSpillOffset));
3637
3638 // Unset thread_in_wasm_flag.
3639 Register thread_in_wasm_flag_addr = r8;
3640 __ movq(
3641 thread_in_wasm_flag_addr,
3642 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3643 __ movl(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3644 thread_in_wasm_flag_addr = no_reg;
3645
3646 // The exception becomes the parameter of the RejectPromise builtin, and the
3647 // promise is the return value of this wrapper.
3648 static const Builtin_RejectPromise_InterfaceDescriptor desc;
3649 Register promise = desc.GetRegisterParameter(0);
3650 Register reason = desc.GetRegisterParameter(1);
3651 Register debug_event = desc.GetRegisterParameter(2);
3652 __ movq(reason, kReturnRegister0);
3653 __ LoadRoot(promise, RootIndex::kActiveSuspender);
3654 __ LoadTaggedField(
3655 promise, FieldOperand(promise, WasmSuspenderObject::kPromiseOffset));
3656 __ movq(kContextRegister,
3657 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3658 GetContextFromImplicitArg(masm, kContextRegister);
3659
3660 ReloadParentStack(masm, promise, reason, kContextRegister, r8, rdi);
3661 RestoreParentSuspender(masm, r8);
3662
3663 __ Move(MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3664 1);
3665 __ Push(promise);
3666 __ LoadRoot(debug_event, RootIndex::kTrueValue);
3667 __ CallBuiltin(Builtin::kRejectPromise);
3668 __ Pop(promise);
3669
3670 // Run the rest of the wrapper normally (switch to the old stack,
3671 // deconstruct the frame, ...).
3672 __ jmp(return_promise);
3673
3674 masm->isolate()->builtins()->SetJSPIPromptHandlerOffset(catch_handler);
3675}
3676
3677void JSToWasmWrapperHelper(MacroAssembler* masm, wasm::Promise mode) {
3678 bool stack_switch = mode == wasm::kPromise || mode == wasm::kStressSwitch;
3679 __ EnterFrame(stack_switch ? StackFrame::STACK_SWITCH
3680 : StackFrame::JS_TO_WASM);
3681
3682 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3683 kSystemPointerSize);
3684
3685 // Load the implicit argument (instance data or import data) from the frame.
3686 __ movq(kWasmImplicitArgRegister,
3687 MemOperand(rbp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3688
3689 Register wrapper_buffer =
3690 WasmJSToWasmWrapperDescriptor::WrapperBufferRegister();
3691 Register original_fp = stack_switch ? r9 : rbp;
3692 Register new_wrapper_buffer = stack_switch ? rbx : wrapper_buffer;
3693 Label suspend;
3694 if (stack_switch) {
3695 SwitchToAllocatedStack(masm, kWasmImplicitArgRegister, wrapper_buffer,
3696 original_fp, new_wrapper_buffer, rax, &suspend);
3697 }
3698
3699 __ movq(MemOperand(rbp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset),
3700 new_wrapper_buffer);
3701 if (stack_switch) {
3702 __ movq(MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset),
3704 Register result_array = kScratchRegister;
3705 __ movq(result_array,
3706 MemOperand(original_fp,
3707 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3708 __ movq(MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset),
3709 result_array);
3710 }
3711
3712 Register result_size = rax;
3713 __ movq(
3714 result_size,
3715 MemOperand(
3716 wrapper_buffer,
3717 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferSize));
3718 __ shlq(result_size, Immediate(kSystemPointerSizeLog2));
3719 __ subq(rsp, result_size);
3720 __ movq(
3721 MemOperand(
3722 new_wrapper_buffer,
3723 JSToWasmWrapperFrameConstants::kWrapperBufferStackReturnBufferStart),
3724 rsp);
3725 Register call_target = rdi;
3726 // params_start should not alias any of the parameter registers.
3727 Register params_start = r11;
3728 __ movq(params_start,
3729 MemOperand(wrapper_buffer,
3730 JSToWasmWrapperFrameConstants::kWrapperBufferParamStart));
3731 Register params_end = rbx;
3732 __ movq(params_end,
3733 MemOperand(wrapper_buffer,
3734 JSToWasmWrapperFrameConstants::kWrapperBufferParamEnd));
3735
3736 __ LoadWasmCodePointer(
3737 call_target,
3738 MemOperand(wrapper_buffer,
3739 JSToWasmWrapperFrameConstants::kWrapperBufferCallTarget));
3740
3741 Register last_stack_param = rcx;
3742
3743 // The first GP parameter is the data, which we handle specially.
3744 int stack_params_offset =
3745 (arraysize(wasm::kGpParamRegisters) - 1) * kSystemPointerSize +
3746 arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3747
3748 __ leaq(last_stack_param, MemOperand(params_start, stack_params_offset));
3749
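// Stack parameters live between {last_stack_param} and {params_end} in the
// wrapper buffer; they are pushed starting from {params_end}, so the first
// stack parameter ends up at the lowest stack address.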
3750 Label loop_start;
3751 __ bind(&loop_start);
3752
3753 Label finish_stack_params;
3754 __ cmpq(last_stack_param, params_end);
3755 __ j(greater_equal, &finish_stack_params);
3756
3757 // Push parameter
3758 __ subq(params_end, Immediate(kSystemPointerSize));
3759 __ pushq(MemOperand(params_end, 0));
3760 __ jmp(&loop_start);
3761
3762 __ bind(&finish_stack_params);
3763
3764 int next_offset = 0;
3765 for (size_t i = 1; i < arraysize(wasm::kGpParamRegisters); ++i) {
3766 // Check that {params_start} does not overlap with any of the parameter
3767 // registers, so that we don't overwrite it by accident with the loads
3768 // below.
3769 DCHECK_NE(params_start, wasm::kGpParamRegisters[i]);
3770 __ movq(wasm::kGpParamRegisters[i], MemOperand(params_start, next_offset));
3771 next_offset += kSystemPointerSize;
3772 }
3773
3774 for (size_t i = 0; i < arraysize(wasm::kFpParamRegisters); ++i) {
3775 __ Movsd(wasm::kFpParamRegisters[i], MemOperand(params_start, next_offset));
3776 next_offset += kDoubleSize;
3777 }
3778 DCHECK_EQ(next_offset, stack_params_offset);
3779
3780 Register thread_in_wasm_flag_addr = r12;
3781 __ movq(
3782 thread_in_wasm_flag_addr,
3783 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3784 __ movl(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(1));
3785 if (stack_switch) {
3786 __ Move(MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
3787 0);
3788 }
3789
3790 // We do the call without a signature check here, since the wrapper loaded the
3791 // signature from the same trusted object as the call target to set up the
3792 // stack layout. We could add a signature hash and pass it through to verify
3793 // it here, but an attacker that could corrupt the signature could also
3794 // corrupt that signature hash (which is outside of the sandbox).
3795 __ CallWasmCodePointerNoSignatureCheck(call_target);
3796
3797 __ movq(
3798 thread_in_wasm_flag_addr,
3799 MemOperand(kRootRegister, Isolate::thread_in_wasm_flag_address_offset()));
3800 __ movl(MemOperand(thread_in_wasm_flag_addr, 0), Immediate(0));
3801 thread_in_wasm_flag_addr = no_reg;
3802
3803 wrapper_buffer = rcx;
3804 for (size_t i = 0; i < arraysize(wasm::kGpReturnRegisters); ++i) {
3805 DCHECK_NE(wrapper_buffer, wasm::kGpReturnRegisters[i]);
3806 }
3807
3808 __ movq(wrapper_buffer,
3809 MemOperand(rbp, JSToWasmWrapperFrameConstants::kWrapperBufferOffset));
3810
3811 __ Movsd(MemOperand(
3812 wrapper_buffer,
3813 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister1),
3814 wasm::kFpReturnRegisters[0]);
3815 __ Movsd(MemOperand(
3816 wrapper_buffer,
3817 JSToWasmWrapperFrameConstants::kWrapperBufferFPReturnRegister2),
3818 wasm::kFpReturnRegisters[1]);
3819 __ movq(MemOperand(
3820 wrapper_buffer,
3821 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister1),
3822 wasm::kGpReturnRegisters[0]);
3823 __ movq(MemOperand(
3824 wrapper_buffer,
3825 JSToWasmWrapperFrameConstants::kWrapperBufferGPReturnRegister2),
3826 wasm::kGpReturnRegisters[1]);
3827
3828 // Call the return value builtin with
3829 // rax: wasm instance.
3830 // rbx: the result JSArray for multi-return.
3831 // rcx: pointer to the byte buffer which contains all parameters.
3832 if (stack_switch) {
3833 __ movq(rbx,
3834 MemOperand(rbp, StackSwitchFrameConstants::kResultArrayOffset));
3835 __ movq(rax,
3836 MemOperand(rbp, StackSwitchFrameConstants::kImplicitArgOffset));
3837 } else {
3838 __ movq(rbx,
3839 MemOperand(rbp,
3840 JSToWasmWrapperFrameConstants::kResultArrayParamOffset));
3841 __ movq(rax,
3842 MemOperand(rbp, JSToWasmWrapperFrameConstants::kImplicitArgOffset));
3843 }
3844 GetContextFromImplicitArg(masm, rax);
3845 __ CallBuiltin(Builtin::kJSToWasmHandleReturns);
3846
3847 Label return_promise;
3848 if (stack_switch) {
3849 SwitchBackAndReturnPromise(masm, r8, rdi, mode, &return_promise);
3850 }
3851 __ bind(&suspend);
3852 __ endbr64();
3853
3854 __ LeaveFrame(stack_switch ? StackFrame::STACK_SWITCH
3855 : StackFrame::JS_TO_WASM);
3856 __ ret(0);
3857
3858 // Catch handler for the stack-switching wrapper: reject the promise with the
3859 // thrown exception.
3860 if (mode == wasm::kPromise) {
3861 GenerateExceptionHandlingLandingPad(masm, &return_promise);
3862 }
3863}
3864} // namespace
3865
3866void Builtins::Generate_JSToWasmWrapperAsm(MacroAssembler* masm) {
3867 JSToWasmWrapperHelper(masm, wasm::kNoPromise);
3868}
3869
3870void Builtins::Generate_WasmReturnPromiseOnSuspendAsm(MacroAssembler* masm) {
3871 JSToWasmWrapperHelper(masm, wasm::kPromise);
3872}
3873
3874void Builtins::Generate_JSToWasmStressSwitchStacksAsm(MacroAssembler* masm) {
3875 JSToWasmWrapperHelper(masm, wasm::kStressSwitch);
3876}
3877
3878void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) {
3879 // Pop the return address into a scratch register and push it later again. The
3880 // return address has to be on top of the stack after all registers have been
3881 // pushed, so that the return instruction can find it.
3882 __ popq(kScratchRegister);
3883
3884 int required_stack_space = arraysize(wasm::kFpParamRegisters) * kDoubleSize;
3885 __ subq(rsp, Immediate(required_stack_space));
3886 for (int i = 0; i < static_cast<int>(arraysize(wasm::kFpParamRegisters));
3887 ++i) {
3888 __ Movsd(MemOperand(rsp, i * kDoubleSize), wasm::kFpParamRegisters[i]);
3889 }
3890 // Push the GP registers in reverse order so that they are on the stack like
3891 // in an array, with the first item being at the lowest address.
3892 for (size_t i = arraysize(wasm::kGpParamRegisters) - 1; i > 0; --i) {
3893 __ pushq(wasm::kGpParamRegisters[i]);
3894 }
3895 // Signature slot.
3896 __ pushq(rax);
3897 // Push the return address again.
3898 __ pushq(kScratchRegister);
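// At this point the stack, from rsp upwards, holds the return address, the
// signature slot (rax), the GP parameter registers in array order, and the
// spilled FP parameter registers, which is the layout the tail-called
// WasmToJsWrapperCSA builtin consumes.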
3899 __ TailCallBuiltin(Builtin::kWasmToJsWrapperCSA);
3900}
3901
3902void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) {
3903 __ addq(
3904 kWasmTrapHandlerFaultAddressRegister,
3905 Immediate(WasmFrameConstants::kProtectedInstructionReturnAddressOffset));
3906 __ pushq(kWasmTrapHandlerFaultAddressRegister);
3907#ifdef V8_ENABLE_CET_SHADOW_STACK
3908 // This landing pad pushes kWasmTrapHandlerFaultAddressRegister to the stack
3909 // as a return address, allowing `Isolate::UnwindAndFindHandler` to locate it.
3910 // However, this creates a mismatch in the return address count between the
3911 // shadow stack and the real stack. To resolve this, we push a dummy value
3912 // onto the shadow stack to maintain the correct count. This should be used
3913 // only for unwinding, not for returning.
3914 Label push_dummy_on_shadow_stack;
3915 __ call(&push_dummy_on_shadow_stack);
3916 __ Trap(); // Unreachable.
3917 __ bind(&push_dummy_on_shadow_stack);
3918 // Remove the return address pushed onto the real stack.
3919 __ addq(rsp, Immediate(kSystemPointerSize));
3920#endif // V8_ENABLE_CET_SHADOW_STACK
3921 __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap);
3922}
3923
3924void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
3925 // Set up the stackframe.
3926 __ EnterFrame(StackFrame::STACK_SWITCH);
3927
3928 Register suspender = rax;
3929
3930 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
3931 kSystemPointerSize);
3932 // Set a sentinel value for the spill slots visited by the GC.
3933 ResetStackSwitchFrameStackSlots(masm);
3934
3935 // -------------------------------------------
3936 // Save current state in active jump buffer.
3937 // -------------------------------------------
3938 Label resume;
3939 Register stack = rbx;
3940 __ LoadRootRelative(stack, IsolateData::active_stack_offset());
3941 FillJumpBuffer(masm, stack, &resume);
3942 SwitchStackState(masm, stack, wasm::JumpBuffer::Active,
3943 wasm::JumpBuffer::Suspended);
3944 // live: [rax, rbx]
3945
3946 Register suspender_stack = rdx;
3947 __ LoadExternalPointerField(
3948 suspender_stack,
3949 FieldOperand(suspender, WasmSuspenderObject::kStackOffset),
3951#ifdef DEBUG
3952 // -------------------------------------------
3953 // Check that the suspender's stack is the active stack.
3954 // -------------------------------------------
3955 // TODO(thibaudm): Once we add core stack-switching instructions, this check
3956 // will not hold anymore: it's possible that the active stack changed
3957 // (due to an internal switch), so we have to update the suspender.
3958 __ cmpq(suspender_stack, stack);
3959 Label ok;
3960 __ j(equal, &ok);
3961 __ Trap();
3962 __ bind(&ok);
3963#endif
3964
3965 // -------------------------------------------
3966 // Update roots.
3967 // -------------------------------------------
3968 Register caller = rcx;
3969 __ Move(caller, MemOperand(suspender_stack, wasm::kStackParentOffset));
3970 __ StoreRootRelative(IsolateData::active_stack_offset(), caller);
3971 Register parent = rdx;
3972 __ LoadTaggedField(
3973 parent, FieldOperand(suspender, WasmSuspenderObject::kParentOffset));
3974 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), parent);
3975 parent = no_reg;
3976 // live: [suspender:rax, stack:rbx, caller:rcx]
3977
3978 // -------------------------------------------
3979 // Load jump buffer.
3980 // -------------------------------------------
3981 SwitchStacks(masm, stack, false, {caller, suspender});
3982 __ LoadTaggedField(
3983 kReturnRegister0,
3984 FieldOperand(suspender, WasmSuspenderObject::kPromiseOffset));
3985 MemOperand GCScanSlotPlace =
3986 MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset);
3987 __ Move(GCScanSlotPlace, 0);
3988 LoadJumpBuffer(masm, caller, true, wasm::JumpBuffer::Inactive);
3989 __ Trap();
3990 __ bind(&resume);
3991 __ endbr64();
3992 __ LeaveFrame(StackFrame::STACK_SWITCH);
3993 __ ret(0);
3994}
3995
3996namespace {
3997// Resume the suspender stored in the closure. We generate two variants of this
3998// builtin: the onFulfilled variant resumes execution at the saved PC and
3999// forwards the value, the onRejected variant throws the value.
4000
4001void Generate_WasmResumeHelper(MacroAssembler* masm, wasm::OnResume on_resume) {
4002 __ EnterFrame(StackFrame::STACK_SWITCH);
4003
4004 Register param_count = rax;
4005 __ decq(param_count); // Exclude receiver.
4006 Register closure = kJSFunctionRegister; // rdi
4007
4008 __ AllocateStackSpace(StackSwitchFrameConstants::kNumSpillSlots *
4009 kSystemPointerSize);
4010 // Set a sentinel value for the spill slots visited by the GC.
4011 ResetStackSwitchFrameStackSlots(masm);
4012
4013 param_count = no_reg;
4014
4015 // -------------------------------------------
4016 // Load suspender from closure.
4017 // -------------------------------------------
4018 Register sfi = closure;
4019 __ LoadTaggedField(
4020 sfi,
4021 MemOperand(
4022 closure,
4023 wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()));
4024 Register resume_data = sfi;
4025 __ LoadTaggedField(
4026 resume_data,
4027 FieldOperand(sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset));
4028 // The write barrier uses a fixed register for the host object (rdi). The next
4029 // barrier is on the suspender, so load it in rdi directly.
4030 Register suspender = rdi;
4031 __ LoadTaggedField(
4032 suspender, FieldOperand(resume_data, WasmResumeData::kSuspenderOffset));
4033 closure = no_reg;
4034 sfi = no_reg;
4035
4036 // -------------------------------------------
4037 // Save current state.
4038 // -------------------------------------------
4039 Label suspend;
4040 Register active_stack = r9;
4041 __ LoadRootRelative(active_stack, IsolateData::active_stack_offset());
4042 FillJumpBuffer(masm, active_stack, &suspend);
4043 SwitchStackState(masm, active_stack, wasm::JumpBuffer::Active,
4044 wasm::JumpBuffer::Inactive);
4045
4046 // -------------------------------------------
4047 // Set the suspender and stack parents and update the roots
4048 // -------------------------------------------
4049 Register active_suspender = rcx;
4050 Register slot_address = WriteBarrierDescriptor::SlotAddressRegister();
4051 // Check that the fixed register isn't one that is already in use.
4052 DCHECK(slot_address == rbx || slot_address == r8);
4053 __ LoadRoot(active_suspender, RootIndex::kActiveSuspender);
4054 __ StoreTaggedField(
4055 FieldOperand(suspender, WasmSuspenderObject::kParentOffset),
4056 active_suspender);
4057 __ RecordWriteField(suspender, WasmSuspenderObject::kParentOffset,
4058 active_suspender, slot_address, SaveFPRegsMode::kIgnore);
4059 __ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
4060
4061 Register target_stack = suspender;
4062 __ LoadExternalPointerField(
4063 target_stack, FieldOperand(suspender, WasmSuspenderObject::kStackOffset),
4065 suspender = no_reg;
4066 __ StoreRootRelative(IsolateData::active_stack_offset(), target_stack);
4067
4068 SwitchStacks(masm, active_stack, false, {target_stack});
4069
4070 // -------------------------------------------
4071 // Load state from target jmpbuf (longjmp).
4072 // -------------------------------------------
4073 // Move resolved value to return register.
4074 __ movq(kReturnRegister0, Operand(rbp, 3 * kSystemPointerSize));
4075 __ Move(MemOperand(rbp, StackSwitchFrameConstants::kGCScanSlotCountOffset),
4076 0);
4077 if (on_resume == wasm::OnResume::kThrow) {
4078 // Switch to the target stack without restoring the PC.
4079 LoadJumpBuffer(masm, target_stack, false, wasm::JumpBuffer::Suspended);
4080 // Pop this frame now. The unwinder expects that the first STACK_SWITCH
4081 // frame is the outermost one.
4082 __ LeaveFrame(StackFrame::STACK_SWITCH);
4083 // Forward the onRejected value to kThrow.
4084 __ pushq(kReturnRegister0);
4085 __ Move(kContextRegister, Smi::zero());
4086 __ CallRuntime(Runtime::kThrow);
4087 } else {
4088 // Resume the stack normally.
4089 LoadJumpBuffer(masm, target_stack, true, wasm::JumpBuffer::Suspended);
4090 }
4091 __ Trap();
4092 __ bind(&suspend);
4093 __ endbr64();
4094 __ LeaveFrame(StackFrame::STACK_SWITCH);
4095 // Pop receiver + parameter.
4096 __ ret(2 * kSystemPointerSize);
4097}
4098} // namespace
4099
4100void Builtins::Generate_WasmResume(MacroAssembler* masm) {
4101 Generate_WasmResumeHelper(masm, wasm::OnResume::kContinue);
4102}
4103
4104void Builtins::Generate_WasmReject(MacroAssembler* masm) {
4105 Generate_WasmResumeHelper(masm, wasm::OnResume::kThrow);
4106}
4107
4108void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
4109 MemOperand OSRTargetSlot(rbp, -wasm::kOSRTargetOffset);
4110 __ movq(kScratchRegister, OSRTargetSlot);
4111 __ Move(OSRTargetSlot, 0);
4112 __ jmp(kScratchRegister);
4113}
4114
4115namespace {
4116static constexpr Register kOldSPRegister = r12;
4117
4118void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm,
4119 int r12_stack_slot_index) {
4120 using ER = ExternalReference;
4121
4122 // Store r12 value on the stack to restore on exit from the builtin.
4123 __ movq(ExitFrameStackSlotOperand(r12_stack_slot_index * kSystemPointerSize),
4124 r12);
4125
4126 // kOldSPRegister is used as a switch flag: if it is zero, no switch was
4127 // performed; otherwise it contains the old sp value.
4128 __ Move(kOldSPRegister, 0);
4129
4130 // Using arg1-2 regs as temporary registers, because they will be rewritten
4131 // before exiting to native code anyway.
4132 DCHECK(
4133 !AreAliased(kCArgRegs[0], kCArgRegs[1], kOldSPRegister, rax, rbx, r15));
4134
4135 ER on_central_stack_flag = ER::Create(
4136 IsolateAddressId::kIsOnCentralStackFlagAddress, masm->isolate());
4137
4138 Label do_not_need_to_switch;
4139 __ cmpb(__ ExternalReferenceAsOperand(on_central_stack_flag), Immediate(0));
4140 __ j(not_zero, &do_not_need_to_switch);
4141
4142 // Perform switching to the central stack.
4143
4144 __ movq(kOldSPRegister, rsp);
4145
4146 static constexpr Register argc_input = rax;
4147 Register central_stack_sp = kCArgRegs[1];
4148 DCHECK(!AreAliased(central_stack_sp, argc_input));
4149 {
4150 FrameScope scope(masm, StackFrame::MANUAL);
4151 __ pushq(argc_input);
4152
4153 __ Move(kCArgRegs[0], ER::isolate_address());
4154 __ Move(kCArgRegs[1], kOldSPRegister);
4155 __ PrepareCallCFunction(2);
4156 __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2,
4158 __ movq(central_stack_sp, kReturnRegister0);
4159
4160 __ popq(argc_input);
4161 }
4162
4163 static constexpr int kReturnAddressSlotOffset = 1 * kSystemPointerSize;
4164 __ subq(central_stack_sp, Immediate(kReturnAddressSlotOffset));
4165 __ movq(rsp, central_stack_sp);
4166 // rsp should be aligned to 16 bytes,
4167 // but this is not guaranteed for the stored SP.
4168 __ AlignStackPointer();
4169
4170#ifdef V8_TARGET_OS_WIN
4171 // When we switch stacks, the home space stays allocated on the old stack.
4172 // Allocate home space on the central stack to prevent stack corruption.
4173 __ subq(rsp, Immediate(kWindowsHomeStackSlots * kSystemPointerSize));
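// (The Windows x64 calling convention reserves four home slots, i.e. 32
// bytes, for the callee.)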
4174#endif // V8_TARGET_OS_WIN
4175
4176 // Update the sp saved in the frame.
4177 // It will be used to calculate the callee pc during GC.
4178 // The pc is going to be on the new stack segment, so rewrite it here.
4179 __ movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
4180
4181 __ bind(&do_not_need_to_switch);
4182}
4183
4184void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm,
4185 int r12_stack_slot_index) {
4186 using ER = ExternalReference;
4187
4188 Label no_stack_change;
4189 __ cmpq(kOldSPRegister, Immediate(0));
4190 __ j(equal, &no_stack_change);
4191 __ movq(rsp, kOldSPRegister);
4192
4193 {
4194 FrameScope scope(masm, StackFrame::MANUAL);
4195 __ pushq(kReturnRegister0);
4196 __ pushq(kReturnRegister1);
4197
4198 __ Move(kCArgRegs[0], ER::isolate_address());
4199 __ PrepareCallCFunction(1);
4200 __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1,
4202
4203 __ popq(kReturnRegister1);
4204 __ popq(kReturnRegister0);
4205 }
4206
4207 __ bind(&no_stack_change);
4208
4209 // Restore previous value of r12.
4210 __ movq(r12,
4211 ExitFrameStackSlotOperand(r12_stack_slot_index * kSystemPointerSize));
4212}
4213
4214} // namespace
4215
4216#endif // V8_ENABLE_WEBASSEMBLY
4217
4218void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
4219 ArgvMode argv_mode, bool builtin_exit_frame,
4220 bool switch_to_central_stack) {
4221 CHECK(result_size == 1 || result_size == 2);
4222
4223 using ER = ExternalReference;
4224
4225 // rax: number of arguments including receiver
4226 // rbx: pointer to C function (C callee-saved)
4227 // rbp: frame pointer of calling JS frame (restored after C call)
4228 // rsp: stack pointer (restored after C call)
4229 // rsi: current context (restored)
4230 //
4231 // If argv_mode == ArgvMode::kRegister:
4232 // r15: pointer to the first argument
4233
4234 const int kSwitchToTheCentralStackSlots = switch_to_central_stack ? 1 : 0;
4235#ifdef V8_TARGET_OS_WIN
4236 // The Windows 64-bit ABI only allows a single word to be returned in
4237 // register rax. Larger return sizes must be written to an address passed as
4238 // a hidden first argument.
4239 static constexpr int kMaxRegisterResultSize = 1;
4240 const int kReservedStackSlots = kSwitchToTheCentralStackSlots +
4241 (result_size <= kMaxRegisterResultSize ? 0 : result_size);
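// For example, with result_size == 2 and switch_to_central_stack enabled,
// three slots are reserved: two for the oversized result and one for
// spilling r12 during the central-stack switch.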
4242#else
4243 // Simple results are returned in rax, and a struct of two pointers is
4244 // returned in rax+rdx.
4245 static constexpr int kMaxRegisterResultSize = 2;
4246 const int kReservedStackSlots = kSwitchToTheCentralStackSlots;
4247 CHECK_LE(result_size, kMaxRegisterResultSize);
4248#endif // V8_TARGET_OS_WIN
4249#if V8_ENABLE_WEBASSEMBLY
4250 const int kR12SpillSlot = kReservedStackSlots - 1;
4251#endif // V8_ENABLE_WEBASSEMBLY
4252
4253 __ EnterExitFrame(
4254 kReservedStackSlots,
4255 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT, rbx);
4256
4257 // Set up argv in a callee-saved register. It is reused below so it must be
4258 // retained across the C call. In case of ArgvMode::kRegister, r15 has
4259 // already been set by the caller.
4260 static constexpr Register kArgvRegister = r15;
4261 if (argv_mode == ArgvMode::kStack) {
4262 int offset =
4263 StandardFrameConstants::kFixedFrameSizeAboveFp - kReceiverOnStackSize;
4264 __ leaq(kArgvRegister,
4265 Operand(rbp, rax, times_system_pointer_size, offset));
4266 }
4267
4268 // rbx: pointer to builtin function (C callee-saved).
4269 // rbp: frame pointer of exit frame (restored after C call).
4270 // rsp: stack pointer (restored after C call).
4271 // rax: number of arguments including receiver
4272 // r15: argv pointer (C callee-saved).
4273
4274#if V8_ENABLE_WEBASSEMBLY
4275 if (switch_to_central_stack) {
4276 SwitchToTheCentralStackIfNeeded(masm, kR12SpillSlot);
4277 }
4278#endif // V8_ENABLE_WEBASSEMBLY
4279
4280 // Check stack alignment.
4281 if (v8_flags.debug_code) {
4282 __ CheckStackAlignment();
4283 }
4284
4285 // Call C function. The arguments object will be created by stubs declared by
4286 // DECLARE_RUNTIME_FUNCTION().
4287 if (result_size <= kMaxRegisterResultSize) {
4288 // Pass a pointer to the Arguments object as the first argument.
4289 // Return result in single register (rax), or a register pair (rax, rdx).
4290 __ movq(kCArgRegs[0], rax); // argc.
4291 __ movq(kCArgRegs[1], kArgvRegister); // argv.
4292 __ Move(kCArgRegs[2], ER::isolate_address());
4293 } else {
4294#ifdef V8_TARGET_OS_WIN
4295 DCHECK_LE(result_size, 2);
4296 // Pass a pointer to the result location as the first argument.
4297 __ leaq(kCArgRegs[0], ExitFrameStackSlotOperand(0 * kSystemPointerSize));
4298 // Pass a pointer to the Arguments object as the second argument.
4299 __ movq(kCArgRegs[1], rax); // argc.
4300 __ movq(kCArgRegs[2], kArgvRegister); // argv.
4301 __ Move(kCArgRegs[3], ER::isolate_address());
4302#else
4303 UNREACHABLE();
4304#endif // V8_TARGET_OS_WIN
4305 }
4306 __ call(rbx);
4307
4308#ifdef V8_TARGET_OS_WIN
4309 if (result_size > kMaxRegisterResultSize) {
4310 // Read result values stored on stack.
4311 DCHECK_EQ(result_size, 2);
4312 __ movq(kReturnRegister0,
4313 ExitFrameStackSlotOperand(0 * kSystemPointerSize));
4314 __ movq(kReturnRegister1,
4315 ExitFrameStackSlotOperand(1 * kSystemPointerSize));
4316 }
4317#endif // V8_TARGET_OS_WIN
4318
4319 // Result is in rax or rdx:rax - do not destroy these registers!
4320
4321 // Check result for exception sentinel.
4322 Label exception_returned;
4323 // The returned value may be a trusted object, living outside of the main
4324 // pointer compression cage, so we need to use full pointer comparison here.
4325 __ CompareRoot(rax, RootIndex::kException, ComparisonMode::kFullPointer);
4326 __ j(equal, &exception_returned);
4327
4328#if V8_ENABLE_WEBASSEMBLY
4329 if (switch_to_central_stack) {
4330 SwitchFromTheCentralStackIfNeeded(masm, kR12SpillSlot);
4331 }
4332#endif // V8_ENABLE_WEBASSEMBLY
4333
4334 // Check that there is no exception, otherwise we
4335 // should have returned the exception sentinel.
4336 if (v8_flags.debug_code) {
4337 Label okay;
4338 __ LoadRoot(kScratchRegister, RootIndex::kTheHoleValue);
4339 ER exception_address =
4340 ER::Create(IsolateAddressId::kExceptionAddress, masm->isolate());
4341 __ cmp_tagged(kScratchRegister,
4342 masm->ExternalReferenceAsOperand(exception_address));
4343 __ j(equal, &okay, Label::kNear);
4344 __ int3();
4345 __ bind(&okay);
4346 }
4347
4348 __ LeaveExitFrame();
4349 if (argv_mode == ArgvMode::kStack) {
4350 // Drop arguments and the receiver from the caller stack.
4351 __ PopReturnAddressTo(rcx);
4352 __ leaq(rsp, Operand(kArgvRegister, kReceiverOnStackSize));
4353 __ PushReturnAddressFrom(rcx);
4354 }
4355 __ ret(0);
4356
4357 // Handling of exception.
4358 __ bind(&exception_returned);
4359
4360 ER pending_handler_context_address = ER::Create(
4361 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
4362 ER pending_handler_entrypoint_address = ER::Create(
4363 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
4364 ER pending_handler_fp_address =
4365 ER::Create(IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
4366 ER pending_handler_sp_address =
4367 ER::Create(IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
4368
4369 // Ask the runtime for help to determine the handler. This will set rax to
4370 // contain the current exception, don't clobber it.
4371 ER find_handler = ER::Create(Runtime::kUnwindAndFindExceptionHandler);
4372 {
4373 FrameScope scope(masm, StackFrame::MANUAL);
4374 __ Move(kCArgRegs[0], 0); // argc.
4375 __ Move(kCArgRegs[1], 0); // argv.
4376 __ Move(kCArgRegs[2], ER::isolate_address());
4377 __ PrepareCallCFunction(3);
4378 __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo);
4379 }
4380
4381#ifdef V8_ENABLE_CET_SHADOW_STACK
4382 // Drop frames from the shadow stack.
4383 ER num_frames_above_pending_handler_address = ER::Create(
4384 IsolateAddressId::kNumFramesAbovePendingHandlerAddress, masm->isolate());
4385 __ movq(rcx, masm->ExternalReferenceAsOperand(
4386 num_frames_above_pending_handler_address));
4387 __ IncsspqIfSupported(rcx, kScratchRegister);
4388#endif // V8_ENABLE_CET_SHADOW_STACK
4389
4390 // Retrieve the handler context, SP and FP.
4391 __ movq(rsi,
4392 masm->ExternalReferenceAsOperand(pending_handler_context_address));
4393 __ movq(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
4394 __ movq(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
4395
4396 // If the handler is a JS frame, restore the context to the frame. Note that
4397 // the context will be set to (rsi == 0) for non-JS frames.
4398 Label skip;
4399 __ testq(rsi, rsi);
4400 __ j(zero, &skip, Label::kNear);
4401 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
4402 __ bind(&skip);
4403
4404 // Clear c_entry_fp, like we do in `LeaveExitFrame`.
4405 ER c_entry_fp_address =
4406 ER::Create(IsolateAddressId::kCEntryFPAddress, masm->isolate());
4407 Operand c_entry_fp_operand =
4408 masm->ExternalReferenceAsOperand(c_entry_fp_address);
4409 __ movq(c_entry_fp_operand, Immediate(0));
4410
4411 // Compute the handler entry address and jump to it.
4412 __ movq(rdi,
4413 masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
4414 __ jmp(rdi);
4415}
4416
4417#if V8_ENABLE_WEBASSEMBLY
4418void Builtins::Generate_WasmHandleStackOverflow(MacroAssembler* masm) {
4419 using ER = ExternalReference;
4420 Register frame_base = WasmHandleStackOverflowDescriptor::FrameBaseRegister();
4421 Register gap = WasmHandleStackOverflowDescriptor::GapRegister();
4422 {
4423 DCHECK_NE(kCArgRegs[1], frame_base);
4424 DCHECK_NE(kCArgRegs[3], frame_base);
4425 __ movq(kCArgRegs[3], gap);
4426 __ movq(kCArgRegs[1], rsp);
4427 __ movq(kCArgRegs[2], frame_base);
4428 __ subq(kCArgRegs[2], kCArgRegs[1]);
4429#ifdef V8_TARGET_OS_WIN
4430 Register old_fp = rcx;
4431 // On Windows we need to preserve the rbp value somewhere before entering
4432 // the INTERNAL frame later. It will be placed on the stack as an argument.
4433 __ movq(old_fp, rbp);
4434#else
4435 __ movq(kCArgRegs[4], rbp);
4436#endif
4437 FrameScope scope(masm, StackFrame::INTERNAL);
4438 __ pushq(kCArgRegs[3]);
4439 __ PrepareCallCFunction(5);
4440 // On Windows, pass the fifth argument on the stack (PrepareCallCFunction
4441 // has created space for this).
4442#ifdef V8_TARGET_OS_WIN
4443 __ movq(Operand(rsp, 4 * kSystemPointerSize), old_fp);
4444#endif
4445 __ Move(kCArgRegs[0], ER::isolate_address());
4446 __ CallCFunction(ER::wasm_grow_stack(), 5);
4447 __ popq(gap);
4449 }
4450 Label call_runtime;
4451 // wasm_grow_stack returns zero if it cannot grow a stack.
4452 __ testq(kReturnRegister0, kReturnRegister0);
4453 __ j(zero, &call_runtime, Label::kNear);
4454 // Calculate old FP - SP offset to adjust FP accordingly to new SP.
4455 __ subq(rbp, rsp);
4456 __ addq(rbp, kReturnRegister0);
4457 __ movq(rsp, kReturnRegister0);
4458 __ movq(kScratchRegister,
4459 Immediate(StackFrame::TypeToMarker(StackFrame::WASM_SEGMENT_START)));
4460 __ movq(MemOperand(rbp, TypedFrameConstants::kFrameTypeOffset),
4461 kScratchRegister);
4462 __ ret(0);
4463
4464 // If wasm_grow_stack returns zero, interruption or stack overflow
4465 // should be handled by a runtime call.
4466 {
4467 __ bind(&call_runtime);
4468 __ movq(kWasmImplicitArgRegister,
4469 MemOperand(rbp, WasmFrameConstants::kWasmInstanceDataOffset));
4470 __ LoadTaggedField(
4471 kContextRegister,
4472 FieldOperand(kWasmImplicitArgRegister,
4473 WasmTrustedInstanceData::kNativeContextOffset));
4474 FrameScope scope(masm, StackFrame::MANUAL);
4475 __ EnterFrame(StackFrame::INTERNAL);
4476 __ SmiTag(gap);
4477 __ pushq(gap);
4478 __ CallRuntime(Runtime::kWasmStackGuard);
4479 __ LeaveFrame(StackFrame::INTERNAL);
4480 __ ret(0);
4481 }
4482}
4483#endif // V8_ENABLE_WEBASSEMBLY
4484
4485void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
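// Converts the double argument passed on the stack to a truncated int32
// (modulo-2^32 semantics): if the unbiased exponent is below the mantissa
// width, the result is assembled by shifting the low mantissa word; otherwise
// a 64-bit cvttsd2si conversion is used.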
4486 Label check_negative, process_64_bits, done;
4487
4488 // Account for return address and saved regs.
4489 const int kArgumentOffset = 4 * kSystemPointerSize;
4490
4491 MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
4492 MemOperand exponent_operand(
4493 MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
4494
4495 // The result is returned on the stack.
4496 MemOperand return_operand = mantissa_operand;
4497
4498 Register scratch1 = rbx;
4499
4500 // Since we must use rcx for shifts below, use some other register (rax)
4501 // to calculate the result if rcx is the requested return register.
4502 Register result_reg = rax;
4503 // Save rcx if it isn't the return register and therefore volatile, or if it
4504 // is the return register, then save the temp register we use in its stead
4505 // for the result.
4506 Register save_reg = rax;
4506 Register save_reg = rax;
4507 __ pushq(rcx);
4508 __ pushq(scratch1);
4509 __ pushq(save_reg);
4510
4511 __ movl(scratch1, mantissa_operand);
4512 __ Movsd(kScratchDoubleReg, mantissa_operand);
4513 __ movl(rcx, exponent_operand);
4514
4515 __ andl(rcx, Immediate(HeapNumber::kExponentMask));
4516 __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
4517 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
4518 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
4519 __ j(below, &process_64_bits, Label::kNear);
4520
4521 // Result is entirely in lower 32-bits of mantissa
4522 int delta =
4524 __ subl(rcx, Immediate(delta));
4525 __ xorl(result_reg, result_reg);
4526 __ cmpl(rcx, Immediate(31));
4527 __ j(above, &done, Label::kNear);
4528 __ shll_cl(scratch1);
4529 __ jmp(&check_negative, Label::kNear);
4530
4531 __ bind(&process_64_bits);
4532 __ Cvttsd2siq(result_reg, kScratchDoubleReg);
4533 __ jmp(&done, Label::kNear);
4534
4535 // If the double was negative, negate the integer result.
4536 __ bind(&check_negative);
4537 __ movl(result_reg, scratch1);
4538 __ negl(result_reg);
4539 __ cmpl(exponent_operand, Immediate(0));
4540 __ cmovl(greater, result_reg, scratch1);
4541
4542 // Restore registers
4543 __ bind(&done);
4544 __ movl(return_operand, result_reg);
4545 __ popq(save_reg);
4546 __ popq(scratch1);
4547 __ popq(rcx);
4548 __ ret(0);
4549}
4550
4551// TODO(jgruber): Instead of explicitly setting up implicit_args_ on the stack
4552// in CallApiCallback, we could use the calling convention to set up the stack
4553// correctly in the first place.
4554//
4555// TODO(jgruber): I suspect that most of CallApiCallback could be implemented
4556// as a C++ trampoline, vastly simplifying the assembly implementation.
4557
4558void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm,
4559 CallApiCallbackMode mode) {
4560 // ----------- S t a t e -------------
4561 // CallApiCallbackMode::kOptimizedNoProfiling/kOptimized modes:
4562 // -- rdx : api function address
4563 // Both modes:
4564 // -- rcx : arguments count (not including the receiver)
4565 // -- rbx : FunctionTemplateInfo
4566 // -- rsi : context
4567 // -- rsp[0] : return address
4568 // -- rsp[8] : argument 0 (receiver)
4569 // -- rsp[16] : argument 1
4570 // -- ...
4571 // -- rsp[argc * 8] : argument (argc - 1)
4572 // -- rsp[(argc + 1) * 8] : argument argc
4573 // -----------------------------------
4574
4575 Register function_callback_info_arg = kCArgRegs[0];
4576
4577 Register api_function_address = no_reg;
4578 Register argc = no_reg;
4579 Register func_templ = no_reg;
4580 Register topmost_script_having_context = no_reg;
4581 Register scratch = rax;
4582 Register scratch2 = no_reg;
4583
4584 switch (mode) {
4585 case CallApiCallbackMode::kGeneric:
4586 scratch2 = r9;
4587 argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister();
4588 topmost_script_having_context = CallApiCallbackGenericDescriptor::
4589 TopmostScriptHavingContextRegister();
4590 func_templ =
4591 CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister();
4592 break;
4593
4594 case CallApiCallbackMode::kOptimizedNoProfiling:
4595 case CallApiCallbackMode::kOptimized:
4596 // Caller context is always equal to current context because we don't
4597 // inline Api calls cross-context.
4598 topmost_script_having_context = kContextRegister;
4599 api_function_address =
4600 CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
4601 argc = CallApiCallbackOptimizedDescriptor::ActualArgumentsCountRegister();
4602 func_templ =
4603 CallApiCallbackOptimizedDescriptor::FunctionTemplateInfoRegister();
4604 break;
4605 }
4606 DCHECK(!AreAliased(api_function_address, topmost_script_having_context, argc,
4607 func_templ, scratch, scratch2, kScratchRegister));
4608
4609 using FCA = FunctionCallbackArguments;
4610 using ER = ExternalReference;
4611 using FC = ApiCallbackExitFrameConstants;
4612
4613 static_assert(FCA::kArgsLength == 6);
4614 static_assert(FCA::kNewTargetIndex == 5);
4615 static_assert(FCA::kTargetIndex == 4);
4616 static_assert(FCA::kReturnValueIndex == 3);
4617 static_assert(FCA::kContextIndex == 2);
4618 static_assert(FCA::kIsolateIndex == 1);
4619 static_assert(FCA::kUnusedIndex == 0);
4620
4621 // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
4622 //
4623 // Current state:
4624 // rsp[0]: return address
4625 //
4626 // Target state:
4627 // rsp[0 * kSystemPointerSize]: return address
4628 // rsp[1 * kSystemPointerSize]: kUnused <= FCA::implicit_args_
4629 // rsp[2 * kSystemPointerSize]: kIsolate
4630 // rsp[3 * kSystemPointerSize]: kContext
4631 // rsp[4 * kSystemPointerSize]: undefined (kReturnValue)
4632 // rsp[5 * kSystemPointerSize]: kTarget
4633 // rsp[6 * kSystemPointerSize]: undefined (kNewTarget)
4634 // Existing state:
4635 // rsp[7 * kSystemPointerSize]: <= FCA::values_
4636
4637 __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
4638 topmost_script_having_context);
4639
4640 if (mode == CallApiCallbackMode::kGeneric) {
4641 api_function_address = ReassignRegister(topmost_script_having_context);
4642 }
4643
4644 __ PopReturnAddressTo(scratch);
4645 __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
4646 __ Push(kScratchRegister); // kNewTarget
4647 __ Push(func_templ); // kTarget
4648 __ Push(kScratchRegister); // kReturnValue
4649 __ Push(kContextRegister); // kContext
4650 __ PushAddress(ER::isolate_address()); // kIsolate
4651 // TODO(ishell, http://crbug.com/326505377): in case of non-constructor
4652 // call, don't pass kNewTarget and kUnused. Add IsConstructCall flag to
4653 // kIsolate field.
4654 __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
4655 __ Push(kScratchRegister); // kUnused
4656
4657 if (mode == CallApiCallbackMode::kGeneric) {
4658 __ LoadExternalPointerField(
4659 api_function_address,
4660 FieldOperand(func_templ,
4661 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset),
4663 }
4664
4665 __ PushReturnAddressFrom(scratch);
4666 __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4667 StackFrame::API_CALLBACK_EXIT, api_function_address);
4668
4669 Operand argc_operand = Operand(rbp, FC::kFCIArgcOffset);
4670 {
4671 ASM_CODE_COMMENT_STRING(masm, "Initialize v8::FunctionCallbackInfo");
4672 // FunctionCallbackInfo::length_.
4673 // TODO(ishell): pass JSParameterCount(argc) to simplify things on the
4674 // caller end.
4675 __ movq(argc_operand, argc);
4676
4677 // FunctionCallbackInfo::implicit_args_.
4678 __ leaq(scratch, Operand(rbp, FC::kImplicitArgsArrayOffset));
4679 __ movq(Operand(rbp, FC::kFCIImplicitArgsOffset), scratch);
4680
4681 // FunctionCallbackInfo::values_ (points at JS arguments on the stack).
4682 __ leaq(scratch, Operand(rbp, FC::kFirstArgumentOffset));
4683 __ movq(Operand(rbp, FC::kFCIValuesOffset), scratch);
4684 }
4685
4686 __ RecordComment("v8::FunctionCallback's argument.");
4687 __ leaq(function_callback_info_arg,
4688 Operand(rbp, FC::kFunctionCallbackInfoOffset));
4689
4690 DCHECK(!AreAliased(api_function_address, function_callback_info_arg));
4691
4692 ExternalReference thunk_ref = ER::invoke_function_callback(mode);
4693 Register no_thunk_arg = no_reg;
4694
4695 Operand return_value_operand = Operand(rbp, FC::kReturnValueOffset);
4696 static constexpr int kSlotsToDropOnReturn =
4697 FC::kFunctionCallbackInfoArgsLength + kJSArgcReceiverSlots;
4698
4699 const bool with_profiling =
4700 mode != CallApiCallbackMode::kOptimizedNoProfiling;
4701 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4702 thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
4703 &argc_operand, return_value_operand);
4704}
4705
4706void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
4707 // ----------- S t a t e -------------
4708 // -- rsi : context
4709 // -- rdx : receiver
4710 // -- rcx : holder
4711 // -- rbx : accessor info
4712 // -- rsp[0] : return address
4713 // -----------------------------------
4714
4715 Register name_arg = kCArgRegs[0];
4716 Register property_callback_info_arg = kCArgRegs[1];
4717
4718 Register api_function_address = r8;
4719 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4720   Register holder = ApiGetterDescriptor::HolderRegister();
4721   Register callback = ApiGetterDescriptor::CallbackRegister();
4722   Register scratch = rax;
4723 Register decompr_scratch1 = COMPRESS_POINTERS_BOOL ? r15 : no_reg;
4724
4725 DCHECK(!AreAliased(receiver, holder, callback, scratch, decompr_scratch1));
4726
4727 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4728 // name below the exit frame to make GC aware of them.
4729 using PCA = PropertyCallbackArguments;
4730 using ER = ExternalReference;
4731 using FC = ApiAccessorExitFrameConstants;
4732
4733 static_assert(PCA::kPropertyKeyIndex == 0);
4734 static_assert(PCA::kShouldThrowOnErrorIndex == 1);
4735 static_assert(PCA::kHolderIndex == 2);
4736 static_assert(PCA::kIsolateIndex == 3);
4737 static_assert(PCA::kHolderV2Index == 4);
4738 static_assert(PCA::kReturnValueIndex == 5);
4739 static_assert(PCA::kDataIndex == 6);
4740 static_assert(PCA::kThisIndex == 7);
4741 static_assert(PCA::kArgsLength == 8);
4742
4743 // Set up v8::PropertyCallbackInfo's (PCI) args_ on the stack as follows:
4744 // Current state:
4745 // rsp[0]: return address
4746 //
4747 // Target state:
4748 // rsp[0 * kSystemPointerSize]: return address
4749 // rsp[1 * kSystemPointerSize]: name <= PCI::args_
4750 // rsp[2 * kSystemPointerSize]: kShouldThrowOnErrorIndex
4751 // rsp[3 * kSystemPointerSize]: kHolderIndex
4752 // rsp[4 * kSystemPointerSize]: kIsolateIndex
4753 // rsp[5 * kSystemPointerSize]: kHolderV2Index
4754 // rsp[6 * kSystemPointerSize]: kReturnValueIndex
4755 // rsp[7 * kSystemPointerSize]: kDataIndex
4756 // rsp[8 * kSystemPointerSize]: kThisIndex / receiver
4757
4758 __ PopReturnAddressTo(scratch);
4759 __ Push(receiver);
4760 __ PushTaggedField(FieldOperand(callback, AccessorInfo::kDataOffset),
4761 decompr_scratch1);
4762 __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
4763 __ Push(kScratchRegister); // return value
4764 __ Push(Smi::zero()); // holderV2 value
4765 __ PushAddress(ER::isolate_address());
4766 __ Push(holder);
4767 __ Push(Smi::FromInt(kDontThrow)); // should_throw_on_error -> kDontThrow
4768
4769 // Register name = ReassignRegister(receiver);
4770 __ LoadTaggedField(name_arg,
4771 FieldOperand(callback, AccessorInfo::kNameOffset));
4772 __ Push(name_arg);
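  // The pushes above run from kThisIndex (the receiver) down to
  // kPropertyKeyIndex (the name), matching the static_asserts above, so the
  // name ends up at index 0 of PCI::args_ as shown in the target-state layout.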
4773
4774 __ RecordComment("Load api_function_address");
4775 __ LoadExternalPointerField(
4776 api_function_address,
4777 FieldOperand(callback, AccessorInfo::kMaybeRedirectedGetterOffset),
4778       kAccessorInfoGetterTag, kScratchRegister);
4779
4780 __ PushReturnAddressFrom(scratch);
4781 __ EnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
4782 StackFrame::API_ACCESSOR_EXIT, api_function_address);
4783
4784 __ RecordComment("Create v8::PropertyCallbackInfo object on the stack.");
4785   // The context register (rsi) might overlap with property_callback_info_arg,
4786   // but the context value has been saved in EnterExitFrame, so the register
4787   // can be reused for passing arguments.
4788 // property_callback_info_arg = v8::PropertyCallbackInfo&
4789 __ leaq(property_callback_info_arg, Operand(rbp, FC::kArgsArrayOffset));
4790
4791 DCHECK(!AreAliased(api_function_address, property_callback_info_arg, name_arg,
4792 callback, scratch));
4793
4794#ifdef V8_ENABLE_DIRECT_HANDLE
4795 // name_arg = Local<Name>(name), name value was pushed to GC-ed stack space.
4796 //__ movq(name_arg, name);
4797 // |name_arg| is already initialized above.
4798#else
4799 // name_arg = Local<Name>(&name), which is &args_array[kPropertyKeyIndex].
4800 static_assert(PCA::kPropertyKeyIndex == 0);
4801 __ movq(name_arg, property_callback_info_arg);
4802#endif
4803
4804 ExternalReference thunk_ref = ER::invoke_accessor_getter_callback();
4805 // Pass AccessorInfo to thunk wrapper in case profiler or side-effect
4806 // checking is enabled.
4807 Register thunk_arg = callback;
4808
4809 Operand return_value_operand = Operand(rbp, FC::kReturnValueOffset);
4810 static constexpr int kSlotsToDropOnReturn =
4811 FC::kPropertyCallbackInfoArgsLength;
4812 Operand* const kUseStackSpaceConstant = nullptr;
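  // For the getter, the amount of stack to release on return is fully static
  // (only the PropertyCallbackArguments slots built above), so no dynamic argc
  // operand is passed to CallApiFunctionAndReturn below.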
4813
4814 const bool with_profiling = true;
4815 CallApiFunctionAndReturn(masm, with_profiling, api_function_address,
4816 thunk_ref, thunk_arg, kSlotsToDropOnReturn,
4817 kUseStackSpaceConstant, return_value_operand);
4818}
4819
4820void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
4821 __ int3(); // Unused on this architecture.
4822}
4823
4824namespace {
4825
4826void Generate_DeoptimizationEntry(MacroAssembler* masm,
4827 DeoptimizeKind deopt_kind) {
4828 Isolate* isolate = masm->isolate();
4829
4830 // Save all xmm (simd / double) registers, they will later be copied to the
4831 // deoptimizer's FrameDescription.
4832 static constexpr int kXmmRegsSize = kSimd128Size * XMMRegister::kNumRegisters;
4833 __ AllocateStackSpace(kXmmRegsSize);
4834
4835 const RegisterConfiguration* config = RegisterConfiguration::Default();
4837 config->num_allocatable_simd128_registers());
4838 DCHECK_EQ(config->num_allocatable_simd128_registers(),
4839 config->num_allocatable_double_registers());
4840 for (int i = 0; i < config->num_allocatable_simd128_registers(); ++i) {
4841 int code = config->GetAllocatableSimd128Code(i);
4842 XMMRegister xmm_reg = XMMRegister::from_code(code);
4843 int offset = code * kSimd128Size;
4844 __ movdqu(Operand(rsp, offset), xmm_reg);
4845 }
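  // Only the allocatable simd128 registers are written here, but the save area
  // was sized for all XMMRegister::kNumRegisters and is indexed by register
  // code, so the remaining slots are simply left untouched.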
4846
4847 // Save all general purpose registers, they will later be copied to the
4848 // deoptimizer's FrameDescription.
4849 static constexpr int kNumberOfRegisters = Register::kNumRegisters;
4850 for (int i = 0; i < kNumberOfRegisters; i++) {
4851 __ pushq(Register::from_code(i));
4852 }
4853
4854 static constexpr int kSavedRegistersAreaSize =
4855 kNumberOfRegisters * kSystemPointerSize + kXmmRegsSize;
4856 static constexpr int kCurrentOffsetToReturnAddress = kSavedRegistersAreaSize;
4857 static constexpr int kCurrentOffsetToParentSP =
4858 kCurrentOffsetToReturnAddress + kPCOnStackSize;
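  // Stack layout at this point, from lower to higher addresses:
  //   rsp + 0 .. kNumberOfRegisters * kSystemPointerSize:
  //       general purpose registers, pushed in ascending code order
  //       (so the highest register code sits at rsp + 0)
  //   then the xmm save area (kXmmRegsSize bytes)
  //   rsp + kCurrentOffsetToReturnAddress: return address of the deopt point
  //   rsp + kCurrentOffsetToParentSP: the deoptimized frame's SP at that call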
4859
4860 __ Store(
4861 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate),
4862 rbp);
4863
4864   // Get the address of the deoptimization point in the code object
4865   // and compute the fp-to-sp delta in kCArgRegs[3].
4866 __ movq(kCArgRegs[2], Operand(rsp, kCurrentOffsetToReturnAddress));
4867 // Load the fp-to-sp-delta.
4868 __ leaq(kCArgRegs[3], Operand(rsp, kCurrentOffsetToParentSP));
4869 __ subq(kCArgRegs[3], rbp);
4870 __ negq(kCArgRegs[3]);
4871
4872 // Allocate a new deoptimizer object.
4873 __ PrepareCallCFunction(5);
4874 __ Move(rax, 0);
4875 Label context_check;
4876 __ movq(rdi, Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset));
4877 __ JumpIfSmi(rdi, &context_check);
4878 __ movq(rax, Operand(rbp, StandardFrameConstants::kFunctionOffset));
4879 __ bind(&context_check);
4880 __ movq(kCArgRegs[0], rax);
4881 __ Move(kCArgRegs[1], static_cast<int>(deopt_kind));
4882 // Args 3 and 4 are already in the right registers.
4883
4884   // On Windows, put the fifth argument (the isolate) on the stack
4885   // (PrepareCallCFunction has created space for this). On Linux, pass it in r8.
4886#ifdef V8_TARGET_OS_WIN
4887 Register arg5 = r15;
4888 __ LoadAddress(arg5, ExternalReference::isolate_address());
4889 __ movq(Operand(rsp, 4 * kSystemPointerSize), arg5);
4890#else
4891 // r8 is kCArgRegs[4] on Linux.
4892 __ LoadAddress(r8, ExternalReference::isolate_address());
4893#endif
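  // At this point the C arguments for creating the deoptimizer are in place:
  // kCArgRegs[0] = the JSFunction (or 0 if the frame slot only holds a frame
  // type marker), kCArgRegs[1] = deopt_kind, kCArgRegs[2] = return address at
  // the deopt point, kCArgRegs[3] = fp-to-sp delta, and the isolate in the
  // fifth argument slot (stack slot on Windows, r8 on Linux).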
4894
4895 {
4896 AllowExternalCallThatCantCauseGC scope(masm);
4897 __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
4898 }
4899 // Preserve deoptimizer object in register rax and get the input
4900 // frame descriptor pointer.
4901 __ movq(rbx, Operand(rax, Deoptimizer::input_offset()));
4902
4903 // Fill in the input registers.
4904 for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
4905 int offset =
4906         (i * kSystemPointerSize) + FrameDescription::registers_offset();
4907     __ PopQuad(Operand(rbx, offset));
4908 }
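  // The general purpose registers were pushed in ascending code order above,
  // so popping them here in reverse (i = kNumberOfRegisters - 1 .. 0) writes
  // each value into its register slot of the input FrameDescription.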
4909
4910 // Fill in the xmm (simd / double) input registers.
4911 int simd128_regs_offset = FrameDescription::simd128_registers_offset();
4912 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
4913 int dst_offset = i * kSimd128Size + simd128_regs_offset;
4914 __ movdqu(kScratchDoubleReg, Operand(rsp, i * kSimd128Size));
4915 __ movdqu(Operand(rbx, dst_offset), kScratchDoubleReg);
4916 }
4917 __ addq(rsp, Immediate(kXmmRegsSize));
4918
4919 // Mark the stack as not iterable for the CPU profiler which won't be able to
4920 // walk the stack without the return address.
4921 __ movb(__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
4922 Immediate(0));
4923
4924 // Remove the return address from the stack.
4925 __ addq(rsp, Immediate(kPCOnStackSize));
4926
4927 // Compute a pointer to the unwinding limit in register rcx; that is
4928 // the first stack slot not part of the input frame.
4929 __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
4930 __ addq(rcx, rsp);
4931
4932 // Unwind the stack down to - but not including - the unwinding
4933 // limit and copy the contents of the activation frame to the input
4934 // frame description.
4935 __ leaq(rdx, Operand(rbx, FrameDescription::frame_content_offset()));
4936 Label pop_loop_header;
4937 __ jmp(&pop_loop_header);
4938 Label pop_loop;
4939 __ bind(&pop_loop);
4940 __ Pop(Operand(rdx, 0));
4941 __ addq(rdx, Immediate(sizeof(intptr_t)));
4942 __ bind(&pop_loop_header);
4943 __ cmpq(rcx, rsp);
4944 __ j(not_equal, &pop_loop);
4945
4946 // Compute the output frame in the deoptimizer.
4947 __ pushq(rax);
4948 __ PrepareCallCFunction(2);
4949 __ movq(kCArgRegs[0], rax);
4950   __ LoadAddress(kCArgRegs[1], ExternalReference::isolate_address());
4951   {
4952 AllowExternalCallThatCantCauseGC scope(masm);
4953 __ CallCFunction(ExternalReference::compute_output_frames_function(), 2);
4954 }
4955 __ popq(rax);
4956#ifdef V8_ENABLE_CET_SHADOW_STACK
4957 __ movq(r8, rax);
4958#endif // V8_ENABLE_CET_SHADOW_STACK
4959
4960 __ movq(rsp, Operand(rax, Deoptimizer::caller_frame_top_offset()));
4961
4962 // Replace the current (input) frame with the output frames.
4963 Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
4964 // Outer loop state: rax = current FrameDescription**, rdx = one past the
4965 // last FrameDescription**.
4966 __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset()));
4967 __ movq(rax, Operand(rax, Deoptimizer::output_offset()));
4968 __ leaq(rdx, Operand(rax, rdx, times_system_pointer_size, 0));
4969 __ jmp(&outer_loop_header);
4970 __ bind(&outer_push_loop);
4971 // Inner loop state: rbx = current FrameDescription*, rcx = loop index.
4972 __ movq(rbx, Operand(rax, 0));
4973 __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
4974 __ jmp(&inner_loop_header);
4975 __ bind(&inner_push_loop);
4976 __ subq(rcx, Immediate(sizeof(intptr_t)));
4977 __ Push(Operand(rbx, rcx, times_1, FrameDescription::frame_content_offset()));
4978 __ bind(&inner_loop_header);
4979 __ testq(rcx, rcx);
4980 __ j(not_zero, &inner_push_loop);
4981 __ addq(rax, Immediate(kSystemPointerSize));
4982 __ bind(&outer_loop_header);
4983 __ cmpq(rax, rdx);
4984 __ j(below, &outer_push_loop);
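  // The two nested loops above materialize the output frames: the outer loop
  // walks the FrameDescription* array starting at output_offset(), and for
  // each frame the inner loop pushes frame_content_ from the highest offset
  // down to 0, so offset 0 ends up at the lowest address, i.e. the new top of
  // stack.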
4985
4986 // Push pc and continuation from the last output frame.
4987 __ PushQuad(Operand(rbx, FrameDescription::pc_offset()));
4988 __ movq(rax, Operand(rbx, FrameDescription::continuation_offset()));
4989 // Skip pushing the continuation if it is zero. This is used as a marker for
4990 // wasm deopts that do not use a builtin call to finish the deopt.
4991 Label push_registers;
4992 __ testq(rax, rax);
4993 __ j(zero, &push_registers);
4994 __ Push(rax);
4995 __ bind(&push_registers);
4996 // Push the registers from the last output frame.
4997 for (int i = 0; i < kNumberOfRegisters; i++) {
4998     Register r = Register::from_code(i);
4999     // Do not restore rsp and kScratchRegister.
5000     if (r == rsp || r == kScratchRegister) continue;
5001     int offset =
5002         (i * kSystemPointerSize) + FrameDescription::registers_offset();
5003     __ PushQuad(Operand(rbx, offset));
5004 }
5005
5006#ifdef V8_ENABLE_CET_SHADOW_STACK
5007 // Check v8_flags.cet_compatible.
5008 Label shadow_stack_push;
5009 __ cmpb(__ ExternalReferenceAsOperand(
5010 ExternalReference::address_of_cet_compatible_flag(),
5012 Immediate(0));
5013 __ j(not_equal, &shadow_stack_push);
5014#endif // V8_ENABLE_CET_SHADOW_STACK
5015
5016 Generate_RestoreFrameDescriptionRegisters(masm, rbx);
5017
5018 __ movb(__ ExternalReferenceAsOperand(IsolateFieldId::kStackIsIterable),
5019 Immediate(1));
5020
5021 // Return to the continuation point.
5022 __ ret(0);
5023
5024#ifdef V8_ENABLE_CET_SHADOW_STACK
5025 // Push candidate return addresses for shadow stack onto the stack.
5026 __ bind(&shadow_stack_push);
5027
5028   // Push the last FrameDescription onto the stack so the xmm registers can be
5029   // restored from it later.
5030 __ pushq(rbx);
5031
5032 // r8 = deoptimizer
5033 __ movl(kAdaptShadowStackCountRegister,
5034 Operand(r8, Deoptimizer::shadow_stack_count_offset()));
5035 __ movq(rax, Operand(r8, Deoptimizer::shadow_stack_offset()));
5036
5037 Label check_more_pushes, next_push;
5038 __ Move(kScratchRegister, 0);
5039 __ jmp(&check_more_pushes, Label::kNear);
5040 __ bind(&next_push);
5041 // rax points to the start of the shadow stack array.
5042 __ pushq(Operand(rax, kScratchRegister, times_system_pointer_size, 0));
5043 __ incl(kScratchRegister);
5044 __ bind(&check_more_pushes);
5045 __ cmpl(kScratchRegister, kAdaptShadowStackCountRegister);
5046 __ j(not_equal, &next_push);
5047
5048 // We drop 1 word from the shadow stack. It contains the return address from
5049 // DeoptimizationEntry.
5050 __ Move(rax, 1);
5051 __ IncsspqIfSupported(rax, kScratchRegister);
5052
5053 // Now, kick off the process of getting our continuations onto the shadow
5054 // stack. Note that the stack has 2 extra words to be popped at the end
5055 // of the process:
5056 // 1) the kAdaptShadowStackCountRegister
5057 // 2) kScratchRegister
5058 __ movq(kScratchRegister,
5059           Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(
5060                                      Builtin::kAdaptShadowStackForDeopt)));
5061 // We don't enter at the start of AdaptShadowStackForDeopt, because that
5062 // is designed to be called by builtin continuations in order to get
5063 // return addresses into those continuations on the stack. Therefore, we
5064 // have to make a special entry at kAdaptShadowStackDispatchFirstEntryOffset.
5065 __ addq(kScratchRegister,
5066 Immediate(kAdaptShadowStackDispatchFirstEntryOffset));
5067 __ jmp(kScratchRegister);
5068
5069 __ int3();
5070#endif // V8_ENABLE_CET_SHADOW_STACK
5071}
5072
5073} // namespace
5074
5075void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
5076 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
5077}
5078
5079void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
5080 Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
5081}
5082
5083// If there is baseline code on the shared function info, converts an
5084// interpreter frame into a baseline frame and continues execution in baseline
5085// code. Otherwise execution continues with bytecode.
5086void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
5087 MacroAssembler* masm) {
5088 Label start;
5089 __ bind(&start);
5090
5091 // Get function from the frame.
5092 Register closure = rdi;
5093   __ movq(closure, Operand(rbp, StandardFrameConstants::kFunctionOffset));
5094
5095 // Get the InstructionStream object from the shared function info.
5096 Register code_obj = rbx;
5097 Register shared_function_info(code_obj);
5098 __ LoadTaggedField(
5099 shared_function_info,
5100 FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
5101
5102 ResetSharedFunctionInfoAge(masm, shared_function_info);
5103
5104 __ LoadTrustedPointerField(
5105 code_obj,
5106 FieldOperand(shared_function_info,
5107 SharedFunctionInfo::kTrustedFunctionDataOffset),
5109
5110 // For OSR entry it is safe to assume we always have baseline code.
5111 if (v8_flags.debug_code) {
5112 __ IsObjectType(code_obj, CODE_TYPE, kScratchRegister);
5113 __ Assert(equal, AbortReason::kExpectedBaselineData);
5114 AssertCodeIsBaseline(masm, code_obj, r11);
5115 }
5116
5117 // Load the feedback cell and feedback vector.
5118 Register feedback_cell = r8;
5119 Register feedback_vector = r11;
5120 __ LoadTaggedField(feedback_cell,
5121 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
5122 __ LoadTaggedField(feedback_vector,
5123 FieldOperand(feedback_cell, FeedbackCell::kValueOffset));
5124
5125 Label install_baseline_code;
5126 // Check if feedback vector is valid. If not, call prepare for baseline to
5127 // allocate it.
5128 __ IsObjectType(feedback_vector, FEEDBACK_VECTOR_TYPE, kScratchRegister);
5129 __ j(not_equal, &install_baseline_code);
5130
5131 // Save bytecode offset from the stack frame.
5132 __ SmiUntagUnsigned(
5135 // Replace bytecode offset with feedback cell.
5139 feedback_cell);
5140 feedback_cell = no_reg;
5141 // Update feedback vector cache.
5145 feedback_vector);
5146 feedback_vector = no_reg;
5147
5148 // Compute baseline pc for bytecode offset.
5149 Register get_baseline_pc = r11;
5150 __ LoadAddress(get_baseline_pc,
5151 ExternalReference::baseline_pc_for_next_executed_bytecode());
5152
5155
5156 // Get bytecode array from the stack frame.
5160 {
5161 FrameScope scope(masm, StackFrame::INTERNAL);
5162 __ PrepareCallCFunction(3);
5163 __ movq(kCArgRegs[0], code_obj);
5166 __ CallCFunction(get_baseline_pc, 3);
5167 }
5168 __ LoadCodeInstructionStart(code_obj, code_obj, kJSEntrypointTag);
5169 __ addq(code_obj, kReturnRegister0);
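  // kReturnRegister0 now holds the pc offset within the baseline code for the
  // next bytecode to execute, so after adding it to the instruction start,
  // code_obj is the address at which to resume in baseline code.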
5171
5172 Generate_OSREntry(masm, code_obj);
5173 __ Trap(); // Unreachable.
5174
5175 __ bind(&install_baseline_code);
5176 {
5177 FrameScope scope(masm, StackFrame::INTERNAL);
5178     __ Push(kInterpreterAccumulatorRegister);
5179     __ Push(closure);
5180     __ CallRuntime(Runtime::kInstallBaselineCode, 1);
5181     __ Pop(kInterpreterAccumulatorRegister);
5182   }
5183 // Retry from the start after installing baseline code.
5184 __ jmp(&start);
5185}
5186
5187void Builtins::Generate_RestartFrameTrampoline(MacroAssembler* masm) {
5188 Generate_CallToAdaptShadowStackForDeopt(masm, true);
5189 masm->isolate()->heap()->SetDeoptPCOffsetAfterAdaptShadowStack(
5190 masm->pc_offset());
5191
5192 // Restart the current frame:
5193 // - Look up current function on the frame.
5194 // - Leave the frame.
5195 // - Restart the frame by calling the function.
5196
5197 __ movq(rdi, Operand(rbp, StandardFrameConstants::kFunctionOffset));
5198 __ movq(rax, Operand(rbp, StandardFrameConstants::kArgCOffset));
5199
5200 __ LeaveFrame(StackFrame::INTERPRETED);
5201
5202   // The arguments are already on the stack (including any necessary padding),
5203   // so we should not try to massage them again.
5204#ifdef V8_ENABLE_LEAPTIERING
5205 __ InvokeFunction(rdi, no_reg, rax, InvokeType::kJump,
5206                     ArgumentAdaptionMode::kDontAdapt);
5207 #else
5208 __ movq(rbx, Immediate(kDontAdaptArgumentsSentinel));
5209 __ InvokeFunction(rdi, no_reg, rbx, rax, InvokeType::kJump);
5210#endif
5211}
5212
5213#undef __
5214
5215} // namespace internal
5216} // namespace v8
5217
5218#endif // V8_TARGET_ARCH_X64